diff --git a/.ci/test-matrix.yml b/.ci/test-matrix.yml index c76a1bff..d63b5793 100755 --- a/.ci/test-matrix.yml +++ b/.ci/test-matrix.yml @@ -2,9 +2,6 @@ TEST_SUITE: - oss PYTHON_VERSION: - - "2.7" - - "3.4" - - "3.5" - "3.6" - "3.7" - "3.8" diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 986b164c..13f8a349 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,2 +1,2 @@ # This should match the team set up in https://github.com/orgs/opensearch-project/teams and include any additional contributors -* @VachaShah @dblock @harshavamsi @axeoman @deztructor @Shephalimittal @saimedhi \ No newline at end of file +* @VachaShah @dblock @harshavamsi @axeoman @deztructor @Shephalimittal @saimedhi @florianvazelle \ No newline at end of file diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index 89d9f46c..106e940a 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -9,7 +9,7 @@ jobs: strategy: fail-fast: false matrix: - opensearch_version: [ '1.0.1', '1.1.0', '1.2.4', '1.3.7', '2.0.1', '2.1.0', '2.2.1', '2.3.0', '2.4.0', '2.5.0', '2.6.0', '2.7.0', '2.8.0' ] + opensearch_version: [ '1.0.1', '1.1.0', '1.2.4', '1.3.7', '2.0.1', '2.1.0', '2.2.1', '2.3.0', '2.4.0', '2.5.0', '2.6.0', '2.7.0', '2.8.0', '2.9.0', '2.10.0' ] secured: [ "true", "false" ] steps: diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 4b672604..bd0ac738 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -7,7 +7,6 @@ jobs: strategy: matrix: entry: - - { os: 'ubuntu-20.04', python-version: "3.5" } - { os: 'ubuntu-20.04', python-version: "3.6" } - { os: 'ubuntu-latest', python-version: "3.7" } - { os: 'ubuntu-latest', python-version: "3.8" } @@ -16,7 +15,7 @@ jobs: - { os: 'ubuntu-latest', python-version: "3.11" } - { os: 'macos-latest', python-version: "3.11" } - name: test (ruby=${{ matrix.entry.os }}, python=${{ matrix.entry.python-version }}) + name: test (os=${{ matrix.entry.os }}, python=${{ matrix.entry.python-version }}) continue-on-error: ${{ matrix.entry.experimental || false }} runs-on: ${{ matrix.entry.os }} steps: diff --git a/.github/workflows/unified-release.yml b/.github/workflows/unified-release.yml index 0d75639e..cddea14a 100644 --- a/.github/workflows/unified-release.yml +++ b/.github/workflows/unified-release.yml @@ -9,7 +9,7 @@ jobs: strategy: fail-fast: false matrix: - stack_version: ['2.3.0'] + stack_version: ['2.3.2'] steps: - name: Checkout diff --git a/.gitignore b/.gitignore index 019de716..153eea24 100644 --- a/.gitignore +++ b/.gitignore @@ -148,6 +148,9 @@ test_opensearch/cover test_opensearch/local.py .ci/output -#Vi text editor +# vi text editor .*.swp *~ + +# Visual Studio Code +.vscode \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index f082078f..c32a0d17 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,15 +4,57 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) ## [Unreleased] ### Added - Added generating imports and headers to API generator ([#467](https://github.com/opensearch-project/opensearch-py/pull/467)) +- Added point-in-time APIs (create_pit, delete_pit, delete_all_pits, get_all_pits) and Security Client APIs (health and update_audit_configuration) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502)) +- Added guide on using index templates ([#531](https://github.com/opensearch-project/opensearch-py/pull/531)) +- Added `pool_maxsize` for `Urllib3HttpConnection` 
([#535](https://github.com/opensearch-project/opensearch-py/pull/535)) +- Added benchmarks ([#537](https://github.com/opensearch-project/opensearch-py/pull/537)) +- Added guide on making raw JSON REST requests ([#542](https://github.com/opensearch-project/opensearch-py/pull/542)) +- Added support for AWS SigV4 for urllib3 ([#547](https://github.com/opensearch-project/opensearch-py/pull/547)) +- Added `remote store` client APIs ([#552](https://github.com/opensearch-project/opensearch-py/pull/552)) +- Added `nox -rs generate` ([#554](https://github.com/opensearch-project/opensearch-py/pull/554)) +- Added a utf-8 header to all .py files ([#557](https://github.com/opensearch-project/opensearch-py/pull/557)) +- Added `samples`, `benchmarks` and `docs` to `nox -rs format` ([#556](https://github.com/opensearch-project/opensearch-py/pull/556)) +- Added guide on the document lifecycle API(s) ([#559](https://github.com/opensearch-project/opensearch-py/pull/559)) ### Changed +- Generate `tasks` client from API specs ([#508](https://github.com/opensearch-project/opensearch-py/pull/508)) +- Generate `ingest` client from API specs ([#513](https://github.com/opensearch-project/opensearch-py/pull/513)) +- Generate `dangling_indices` client from API specs ([#511](https://github.com/opensearch-project/opensearch-py/pull/511)) +- Generate `cluster` client from API specs ([#530](https://github.com/opensearch-project/opensearch-py/pull/530)) +- Generate `nodes` client from API specs ([#514](https://github.com/opensearch-project/opensearch-py/pull/514)) +- Generate `cat` client from API specs ([#529](https://github.com/opensearch-project/opensearch-py/pull/529)) +- Use API generator for all APIs ([#551](https://github.com/opensearch-project/opensearch-py/pull/551)) +- Merge `.pyi` type stubs inline ([#563](https://github.com/opensearch-project/opensearch-py/pull/563)) +- Expanded type coverage to benchmarks, samples and tests ([#566](https://github.com/opensearch-project/opensearch-py/pull/566)) +- Defaulted `enable_cleanup_closed=True` in `aiohttp.TCPConnector` to prevent TLS connection leaks ([#468](https://github.com/opensearch-project/opensearch-py/pull/468)) ### Deprecated +- Deprecated point-in-time APIs (list_all_point_in_time, create_point_in_time, delete_point_in_time) and Security Client APIs (health_check and update_audit_config) ([#502](https://github.com/opensearch-project/opensearch-py/pull/502)) ### Removed +- Removed leftover support for Python 2.7 ([#548](https://github.com/opensearch-project/opensearch-py/pull/548)) ### Fixed -- Added `enable_cleanup_closed=True` argument to `aiohttp.TCPConnector` to fix TLS connection leaks ([#468](https://github.com/opensearch-project/opensearch-py/pull/468)) ### Security ### Dependencies -- Bumps `sphinx` from <7.1 to <7.2 -- Bumps `urllib3` from >=1.21.1, <2 to >=1.21.1 ([#466](https://github.com/opensearch-project/opensearch-py/pull/466)) +- Bumps `sphinx` from <7.1 to <7.3 + +## [2.3.2] +### Added +### Changed +### Deprecated +### Removed +### Fixed +### Security +### Dependencies +- Bumps `urllib3` from >=1.21.1, <2 to >=1.26.9 ([#518](https://github.com/opensearch-project/opensearch-py/pull/518)) + +## [2.3.1] +### Added +### Changed +### Deprecated +### Removed +### Fixed +- Fixed race condition in AWSV4SignerAuth & AWSV4SignerAsyncAuth when using refreshable credentials ([#470](https://github.com/opensearch-project/opensearch-py/pull/470)) +### Security +### Dependencies +- Bumps `urllib3` from >= 1.26.9 to >= 1.26.17 
([#533](https://github.com/opensearch-project/opensearch-py/pull/533))

## [2.3.0]
### Added
@@ -24,7 +66,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
- Added support for the security plugin ([#399](https://github.com/opensearch-project/opensearch-py/pull/399))
- Supports OpenSearch 2.1.0 - 2.6.0 ([#381](https://github.com/opensearch-project/opensearch-py/pull/381))
- Added `allow_redirects` to `RequestsHttpConnection#perform_request` ([#401](https://github.com/opensearch-project/opensearch-py/pull/401))
-- Enhanced YAML test runner to use OpenSearch `rest-api-spec` YAML tests ([#414](https://github.com/opensearch-project/opensearch-py/pull/414)
+- Enhanced YAML test runner to use OpenSearch `rest-api-spec` YAML tests ([#414](https://github.com/opensearch-project/opensearch-py/pull/414))
- Added `Search#collapse` ([#409](https://github.com/opensearch-project/opensearch-py/issues/409))
- Added support for the ISM API ([#398](https://github.com/opensearch-project/opensearch-py/pull/398))
- Added `trust_env` to `AIOHttpConnection` ([#438](https://github.com/opensearch-project/opensearch-py/pull/438))
@@ -39,6 +81,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
### Deprecated
### Removed
- Removed support for Python 2.7 ([#421](https://github.com/opensearch-project/opensearch-py/pull/421))
+- Removed support for Python 3.5 ([#533](https://github.com/opensearch-project/opensearch-py/pull/533))
### Fixed
- Fixed flaky CI tests by replacing httpbin with a simple http_server ([#395](https://github.com/opensearch-project/opensearch-py/pull/395))
- Fixed import cycle when importing async helpers ([#311](https://github.com/opensearch-project/opensearch-py/pull/311))
@@ -47,6 +90,7 @@
- Include parsed error info in `TransportError` in async connections ([#226](https://github.com/opensearch-project/opensearch-py/pull/226))
- Enhanced existing API generator to use OpenSearch OpenAPI spec ([#412](https://github.com/opensearch-project/opensearch-py/pull/412))
- Fix crash when attempting to authenticate with an async connection ([#424](https://github.com/opensearch-project/opensearch-py/pull/424))
+- Fixed poetry run command issue on Windows/Mac machines ([#494](https://github.com/opensearch-project/opensearch-py/pull/494))
### Security
- Fixed CVE-2022-23491 reported in opensearch-dsl-py ([#295](https://github.com/opensearch-project/opensearch-py/pull/295))
### Dependencies
@@ -109,11 +153,14 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
- Removed patch versions in integration tests for OpenSearch 1.0.0 - 2.3.0 to reduce Github Action jobs ([#262](https://github.com/opensearch-project/opensearch-py/pull/262))
### Fixed
- Fixed DeprecationWarning emitted from urllib3 1.26.13+ ([#246](https://github.com/opensearch-project/opensearch-py/pull/246))
+- Fixed wrong return type hint in `async_scan` ([#520](https://github.com/opensearch-project/opensearch-py/pull/520))
### Security

-[Unreleased]: https://github.com/opensearch-project/opensearch-py/compare/v2.2.0...HEAD
+[Unreleased]: https://github.com/opensearch-project/opensearch-py/compare/v2.3.2...HEAD
[2.0.1]: https://github.com/opensearch-project/opensearch-py/compare/v2.0.0...v2.0.1
[2.1.0]: https://github.com/opensearch-project/opensearch-py/compare/v2.0.1...v2.1.0
[2.1.1]: https://github.com/opensearch-project/opensearch-py/compare/v2.1.0...v2.1.1
[2.2.0]:
https://github.com/opensearch-project/opensearch-py/compare/v2.1.1...v2.2.0
[2.3.0]: https://github.com/opensearch-project/opensearch-py/compare/v2.2.0...v2.3.0
+[2.3.1]: https://github.com/opensearch-project/opensearch-py/compare/v2.3.0...v2.3.1
+[2.3.2]: https://github.com/opensearch-project/opensearch-py/compare/v2.3.1...v2.3.2

diff --git a/COMPATIBILITY.md b/COMPATIBILITY.md
index bc39275a..0634f6cc 100644
--- a/COMPATIBILITY.md
+++ b/COMPATIBILITY.md
@@ -9,10 +9,12 @@ The below matrix shows the compatibility of the [`opensearch-py`](https://pypi.o
| --- | --- | --- |
| 1.0.0 | 1.0.0-1.2.4 | |
| 1.1.0 | 1.3.0-1.3.7 | |
-| 2.0.x | 1.0.0-2.8.0 | client works against Opensearch Version 1.x as long as features removed in 2.0 are not used |
-| 2.1.x | 1.0.0-2.8.0 | client works against Opensearch Version 1.x as long as features removed in 2.0 are not used |
-| 2.2.0 | 1.0.0-2.8.0 | client works against Opensearch Version 1.x as long as features removed in 2.0 are not used |
-| 2.3.0 | 1.0.0-2.8.0 | client works against Opensearch Version 1.x as long as features removed in 2.0 are not used |
+| 2.0.x | 1.0.0-2.10.0 | client works against OpenSearch version 1.x as long as features removed in 2.0 are not used |
+| 2.1.x | 1.0.0-2.10.0 | client works against OpenSearch version 1.x as long as features removed in 2.0 are not used |
+| 2.2.0 | 1.0.0-2.10.0 | client works against OpenSearch version 1.x as long as features removed in 2.0 are not used |
+| 2.3.0 | 1.0.0-2.10.0 | client works against OpenSearch version 1.x as long as features removed in 2.0 are not used |
+| 2.3.1 | 1.0.0-2.10.0 | client works against OpenSearch version 1.x as long as features removed in 2.0 are not used |
+| 2.3.2 | 1.0.0-2.10.0 | client works against OpenSearch version 1.x as long as features removed in 2.0 are not used |

## Upgrading

diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md
index ec39602f..f6cb568c 100644
--- a/DEVELOPER_GUIDE.md
+++ b/DEVELOPER_GUIDE.md
@@ -1,10 +1,10 @@
- [Developer Guide](#developer-guide)
  - [Prerequisites](#prerequisites)
-  - [Docker Image Installation](#docker-setup)
+  - [Install Docker Image](#install-docker-image)
  - [Running Tests](#running-tests)
-  - [Integration Tests](#integration-tests)
+  - [Linter](#linter)
  - [Documentation](#documentation)
-  - [Running Python Client Generator](#running-python-client-generator)
+  - [Client Code Generator](#client-code-generator)

# Developer Guide
@@ -45,7 +45,19 @@ docker run -d -p 9200:9200 -p 9600:9600 -e "discovery.type=single-node" opensear

Tests require a live instance of OpenSearch running in docker.

-This will start a new instance and run tests against the latest version of OpenSearch.
+If you already have one running, run the test suite against it directly.
+
+```
+python setup.py test
+```
+
+To run the tests in a specific test file:
+
+```
+python setup.py test -s test_opensearchpy/test_connection.py
+```
+
+If you want to auto-start one, the following will start a new instance and run tests against the latest version of OpenSearch.

```
./.ci/run-tests
@@ -54,9 +66,11 @@ If your OpenSearch docker instance is running, you can execute the test suite directly.

```
-$ nox -rs test
+$ nox -rs test-3.9
```

+Replace `3.9` with your Python version, or use `nox -rs test` to run the suite against multiple versions.
+
To run tests against different versions of OpenSearch, use `run-tests [with/without security] [version]`:

```
@@ -76,7 +90,7 @@ You can also run individual tests matching a pattern (`pytest -k [pattern]`).
``` ./.ci/run-tests true 1.3.0 test_no_http_compression -test_opensearchpy/test_connection.py::TestUrllib3Connection::test_no_http_compression PASSED [ 33%] +test_opensearchpy/test_connection.py::TestUrllib3HttpConnection::test_no_http_compression PASSED [ 33%] test_opensearchpy/test_connection.py::TestRequestsConnection::test_no_http_compression PASSED [ 66%] test_opensearchpy/test_async/test_connection.py::TestAIOHttpConnection::test_no_http_compression PASSED [100%] ``` @@ -103,12 +117,10 @@ make html Open `opensearch-py/docs/build/html/index.html` to see results. -## Running Python Client Generator +## Client Code Generator -The following code executes a python client generator that updates the client by utilizing the [openapi specifications](https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json) found in the "opensearch-api-specification" repository. This process allows for the automatic generation and synchronization of the client code with the latest API specifications. +OpenSearch publishes an [OpenAPI specification](https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json) in the [opensearch-api-specification](https://github.com/opensearch-project/opensearch-api-specification) repository, which is used to auto-generate the less interesting parts of the client. ``` -cd opensearch-py -python utils/generate-api.py -nox -rs format +nox -rs generate ``` diff --git a/MAINTAINERS.md b/MAINTAINERS.md index dcf3600a..b4115274 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -13,6 +13,7 @@ This document contains a list of maintainers in this repo. See [opensearch-proje | Denis Zalevskiy | [deztructor](https://github.com/deztructor) | Aiven | | Shephali Mittal | [Shephalimittal](https://github.com/Shephalimittal) | Amazon | | Sai Medhini Reddy Maryada | [saimedhi](https://github.com/saimedhi) | Amazon | +| Florian Vazelle | [florianvazelle](https://github.com/florianvazelle) | harfanglab | ## Emeritus diff --git a/MANIFEST.in b/MANIFEST.in index 40d49135..9f446e08 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -5,7 +5,7 @@ include LICENSE include MANIFEST.in include README.md include setup.py -recursive-include opensearch* py.typed *.pyi +recursive-include opensearch* py.typed prune test_opensearch recursive-exclude * __pycache__ diff --git a/README.md b/README.md index 7ecaea56..e4524469 100644 --- a/README.md +++ b/README.md @@ -26,7 +26,7 @@ For more information, see [opensearch.org](https://opensearch.org/) and the [API ## User Guide -To get started with the OpenSearch Python Client, see [User Guide](https://github.com/opensearch-project/opensearch-py/blob/main/USER_GUIDE.md). +To get started with the OpenSearch Python Client, see [User Guide](https://github.com/opensearch-project/opensearch-py/blob/main/USER_GUIDE.md). This repository also contains [working samples](https://github.com/opensearch-project/opensearch-py/tree/main/samples) and [benchmarks](https://github.com/opensearch-project/opensearch-py/tree/main/benchmarks). 
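(Editorial aside: for readers landing here from the README, a minimal sketch of indexing and searching a document with the client follows. The localhost address and `admin` demo credentials are assumptions borrowed from the samples and benchmarks elsewhere in this diff, not part of the README change itself.)

```python
from opensearchpy import OpenSearch

# Connect to a local single-node cluster. Certificate verification is
# disabled only because the demo setup uses a self-signed certificate;
# do not disable it in production.
client = OpenSearch(
    hosts=[{"host": "localhost", "port": 9200}],
    http_auth=("admin", "admin"),
    use_ssl=True,
    verify_certs=False,
    ssl_show_warn=False,
)

client.index(index="movies", id="1", body={"title": "Moneyball", "year": 2011})
client.indices.refresh(index="movies")  # make the new document searchable now
print(client.search(index="movies", body={"query": {"match": {"title": "moneyball"}}}))
```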
## Compatibility with OpenSearch diff --git a/USER_GUIDE.md b/USER_GUIDE.md index 4b6b89c2..84fe4d5b 100644 --- a/USER_GUIDE.md +++ b/USER_GUIDE.md @@ -153,6 +153,11 @@ print(response) - [Search](guides/search.md) - [Point in Time](guides/point_in_time.md) - [Using a Proxy](guides/proxy.md) +- [Index Templates](guides/index_template.md) +- [Advanced Index Actions](guides/advanced_index_actions.md) +- [Making Raw JSON REST Requests](guides/json.md) +- [Connection Classes](guides/connection_classes.md) +- [Document Lifecycle](guides/document_lifecycle.md) ## Plugins diff --git a/benchmarks/README.md b/benchmarks/README.md new file mode 100644 index 00000000..1d21d851 --- /dev/null +++ b/benchmarks/README.md @@ -0,0 +1,63 @@ +- [Benchmarks](#benchmarks) + - [Start OpenSearch](#start-opensearch) + - [Install Prerequisites](#install-prerequisites) + - [Run Benchmarks](#run-benchmarks) + +## Benchmarks + +Python client benchmarks using [richbench](https://github.com/tonybaloney/rich-bench). + +### Start OpenSearch + +``` +docker run -p 9200:9200 -e "discovery.type=single-node" opensearchproject/opensearch:latest +``` + +### Install Prerequisites + +Install [poetry](https://python-poetry.org/docs/), then install package dependencies. + +``` +poetry install +``` + +Benchmarks use the code in this repository by specifying the dependency as `opensearch-py = { path = "..", develop=true, extras=["async"] }` in [pyproject.toml](pyproject.toml). + +### Run Benchmarks + +Run all benchmarks available as follows. + +``` +poetry run richbench . --repeat 1 --times 1 +``` + +Outputs results from all the runs. + +``` + Benchmarks, repeat=1, number=1 +┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━┓ +┃ Benchmark ┃ Min ┃ Max ┃ Mean ┃ Min (+) ┃ Max (+) ┃ Mean (+) ┃ +┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━┩ +│ 1 client vs. more clients (async) │ 1.640 │ 1.640 │ 1.640 │ 1.102 (1.5x) │ 1.102 (1.5x) │ 1.102 (1.5x) │ +│ 1 thread vs. 32 threads (sync) │ 5.526 │ 5.526 │ 5.526 │ 1.626 (3.4x) │ 1.626 (3.4x) │ 1.626 (3.4x) │ +│ 1 thread vs. 32 threads (sync) │ 4.639 │ 4.639 │ 4.639 │ 3.363 (1.4x) │ 3.363 (1.4x) │ 3.363 (1.4x) │ +│ sync vs. async (8) │ 3.198 │ 3.198 │ 3.198 │ 0.966 (3.3x) │ 0.966 (3.3x) │ 0.966 (3.3x) │ +└───────────────────────────────────┴─────────┴─────────┴─────────┴─────────────────┴─────────────────┴─────────────────┘ +``` + +Run a specific benchmark, e.g. [bench_sync.py](bench_sync.py) by specifying `--benchmark [name]`. + +``` +poetry run richbench . --repeat 1 --times 1 --benchmark sync +``` + +Outputs results from one benchmark. + +``` + Benchmarks, repeat=1, number=1 +┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━┓ +┃ Benchmark ┃ Min ┃ Max ┃ Mean ┃ Min (+) ┃ Max (+) ┃ Mean (+) ┃ +┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━┩ +│ 1 thread vs. 
32 threads (sync) │ 6.804 │ 6.804 │ 6.804 │ 3.409 (2.0x) │ 3.409 (2.0x) │ 3.409 (2.0x) │
+└────────────────────────────────┴─────────┴─────────┴─────────┴─────────────────┴─────────────────┴─────────────────┘
+```

diff --git a/benchmarks/bench_async.py b/benchmarks/bench_async.py
new file mode 100644
index 00000000..baeb7d80
--- /dev/null
+++ b/benchmarks/bench_async.py
@@ -0,0 +1,106 @@
+#!/usr/bin/env python
+
+# -*- coding: utf-8 -*-
+# SPDX-License-Identifier: Apache-2.0
+#
+# The OpenSearch Contributors require contributions made to
+# this file be licensed under the Apache-2.0 license or a
+# compatible open source license.
+#
+# Modifications Copyright OpenSearch Contributors. See
+# GitHub history for details.
+
+import asyncio
+import uuid
+from typing import Any
+
+from opensearchpy import AsyncHttpConnection, AsyncOpenSearch
+
+host = "localhost"
+port = 9200
+auth = ("admin", "admin")
+index_name = "test-index-async"
+item_count = 100
+
+
+async def index_records(client: Any, item_count: int) -> None:
+    await asyncio.gather(
+        *[
+            client.index(
+                index=index_name,
+                body={
+                    "title": "Moneyball",
+                    "director": "Bennett Miller",
+                    "year": "2011",
+                },
+                id=uuid.uuid4(),
+            )
+            for _ in range(item_count)
+        ]
+    )
+
+
+async def test_async(client_count: int = 1, item_count: int = 1) -> None:
+    clients = []
+    for i in range(client_count):
+        clients.append(
+            AsyncOpenSearch(
+                hosts=[{"host": host, "port": port}],
+                http_auth=auth,
+                use_ssl=True,
+                verify_certs=False,
+                ssl_show_warn=False,
+                connection_class=AsyncHttpConnection,
+                pool_maxsize=client_count,
+            )
+        )
+
+    if await clients[0].indices.exists(index_name):
+        await clients[0].indices.delete(index_name)
+
+    await clients[0].indices.create(index_name)
+
+    await asyncio.gather(
+        *[index_records(clients[i], item_count) for i in range(client_count)]
+    )
+
+    await clients[0].indices.refresh(index=index_name)
+    print(await clients[0].count(index=index_name))
+
+    await clients[0].indices.delete(index_name)
+
+    await asyncio.gather(*[client.close() for client in clients])
+
+
+def test(client_count: int = 1, item_count: int = 1) -> None:
+    # Run the async benchmark on a fresh event loop. The parameters are
+    # passed through to test_async in (client_count, item_count) order,
+    # matching how test_1 through test_32 below invoke this function.
+    loop = asyncio.new_event_loop()
+    asyncio.set_event_loop(loop)
+    loop.run_until_complete(test_async(client_count, item_count))
+    loop.close()
+
+
+def test_1() -> None:
+    test(1, 32 * item_count)
+
+
+def test_2() -> None:
+    test(2, 16 * item_count)
+
+
+def test_4() -> None:
+    test(4, 8 * item_count)
+
+
+def test_8() -> None:
+    test(8, 4 * item_count)
+
+
+def test_16() -> None:
+    test(16, 2 * item_count)
+
+
+def test_32() -> None:
+    test(32, item_count)
+
+
+__benchmarks__ = [(test_1, test_8, "1 client vs. more clients (async)")]

diff --git a/benchmarks/bench_info_sync.py b/benchmarks/bench_info_sync.py
new file mode 100644
index 00000000..0c69a102
--- /dev/null
+++ b/benchmarks/bench_info_sync.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+
+# -*- coding: utf-8 -*-
+# SPDX-License-Identifier: Apache-2.0
+#
+# The OpenSearch Contributors require contributions made to
+# this file be licensed under the Apache-2.0 license or a
+# compatible open source license.
+#
+# Modifications Copyright OpenSearch Contributors. See
+# GitHub history for details.
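+# Editorial note: this benchmark measures cumulative wall-clock latency of
+# synchronous client.info() calls. Each thread runs get_info(), which issues
+# request_count requests against one of client_count clients (pool_maxsize is
+# sized to the thread count), and ThreadWithReturnValue.join() returns each
+# thread's accumulated latency in milliseconds so the totals can be summed.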
+ + +import logging +import sys +import time +from typing import Any + +from thread_with_return_value import ThreadWithReturnValue + +from opensearchpy import OpenSearch + +host = "localhost" +port = 9200 +auth = ("admin", "admin") +request_count = 250 + + +root = logging.getLogger() +# root.setLevel(logging.DEBUG) +# logging.getLogger("urllib3.connectionpool").setLevel(logging.DEBUG) + +handler = logging.StreamHandler(sys.stdout) +handler.setLevel(logging.DEBUG) +formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") +handler.setFormatter(formatter) +root.addHandler(handler) + + +def get_info(client: Any, request_count: int) -> float: + tt: float = 0 + for n in range(request_count): + start = time.time() * 1000 + client.info() + total_time = time.time() * 1000 - start + tt += total_time + return tt + + +def test(thread_count: int = 1, request_count: int = 1, client_count: int = 1) -> None: + clients = [] + for i in range(client_count): + clients.append( + OpenSearch( + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, + pool_maxsize=thread_count, + ) + ) + + threads = [] + for thread_id in range(thread_count): + thread = ThreadWithReturnValue( + target=get_info, args=[clients[thread_id % len(clients)], request_count] + ) + threads.append(thread) + thread.start() + + latency = 0 + for t in threads: + latency += t.join() + + print(f"latency={latency}") + + +def test_1() -> None: + test(1, 32 * request_count, 1) + + +def test_2() -> None: + test(2, 16 * request_count, 2) + + +def test_4() -> None: + test(4, 8 * request_count, 3) + + +def test_8() -> None: + test(8, 4 * request_count, 8) + + +def test_32() -> None: + test(32, request_count, 32) + + +__benchmarks__ = [(test_1, test_32, "1 thread vs. 32 threads (sync)")] diff --git a/benchmarks/bench_sync.py b/benchmarks/bench_sync.py new file mode 100644 index 00000000..004fa2e4 --- /dev/null +++ b/benchmarks/bench_sync.py @@ -0,0 +1,136 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
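+# Editorial note: this benchmark measures client-side overhead of synchronous
+# bulk indexing. Each thread runs index_records(), which sends 10 bulk batches
+# of item_count documents and returns total round-trip time minus the
+# server-reported `took` for each batch; ThreadWithReturnValue.join() hands
+# those per-thread totals back so they can be summed and printed as `latency`.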
+ +import json +import logging +import sys +import time +import uuid +from typing import Any + +from thread_with_return_value import ThreadWithReturnValue + +from opensearchpy import OpenSearch, Urllib3HttpConnection + +host = "localhost" +port = 9200 +auth = ("admin", "admin") +index_name = "test-index-sync" +item_count = 1000 + +root = logging.getLogger() +# root.setLevel(logging.DEBUG) +# logging.getLogger("urllib3.connectionpool").setLevel(logging.DEBUG) + +handler = logging.StreamHandler(sys.stdout) +handler.setLevel(logging.DEBUG) +formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") +handler.setFormatter(formatter) +root.addHandler(handler) + + +def index_records(client: Any, item_count: int) -> Any: + tt = 0 + for n in range(10): + data: Any = [] + for i in range(item_count): + data.append( + json.dumps({"index": {"_index": index_name, "_id": str(uuid.uuid4())}}) + ) + data.append(json.dumps({"value": i})) + data = "\n".join(data) + + start = time.time() * 1000 + rc = client.bulk(data) + if rc["errors"]: + raise Exception(rc["errors"]) + + server_time = rc["took"] + total_time = time.time() * 1000 - start + + if total_time < server_time: + raise Exception(f"total={total_time} < server={server_time}") + + tt += total_time - server_time + return tt + + +def test(thread_count: int = 1, item_count: int = 1, client_count: int = 1) -> None: + clients = [] + for i in range(client_count): + clients.append( + OpenSearch( + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, + pool_maxsize=thread_count, + connection_class=Urllib3HttpConnection, + ) + ) + + if clients[0].indices.exists(index_name): + clients[0].indices.delete(index_name) + + clients[0].indices.create( + index=index_name, + body={ + "mappings": { + "properties": { + "value": {"type": "float"}, + } + } + }, + ) + + threads = [] + for thread_id in range(thread_count): + thread = ThreadWithReturnValue( + target=index_records, args=[clients[thread_id % len(clients)], item_count] + ) + threads.append(thread) + thread.start() + + latency = 0 + for t in threads: + latency += t.join() + + clients[0].indices.refresh(index=index_name) + count = clients[0].count(index=index_name) + + clients[0].indices.delete(index_name) + + print(f"{count}, latency={latency}") + + +def test_1() -> None: + test(1, 32 * item_count, 1) + + +def test_2() -> None: + test(2, 16 * item_count, 2) + + +def test_4() -> None: + test(4, 8 * item_count, 3) + + +def test_8() -> None: + test(8, 4 * item_count, 8) + + +def test_32() -> None: + test(32, item_count, 32) + + +__benchmarks__ = [(test_1, test_32, "1 thread vs. 32 threads (sync)")] diff --git a/opensearchpy/_async/helpers/update_by_query.pyi b/benchmarks/bench_sync_async.py similarity index 62% rename from opensearchpy/_async/helpers/update_by_query.pyi rename to benchmarks/bench_sync_async.py index 3c5a9ed7..7950dc64 100644 --- a/opensearchpy/_async/helpers/update_by_query.pyi +++ b/benchmarks/bench_sync_async.py @@ -1,3 +1,6 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -7,6 +10,8 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -from opensearchpy.helpers.search import Request -class AsyncUpdateByQuery(Request): ... +import bench_async +import bench_sync + +__benchmarks__ = [(bench_sync.test_32, bench_async.test_8, "sync vs. 
async (8)")] diff --git a/benchmarks/poetry.lock b/benchmarks/poetry.lock new file mode 100644 index 00000000..d4992d68 --- /dev/null +++ b/benchmarks/poetry.lock @@ -0,0 +1,847 @@ +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. + +[[package]] +name = "aiohttp" +version = "3.8.6" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.6" +files = [ + {file = "aiohttp-3.8.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:41d55fc043954cddbbd82503d9cc3f4814a40bcef30b3569bc7b5e34130718c1"}, + {file = "aiohttp-3.8.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1d84166673694841d8953f0a8d0c90e1087739d24632fe86b1a08819168b4566"}, + {file = "aiohttp-3.8.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:253bf92b744b3170eb4c4ca2fa58f9c4b87aeb1df42f71d4e78815e6e8b73c9e"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fd194939b1f764d6bb05490987bfe104287bbf51b8d862261ccf66f48fb4096"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c5f938d199a6fdbdc10bbb9447496561c3a9a565b43be564648d81e1102ac22"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2817b2f66ca82ee699acd90e05c95e79bbf1dc986abb62b61ec8aaf851e81c93"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fa375b3d34e71ccccf172cab401cd94a72de7a8cc01847a7b3386204093bb47"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9de50a199b7710fa2904be5a4a9b51af587ab24c8e540a7243ab737b45844543"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e1d8cb0b56b3587c5c01de3bf2f600f186da7e7b5f7353d1bf26a8ddca57f965"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8e31e9db1bee8b4f407b77fd2507337a0a80665ad7b6c749d08df595d88f1cf5"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7bc88fc494b1f0311d67f29fee6fd636606f4697e8cc793a2d912ac5b19aa38d"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ec00c3305788e04bf6d29d42e504560e159ccaf0be30c09203b468a6c1ccd3b2"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ad1407db8f2f49329729564f71685557157bfa42b48f4b93e53721a16eb813ed"}, + {file = "aiohttp-3.8.6-cp310-cp310-win32.whl", hash = "sha256:ccc360e87341ad47c777f5723f68adbb52b37ab450c8bc3ca9ca1f3e849e5fe2"}, + {file = "aiohttp-3.8.6-cp310-cp310-win_amd64.whl", hash = "sha256:93c15c8e48e5e7b89d5cb4613479d144fda8344e2d886cf694fd36db4cc86865"}, + {file = "aiohttp-3.8.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e2f9cc8e5328f829f6e1fb74a0a3a939b14e67e80832975e01929e320386b34"}, + {file = "aiohttp-3.8.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e6a00ffcc173e765e200ceefb06399ba09c06db97f401f920513a10c803604ca"}, + {file = "aiohttp-3.8.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:41bdc2ba359032e36c0e9de5a3bd00d6fb7ea558a6ce6b70acedf0da86458321"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14cd52ccf40006c7a6cd34a0f8663734e5363fd981807173faf3a017e202fec9"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2d5b785c792802e7b275c420d84f3397668e9d49ab1cb52bd916b3b3ffcf09ad"}, + {file = 
"aiohttp-3.8.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1bed815f3dc3d915c5c1e556c397c8667826fbc1b935d95b0ad680787896a358"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96603a562b546632441926cd1293cfcb5b69f0b4159e6077f7c7dbdfb686af4d"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d76e8b13161a202d14c9584590c4df4d068c9567c99506497bdd67eaedf36403"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e3f1e3f1a1751bb62b4a1b7f4e435afcdade6c17a4fd9b9d43607cebd242924a"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:76b36b3124f0223903609944a3c8bf28a599b2cc0ce0be60b45211c8e9be97f8"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:a2ece4af1f3c967a4390c284797ab595a9f1bc1130ef8b01828915a05a6ae684"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:16d330b3b9db87c3883e565340d292638a878236418b23cc8b9b11a054aaa887"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:42c89579f82e49db436b69c938ab3e1559e5a4409eb8639eb4143989bc390f2f"}, + {file = "aiohttp-3.8.6-cp311-cp311-win32.whl", hash = "sha256:efd2fcf7e7b9d7ab16e6b7d54205beded0a9c8566cb30f09c1abe42b4e22bdcb"}, + {file = "aiohttp-3.8.6-cp311-cp311-win_amd64.whl", hash = "sha256:3b2ab182fc28e7a81f6c70bfbd829045d9480063f5ab06f6e601a3eddbbd49a0"}, + {file = "aiohttp-3.8.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fdee8405931b0615220e5ddf8cd7edd8592c606a8e4ca2a00704883c396e4479"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d25036d161c4fe2225d1abff2bd52c34ed0b1099f02c208cd34d8c05729882f0"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d791245a894be071d5ab04bbb4850534261a7d4fd363b094a7b9963e8cdbd31"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0cccd1de239afa866e4ce5c789b3032442f19c261c7d8a01183fd956b1935349"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f13f60d78224f0dace220d8ab4ef1dbc37115eeeab8c06804fec11bec2bbd07"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a9b5a0606faca4f6cc0d338359d6fa137104c337f489cd135bb7fbdbccb1e39"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:13da35c9ceb847732bf5c6c5781dcf4780e14392e5d3b3c689f6d22f8e15ae31"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:4d4cbe4ffa9d05f46a28252efc5941e0462792930caa370a6efaf491f412bc66"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:229852e147f44da0241954fc6cb910ba074e597f06789c867cb7fb0621e0ba7a"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:713103a8bdde61d13490adf47171a1039fd880113981e55401a0f7b42c37d071"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:45ad816b2c8e3b60b510f30dbd37fe74fd4a772248a52bb021f6fd65dff809b6"}, + {file = "aiohttp-3.8.6-cp36-cp36m-win32.whl", hash = "sha256:2b8d4e166e600dcfbff51919c7a3789ff6ca8b3ecce16e1d9c96d95dd569eb4c"}, + {file = "aiohttp-3.8.6-cp36-cp36m-win_amd64.whl", hash = "sha256:0912ed87fee967940aacc5306d3aa8ba3a459fcd12add0b407081fbefc931e53"}, + {file = 
"aiohttp-3.8.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e2a988a0c673c2e12084f5e6ba3392d76c75ddb8ebc6c7e9ead68248101cd446"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebf3fd9f141700b510d4b190094db0ce37ac6361a6806c153c161dc6c041ccda"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3161ce82ab85acd267c8f4b14aa226047a6bee1e4e6adb74b798bd42c6ae1f80"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95fc1bf33a9a81469aa760617b5971331cdd74370d1214f0b3109272c0e1e3c"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c43ecfef7deaf0617cee936836518e7424ee12cb709883f2c9a1adda63cc460"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca80e1b90a05a4f476547f904992ae81eda5c2c85c66ee4195bb8f9c5fb47f28"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:90c72ebb7cb3a08a7f40061079817133f502a160561d0675b0a6adf231382c92"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bb54c54510e47a8c7c8e63454a6acc817519337b2b78606c4e840871a3e15349"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:de6a1c9f6803b90e20869e6b99c2c18cef5cc691363954c93cb9adeb26d9f3ae"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:a3628b6c7b880b181a3ae0a0683698513874df63783fd89de99b7b7539e3e8a8"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fc37e9aef10a696a5a4474802930079ccfc14d9f9c10b4662169671ff034b7df"}, + {file = "aiohttp-3.8.6-cp37-cp37m-win32.whl", hash = "sha256:f8ef51e459eb2ad8e7a66c1d6440c808485840ad55ecc3cafefadea47d1b1ba2"}, + {file = "aiohttp-3.8.6-cp37-cp37m-win_amd64.whl", hash = "sha256:b2fe42e523be344124c6c8ef32a011444e869dc5f883c591ed87f84339de5976"}, + {file = "aiohttp-3.8.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9e2ee0ac5a1f5c7dd3197de309adfb99ac4617ff02b0603fd1e65b07dc772e4b"}, + {file = "aiohttp-3.8.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01770d8c04bd8db568abb636c1fdd4f7140b284b8b3e0b4584f070180c1e5c62"}, + {file = "aiohttp-3.8.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3c68330a59506254b556b99a91857428cab98b2f84061260a67865f7f52899f5"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89341b2c19fb5eac30c341133ae2cc3544d40d9b1892749cdd25892bbc6ac951"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71783b0b6455ac8f34b5ec99d83e686892c50498d5d00b8e56d47f41b38fbe04"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f628dbf3c91e12f4d6c8b3f092069567d8eb17814aebba3d7d60c149391aee3a"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b04691bc6601ef47c88f0255043df6f570ada1a9ebef99c34bd0b72866c217ae"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ee912f7e78287516df155f69da575a0ba33b02dd7c1d6614dbc9463f43066e3"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9c19b26acdd08dd239e0d3669a3dddafd600902e37881f13fbd8a53943079dbc"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:99c5ac4ad492b4a19fc132306cd57075c28446ec2ed970973bbf036bcda1bcc6"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f0f03211fd14a6a0aed2997d4b1c013d49fb7b50eeb9ffdf5e51f23cfe2c77fa"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:8d399dade330c53b4106160f75f55407e9ae7505263ea86f2ccca6bfcbdb4921"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ec4fd86658c6a8964d75426517dc01cbf840bbf32d055ce64a9e63a40fd7b771"}, + {file = "aiohttp-3.8.6-cp38-cp38-win32.whl", hash = "sha256:33164093be11fcef3ce2571a0dccd9041c9a93fa3bde86569d7b03120d276c6f"}, + {file = "aiohttp-3.8.6-cp38-cp38-win_amd64.whl", hash = "sha256:bdf70bfe5a1414ba9afb9d49f0c912dc524cf60141102f3a11143ba3d291870f"}, + {file = "aiohttp-3.8.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d52d5dc7c6682b720280f9d9db41d36ebe4791622c842e258c9206232251ab2b"}, + {file = "aiohttp-3.8.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4ac39027011414dbd3d87f7edb31680e1f430834c8cef029f11c66dad0670aa5"}, + {file = "aiohttp-3.8.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3f5c7ce535a1d2429a634310e308fb7d718905487257060e5d4598e29dc17f0b"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b30e963f9e0d52c28f284d554a9469af073030030cef8693106d918b2ca92f54"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:918810ef188f84152af6b938254911055a72e0f935b5fbc4c1a4ed0b0584aed1"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:002f23e6ea8d3dd8d149e569fd580c999232b5fbc601c48d55398fbc2e582e8c"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fcf3eabd3fd1a5e6092d1242295fa37d0354b2eb2077e6eb670accad78e40e1"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:255ba9d6d5ff1a382bb9a578cd563605aa69bec845680e21c44afc2670607a95"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d67f8baed00870aa390ea2590798766256f31dc5ed3ecc737debb6e97e2ede78"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:86f20cee0f0a317c76573b627b954c412ea766d6ada1a9fcf1b805763ae7feeb"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:39a312d0e991690ccc1a61f1e9e42daa519dcc34ad03eb6f826d94c1190190dd"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e827d48cf802de06d9c935088c2924e3c7e7533377d66b6f31ed175c1620e05e"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bd111d7fc5591ddf377a408ed9067045259ff2770f37e2d94e6478d0f3fc0c17"}, + {file = "aiohttp-3.8.6-cp39-cp39-win32.whl", hash = "sha256:caf486ac1e689dda3502567eb89ffe02876546599bbf915ec94b1fa424eeffd4"}, + {file = "aiohttp-3.8.6-cp39-cp39-win_amd64.whl", hash = "sha256:3f0e27e5b733803333bb2371249f41cf42bae8884863e8e8965ec69bebe53132"}, + {file = "aiohttp-3.8.6.tar.gz", hash = "sha256:b0cf2a4501bff9330a8a5248b4ce951851e415bdcce9dc158e76cfd55e15085c"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = ">=4.0.0a3,<5.0" +asynctest = {version = "0.13.0", markers = "python_version < \"3.8\""} +attrs = ">=17.3.0" +charset-normalizer = ">=2.0,<4.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} +yarl = ">=1.0,<2.0" + 
+[package.extras] +speedups = ["Brotli", "aiodns", "cchardet"] + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + +[package.dependencies] +typing-extensions = {version = ">=3.6.5", markers = "python_version < \"3.8\""} + +[[package]] +name = "asynctest" +version = "0.13.0" +description = "Enhance the standard unittest package with features for testing asyncio libraries" +optional = false +python-versions = ">=3.5" +files = [ + {file = "asynctest-0.13.0-py3-none-any.whl", hash = "sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676"}, + {file = "asynctest-0.13.0.tar.gz", hash = "sha256:c27862842d15d83e6a34eb0b2866c323880eb3a75e4485b079ea11748fd77fac"}, +] + +[[package]] +name = "attrs" +version = "23.1.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, +] + +[package.dependencies] +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] + +[[package]] +name = "certifi" +version = "2023.7.22" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.0.tar.gz", hash = "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-win32.whl", hash = "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05"}, + {file = 
"charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-win32.whl", hash = "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65"}, + {file = 
"charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-win32.whl", hash = "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-win32.whl", hash = "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-win32.whl", hash = "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884"}, + {file = "charset_normalizer-3.3.0-py3-none-any.whl", hash = "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2"}, +] + +[[package]] +name = "frozenlist" +version = "1.3.3" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.7" +files = [ + {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4"}, + {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0"}, + {file = "frozenlist-1.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b1c63e8d377d039ac769cd0926558bb7068a1f7abb0f003e3717ee003ad85530"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fdfc24dcfce5b48109867c13b4cb15e4660e7bd7661741a391f821f23dfdca7"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c926450857408e42f0bbc295e84395722ce74bae69a3b2aa2a65fe22cb14b99"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1841e200fdafc3d51f974d9d377c079a0694a8f06de2e67b48150328d66d5483"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f470c92737afa7d4c3aacc001e335062d582053d4dbe73cda126f2d7031068dd"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:783263a4eaad7c49983fe4b2e7b53fa9770c136c270d2d4bbb6d2192bf4d9caf"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:924620eef691990dfb56dc4709f280f40baee568c794b5c1885800c3ecc69816"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae4dc05c465a08a866b7a1baf360747078b362e6a6dbeb0c57f234db0ef88ae0"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bed331fe18f58d844d39ceb398b77d6ac0b010d571cba8267c2e7165806b00ce"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:02c9ac843e3390826a265e331105efeab489ffaf4dd86384595ee8ce6d35ae7f"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9545a33965d0d377b0bc823dcabf26980e77f1b6a7caa368a365a9497fb09420"}, + {file = "frozenlist-1.3.3-cp310-cp310-win32.whl", hash = "sha256:d5cd3ab21acbdb414bb6c31958d7b06b85eeb40f66463c264a9b343a4e238642"}, + {file = "frozenlist-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:b756072364347cb6aa5b60f9bc18e94b2f79632de3b0190253ad770c5df17db1"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:b4395e2f8d83fbe0c627b2b696acce67868793d7d9750e90e39592b3626691b7"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14143ae966a6229350021384870458e4777d1eae4c28d1a7aa47f24d030e6678"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d8860749e813a6f65bad8285a0520607c9500caa23fea6ee407e63debcdbef6"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23d16d9f477bb55b6154654e0e74557040575d9d19fe78a161bd33d7d76808e8"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb82dbba47a8318e75f679690190c10a5e1f447fbf9df41cbc4c3afd726d88cb"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9309869032abb23d196cb4e4db574232abe8b8be1339026f489eeb34a4acfd91"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a97b4fe50b5890d36300820abd305694cb865ddb7885049587a5678215782a6b"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c188512b43542b1e91cadc3c6c915a82a5eb95929134faf7fd109f14f9892ce4"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:303e04d422e9b911a09ad499b0368dc551e8c3cd15293c99160c7f1f07b59a48"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0771aed7f596c7d73444c847a1c16288937ef988dc04fb9f7be4b2aa91db609d"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:66080ec69883597e4d026f2f71a231a1ee9887835902dbe6b6467d5a89216cf6"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:41fe21dc74ad3a779c3d73a2786bdf622ea81234bdd4faf90b8b03cad0c2c0b4"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f20380df709d91525e4bee04746ba612a4df0972c1b8f8e1e8af997e678c7b81"}, + {file = "frozenlist-1.3.3-cp311-cp311-win32.whl", hash = "sha256:f30f1928162e189091cf4d9da2eac617bfe78ef907a761614ff577ef4edfb3c8"}, + {file = "frozenlist-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a6394d7dadd3cfe3f4b3b186e54d5d8504d44f2d58dcc89d693698e8b7132b32"}, + {file = "frozenlist-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8df3de3a9ab8325f94f646609a66cbeeede263910c5c0de0101079ad541af332"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0693c609e9742c66ba4870bcee1ad5ff35462d5ffec18710b4ac89337ff16e27"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd4210baef299717db0a600d7a3cac81d46ef0e007f88c9335db79f8979c0d3d"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:394c9c242113bfb4b9aa36e2b80a05ffa163a30691c7b5a29eba82e937895d5e"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6327eb8e419f7d9c38f333cde41b9ae348bec26d840927332f17e887a8dcb70d"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e24900aa13212e75e5b366cb9065e78bbf3893d4baab6052d1aca10d46d944c"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3843f84a6c465a36559161e6c59dce2f2ac10943040c2fd021cfb70d58c4ad56"}, + 
{file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:84610c1502b2461255b4c9b7d5e9c48052601a8957cd0aea6ec7a7a1e1fb9420"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c21b9aa40e08e4f63a2f92ff3748e6b6c84d717d033c7b3438dd3123ee18f70e"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:efce6ae830831ab6a22b9b4091d411698145cb9b8fc869e1397ccf4b4b6455cb"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40de71985e9042ca00b7953c4f41eabc3dc514a2d1ff534027f091bc74416401"}, + {file = "frozenlist-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:180c00c66bde6146a860cbb81b54ee0df350d2daf13ca85b275123bbf85de18a"}, + {file = "frozenlist-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9bbbcedd75acdfecf2159663b87f1bb5cfc80e7cd99f7ddd9d66eb98b14a8411"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:034a5c08d36649591be1cbb10e09da9f531034acfe29275fc5454a3b101ce41a"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba64dc2b3b7b158c6660d49cdb1d872d1d0bf4e42043ad8d5006099479a194e5"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47df36a9fe24054b950bbc2db630d508cca3aa27ed0566c0baf661225e52c18e"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:008a054b75d77c995ea26629ab3a0c0d7281341f2fa7e1e85fa6153ae29ae99c"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:841ea19b43d438a80b4de62ac6ab21cfe6827bb8a9dc62b896acc88eaf9cecba"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e235688f42b36be2b6b06fc37ac2126a73b75fb8d6bc66dd632aa35286238703"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca713d4af15bae6e5d79b15c10c8522859a9a89d3b361a50b817c98c2fb402a2"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ac5995f2b408017b0be26d4a1d7c61bce106ff3d9e3324374d66b5964325448"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4ae8135b11652b08a8baf07631d3ebfe65a4c87909dbef5fa0cdde440444ee4"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ea42116ceb6bb16dbb7d526e242cb6747b08b7710d9782aa3d6732bd8d27649"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:810860bb4bdce7557bc0febb84bbd88198b9dbc2022d8eebe5b3590b2ad6c842"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ee78feb9d293c323b59a6f2dd441b63339a30edf35abcb51187d2fc26e696d13"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0af2e7c87d35b38732e810befb9d797a99279cbb85374d42ea61c1e9d23094b3"}, + {file = "frozenlist-1.3.3-cp38-cp38-win32.whl", hash = "sha256:899c5e1928eec13fd6f6d8dc51be23f0d09c5281e40d9cf4273d188d9feeaf9b"}, + {file = "frozenlist-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:7f44e24fa70f6fbc74aeec3e971f60a14dde85da364aa87f15d1be94ae75aeef"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2b07ae0c1edaa0a36339ec6cce700f51b14a3fc6545fdd32930d2c83917332cf"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ebb86518203e12e96af765ee89034a1dbb0c3c65052d1b0c19bbbd6af8a145e1"}, + {file = 
"frozenlist-1.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5cf820485f1b4c91e0417ea0afd41ce5cf5965011b3c22c400f6d144296ccbc0"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c11e43016b9024240212d2a65043b70ed8dfd3b52678a1271972702d990ac6d"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8fa3c6e3305aa1146b59a09b32b2e04074945ffcfb2f0931836d103a2c38f936"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:352bd4c8c72d508778cf05ab491f6ef36149f4d0cb3c56b1b4302852255d05d5"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65a5e4d3aa679610ac6e3569e865425b23b372277f89b5ef06cf2cdaf1ebf22b"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e2c1185858d7e10ff045c496bbf90ae752c28b365fef2c09cf0fa309291669"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f163d2fd041c630fed01bc48d28c3ed4a3b003c00acd396900e11ee5316b56bb"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05cdb16d09a0832eedf770cb7bd1fe57d8cf4eaf5aced29c4e41e3f20b30a784"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8bae29d60768bfa8fb92244b74502b18fae55a80eac13c88eb0b496d4268fd2d"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eedab4c310c0299961ac285591acd53dc6723a1ebd90a57207c71f6e0c2153ab"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3bbdf44855ed8f0fbcd102ef05ec3012d6a4fd7c7562403f76ce6a52aeffb2b1"}, + {file = "frozenlist-1.3.3-cp39-cp39-win32.whl", hash = "sha256:efa568b885bca461f7c7b9e032655c0c143d305bf01c30caf6db2854a4532b38"}, + {file = "frozenlist-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfe33efc9cb900a4c46f91a5ceba26d6df370ffddd9ca386eb1d4f0ad97b9ea9"}, + {file = "frozenlist-1.3.3.tar.gz", hash = "sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a"}, +] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "importlib-metadata" +version = "6.7.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"}, + {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"}, +] + +[package.dependencies] +typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", 
"pytest-perf (>=0.9.2)", "pytest-ruff"] + +[[package]] +name = "markdown-it-py" +version = "2.2.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +optional = false +python-versions = ">=3.7" +files = [ + {file = "markdown-it-py-2.2.0.tar.gz", hash = "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1"}, + {file = "markdown_it_py-2.2.0-py3-none-any.whl", hash = "sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" +typing_extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["attrs", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "multidict" +version = "6.0.4" +description = "multidict implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, + {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, + {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, + {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, + {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, + {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, + {file = 
"multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, + {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, + {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, + {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, + 
{file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, + {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, + {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, + {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, +] + +[[package]] +name = "opensearch-py" +version = "2.3.2" +description = "Python client for OpenSearch" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4" +files = [] +develop = true + +[package.dependencies] +aiohttp = {version = ">=3,<4", optional = true, markers = "extra == \"async\""} +certifi = ">=2022.12.07" +python-dateutil = "*" +requests = ">=2.4.0,<3.0.0" +six = "*" +urllib3 = ">=1.26.9" + +[package.extras] +async = ["aiohttp (>=3,<4)"] +develop = ["black", "botocore", "coverage (<7.0.0)", "jinja2", "mock", "myst_parser", "pytest (>=3.0.0)", "pytest-cov", "pytest-mock (<4.0.0)", "pytz", "pyyaml", "requests (>=2.0.0,<3.0.0)", "sphinx", "sphinx_copybutton", "sphinx_rtd_theme"] +docs = ["myst_parser", "sphinx", "sphinx_copybutton", "sphinx_rtd_theme"] +kerberos = ["requests_kerberos"] + +[package.source] +type = "directory" +url = ".." 
+ +[[package]] +name = "pygments" +version = "2.16.1" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, +] + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "pyinstrument" +version = "4.6.0" +description = "Call stack profiler for Python. Shows you why your code is slow!" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyinstrument-4.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:679b5397e3e6c0d6f56df50ba8c683543df4f1f7c1df2e2eb728e275bde2c85b"}, + {file = "pyinstrument-4.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:18479ffa0c922695ba2befab29521b62bfe75debef48d818cea46262cee48a1e"}, + {file = "pyinstrument-4.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daba103955d0d0b37b8bc20a4e8cc6477e839ce5984478fcf3f7cee8318e9636"}, + {file = "pyinstrument-4.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d93451e9c7650629b0bc12caa7390f81d1a15835c07f7dc170e953d4684ed1e7"}, + {file = "pyinstrument-4.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01009a7b58a6f11bf5560c23848ea2881acac974b0841fe5d365ef154baabd6f"}, + {file = "pyinstrument-4.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:288ea44da6333dacc77b4ba2149dba3dc1e9fbbebd3d5dc51a66c20839d80ef3"}, + {file = "pyinstrument-4.6.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ecc106213146dd90659a1483047b3a1c2e174fb190c0e109234e524a4651e377"}, + {file = "pyinstrument-4.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5cd8ab30c8dcd1511e9b3b98f601f17f2c5c9df1d28f8298d215c63d68919bdc"}, + {file = "pyinstrument-4.6.0-cp310-cp310-win32.whl", hash = "sha256:40e3656e6ace5a140880bd980a25f6a356c094c36e28ed1bf935d7349a78b1b6"}, + {file = "pyinstrument-4.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:d9623fc3fde47ae90ad5014737e37034b4abc3fbfb455b7b56cc095f9037d5af"}, + {file = "pyinstrument-4.6.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:beaaa3b647b3a4cbd34b71eacaa31e3eb90e1bf53e15ada3ac7e9df09d737239"}, + {file = "pyinstrument-4.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0c69ab570609ac93b5f4ab2e5ccbf8add4f69a962b06307eea66ba65b5ad9d38"}, + {file = "pyinstrument-4.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5992748a74ec7ff445e4b56b5e316673c34b6cdbd3755111f7c023d8a141f001"}, + {file = "pyinstrument-4.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb1ba76c4e912cae159ab9729c7b31bb6d7fe8ed1f0fafce74484a4bb159c240"}, + {file = "pyinstrument-4.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:674868ebc3663b01d7d059a6f5cdeff6f18b49e217617720a5d645a6b55ead03"}, + {file = "pyinstrument-4.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:514a0ced357ff400988f599b0294d05e3b68468f9ab876f204bf12765f7fdb1b"}, + {file = "pyinstrument-4.6.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ccd1f5b4ad35c734dcf2d08d80b5b37205b4e84aa71fe76f95e43bd30c5eef9"}, + {file = 
"pyinstrument-4.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:611c6cd33f42f19e46d99eeef3b84a47d33fe34cdb0ce6e3635d2ee5038706a3"}, + {file = "pyinstrument-4.6.0-cp311-cp311-win32.whl", hash = "sha256:d20b5cf79bca1b3d425a7362457621741393b1d5ce2d920583541b947bc8a368"}, + {file = "pyinstrument-4.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:ecd8cf03b04dc1b7f151896228993c6aa0fa897cdd517ea127465bc1c826c5b5"}, + {file = "pyinstrument-4.6.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3d4bed520c0f689a75bca4951f6b7fbad96851e8461086c98e03eb726f8a412a"}, + {file = "pyinstrument-4.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b74745f1d22133da8d4a38dd0c78c02c00154a5b7683bdd5df56a7c7705a979b"}, + {file = "pyinstrument-4.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6ab698400e8401597e39c4816efa247f2b98c9b4e59e3ec25d534ae6887bd93"}, + {file = "pyinstrument-4.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de1a36a083b324dafe5e2880e5e04267a1983beb027f12c3dc361ddbe3acf9af"}, + {file = "pyinstrument-4.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8adc4f87d4289c1f04f19451b5133b8e307bd9b08c364c48e007ba663fefbf1b"}, + {file = "pyinstrument-4.6.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:08fbc75d3615be6259b7af0c173c7bc48acb6e7bd758678d54eb411ba2903052"}, + {file = "pyinstrument-4.6.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:d86fea6ce117bcff642e24208eb573c00d78b4c2934eb9bd5f915751980cc9bd"}, + {file = "pyinstrument-4.6.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23a3b21373e0c8bf0d00dda79989fcab0bb1d30094f7b210d40d2226fe20e141"}, + {file = "pyinstrument-4.6.0-cp312-cp312-win32.whl", hash = "sha256:a498c82d93621c5cf736e4660142ac0c3bbcb7b059bcbd4278a6364037128656"}, + {file = "pyinstrument-4.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:9116154446b9999f6524e9db29310aee6476a5a471c276928f2b46b6655a2dcc"}, + {file = "pyinstrument-4.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:704c6d38abef8fca2e1085756c9574ea180f7ac866aab6943b483152c2828c2a"}, + {file = "pyinstrument-4.6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbebdc11d4fc6f3123c046d84db88c7f605d53247e3f357314d0c5775d1beaf4"}, + {file = "pyinstrument-4.6.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c7a7bae4cce5f8d084153857cedbce29ca8274c9924884d0461a5db48619c5d"}, + {file = "pyinstrument-4.6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03289b10715e261a5c33b267d0a430d1b408f929922fde0a9fd311835c60351b"}, + {file = "pyinstrument-4.6.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7f83544ff9abfacdf64b39498ca3dcd454956e44aedb5f67626b7212291c9160"}, + {file = "pyinstrument-4.6.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:40640f02fe7865540e8a1e51bf7f9d2403e3364c3b7edfdb9dae5eb5596811da"}, + {file = "pyinstrument-4.6.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f3719464888d7303e1081996bc56ab75ef5cdf7ef69ccbb7b29f48eb37d8f8b9"}, + {file = "pyinstrument-4.6.0-cp37-cp37m-win32.whl", hash = "sha256:46e16de6bd3b74ef01b6457d862fee751515315edb5e9283205e45299a29ac49"}, + {file = "pyinstrument-4.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9ded87ae11cb0a95a767c817908833ec0821fe0e81650968b201a031edf4bc15"}, + {file = 
"pyinstrument-4.6.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8bf16e459a868d9dbaacff4f0a0acd6ad78ce36f2aceabf21e9fd0c3b6aca0d4"}, + {file = "pyinstrument-4.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cb83e445795431c3d867b298c0583ee27717bbc50e5120a4c98575c979ab3ab8"}, + {file = "pyinstrument-4.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29072b1be183e173d7b0f12caf29f8717d273afbf34df950f5fa0d98127cd3fb"}, + {file = "pyinstrument-4.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09502af2a383c59e5a0d3bebfab7e5845f79122348358e9e52b2b0187db84a44"}, + {file = "pyinstrument-4.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a23c982eb9c4d2f8fe553dacb9bdc0991170a0998b94c84f75c2a052e8af4c74"}, + {file = "pyinstrument-4.6.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f7a38ef482f2151393e729c5582191e4ab05f0ed1fa56b16c2377ff3129107af"}, + {file = "pyinstrument-4.6.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e983e16c2fdfb752387133380859c3414e119e41c14f39f5f869f29dcf6e995c"}, + {file = "pyinstrument-4.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d00c87e5cea48a562d67f0436999463b7989cff2e4c196b0e8ba06d515f191a9"}, + {file = "pyinstrument-4.6.0-cp38-cp38-win32.whl", hash = "sha256:a24c95cabf2ca5d79b62dbc8ff17749768b8aafd777841352f59f4ffd6688782"}, + {file = "pyinstrument-4.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:f3d88b66dbbcdc6e4c57bd8574ad9d096cd23285eee0f4a5cf74f0e0df6aa190"}, + {file = "pyinstrument-4.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2bcfec45cdbb9edf6d5853debac4a792de589e621be07a71dc76acb36e144a3a"}, + {file = "pyinstrument-4.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e790515a22844bbccaa388c7715b037c45a8d0155c4a6f2990659998a8920501"}, + {file = "pyinstrument-4.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93a30e0d93633a28d4adcf7d7e2d158d6331809b95c2c4a155da17ea1e43eaa3"}, + {file = "pyinstrument-4.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa554eb8ef1c54849dbf480965b073f39b39b517e466ce241808a00398f9742a"}, + {file = "pyinstrument-4.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e289898c644cbbb61d931bbcb6505e2a279ad1122612c9098bfb0958ebf5764"}, + {file = "pyinstrument-4.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:20ce0f1612a019888a6b94fa7f1e7862842f0b5219282e3354d5b35aceb363f6"}, + {file = "pyinstrument-4.6.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4935f3cdb9062fceac65c50de76f07e05cf630bd3a9c663fedc9e88b5efe7d7c"}, + {file = "pyinstrument-4.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:dc9c4577ef4b06ae1592c920d0a4f0f0db587a16f530c629ad93e125bc79ebb7"}, + {file = "pyinstrument-4.6.0-cp39-cp39-win32.whl", hash = "sha256:3ec6b04d8cfb34aec48de7fa77aeb919e8e7e19909740ab7a5553339f6f4c53a"}, + {file = "pyinstrument-4.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:8a6d2e5c15f989629fac41536ec2ca1fe81359fadf4dadf2ff24fe96b389f6df"}, + {file = "pyinstrument-4.6.0.tar.gz", hash = "sha256:3e509e879c853dbc5fdc1757f0cfdbf8bee899c80f53d504a7df28898f0fa8ed"}, +] + +[package.extras] +bin = ["click", "nox"] +docs = ["furo (==2021.6.18b36)", "myst-parser (==0.15.1)", "sphinx (==4.2.0)", "sphinxcontrib-programoutput (==0.17)"] +examples = ["django", "numpy"] +test = ["flaky", 
"greenlet (>=3.0.0a1)", "ipython", "pytest", "pytest-asyncio (==0.12.0)", "sphinx-autobuild (==2021.3.14)", "trio"] +types = ["typing-extensions"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rich" +version = "13.6.0" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.6.0-py3-none-any.whl", hash = "sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245"}, + {file = "rich-13.6.0.tar.gz", hash = "sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" +typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""} + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "richbench" +version = "1.0.3" +description = "Richbench, a little benchmarking tool" +optional = false +python-versions = ">=3.6" +files = [ + {file = "richbench-1.0.3-py3-none-any.whl", hash = "sha256:f52651cc0e0069a1355c5ed8cda214cb3f8961e7aaa431e440071d30f62e3e55"}, + {file = "richbench-1.0.3.tar.gz", hash = "sha256:744afa3e78cbd919721042c11f7b7f9d2f546cebb3333d40290c4a0d88791701"}, +] + +[package.dependencies] +pyinstrument = "*" +rich = "*" + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "typing-extensions" +version = "4.7.1" +description = "Backported and Experimental Type Hints for Python 3.7+" +optional = false +python-versions = ">=3.7" +files = [ + {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, +] + +[[package]] +name = "urllib3" +version = "2.0.6" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "urllib3-2.0.6-py3-none-any.whl", hash = "sha256:7a7c7003b000adf9e7ca2a377c9688bbc54ed41b985789ed576570342a375cd2"}, + {file = "urllib3-2.0.6.tar.gz", hash = "sha256:b19e1a85d206b56d7df1d5e683df4a7725252a964e3993648dd0fb5a1c157564"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "yarl" +version = "1.9.2" +description = "Yet another URL library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"}, + {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"}, + {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"}, + {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"}, + {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"}, + {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"}, + {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"}, + {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"}, + {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = 
"sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"}, + {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"}, + {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"}, + {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"}, + {file = 
"yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"}, + {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"}, + {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"}, + {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" +typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} + +[[package]] +name = "zipp" +version = "3.15.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.7" +files = [ + {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, + {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.7" +content-hash = "84953079e0bc825b495f10721d514529becb4fc8eef2b9772562f63b0bd75ef3" diff --git a/benchmarks/poetry.toml b/benchmarks/poetry.toml new file mode 100644 index 00000000..eadfd54b --- /dev/null +++ b/benchmarks/poetry.toml @@ -0,0 +1,2 @@ +[virtualenvs] +create = true \ No newline at end of file diff --git a/benchmarks/pyproject.toml b/benchmarks/pyproject.toml new file mode 100644 index 00000000..c0c82142 --- /dev/null +++ b/benchmarks/pyproject.toml @@ -0,0 +1,16 @@ +[tool.poetry] +name = "package" +version = "0.1.0" +description = "OpenSearch Python client benchmarks." 
+authors = ["Daniel Doubrovkine "] +license = "Apache 2.0" +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.7" +opensearch-py = { path = "..", develop=true, extras=["async"] } +richbench = "*" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/benchmarks/thread_with_return_value.py b/benchmarks/thread_with_return_value.py new file mode 100644 index 00000000..089c6fde --- /dev/null +++ b/benchmarks/thread_with_return_value.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + + +from threading import Thread +from typing import Any, Optional + + +class ThreadWithReturnValue(Thread): + _target: Any + _args: Any + _kwargs: Any + + def __init__( + self, + group: Any = None, + target: Any = None, + name: Optional[str] = None, + args: Any = (), + kwargs: Any = {}, + Verbose: Optional[bool] = None, + ) -> None: + Thread.__init__(self, group, target, name, args, kwargs) + self._return = None + + def run(self) -> None: + if self._target is not None: + self._return = self._target(*self._args, **self._kwargs) + + def join(self, *args: Any) -> Any: + Thread.join(self, *args) + return self._return diff --git a/dev-requirements.txt b/dev-requirements.txt index a1b4bd2b..a79a1a0b 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -3,23 +3,23 @@ pytest pytest-cov coverage mock -sphinx<7.2 +sphinx<7.3 sphinx_rtd_theme jinja2 pytz +deepmerge # No wheels for Python 3.10 yet! numpy; python_version<"3.10" pandas; python_version<"3.10" -pyyaml>=5.4; python_version>="3.6" -pyyaml==5.3.1; python_version<"3.6" +pyyaml>=5.4 isort -black; python_version>="3.6" +black twine # Requirements for testing [async] extra -aiohttp; python_version>="3.6" -pytest-asyncio<=0.21.1; python_version>="3.6" -unasync; python_version>="3.6" +aiohttp +pytest-asyncio<=0.21.1 +unasync diff --git a/docs/source/conf.py b/docs/source/conf.py index ea677630..64ff3c52 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -1,3 +1,12 @@ +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + # Configuration file for the Sphinx documentation builder. # # This file only contains a selection of the most common options. For a full @@ -17,9 +26,11 @@ # -- Project information ----------------------------------------------------- -project = "OpenSearch Python Client" -copyright = "OpenSearch Project Contributors" -author = "OpenSearch Project Contributors" +from typing import Any + +project: str = "OpenSearch Python Client" +copyright: str = "OpenSearch Project Contributors" +author: str = "OpenSearch Project Contributors" # -- General configuration --------------------------------------------------- @@ -27,7 +38,7 @@ # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = [ +extensions: Any = [ "sphinx.ext.autodoc", "sphinx_rtd_theme", "sphinx.ext.viewcode", @@ -38,12 +49,12 @@ ] # Add any paths that contain templates here, relative to this directory. 
-templates_path = ["_templates"] +templates_path: Any = ["_templates"] # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [] +exclude_patterns: Any = [] # -- Options for HTML output ------------------------------------------------- @@ -51,31 +62,31 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = "sphinx_rtd_theme" +html_theme: str = "sphinx_rtd_theme" # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] +html_static_path: Any = ["_static"] # -- additional settings ------------------------------------------------- -intersphinx_mapping = { +intersphinx_mapping: Any = { "python": ("https://docs.python.org/3", None), } -html_logo = "imgs/OpenSearch.svg" +html_logo: str = "imgs/OpenSearch.svg" # These paths are either relative to html_static_path # or fully qualified paths (eg. https://...) -html_css_files = [ +html_css_files: Any = [ "css/custom.css", ] # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -html_show_sphinx = False +html_show_sphinx: bool = False # add github link -html_context = { +html_context: Any = { "display_github": True, "github_user": "opensearch-project", "github_repo": "opensearch-py", @@ -85,18 +96,18 @@ # -- autodoc config ------------------------------------------------- # This value controls how to represent typehints. # https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#confval-autodoc_typehints -autodoc_typehints = "description" +autodoc_typehints: str = "description" # This value selects what content will be inserted into the main body of an autoclass directive. # https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#confval-autoclass_content -autoclass_content = "both" +autoclass_content: str = "both" # https://www.sphinx-doc.org/en/master/usage/configuration.html#confval-add_module_names # add_module_names = False # The default options for autodoc directives. # https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#confval-autodoc_default_options -autodoc_default_options = { +autodoc_default_options: Any = { # If set, autodoc will generate document for the members of the target module, class or exception. # noqa: E501 # https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#directive-option-automodule-members "members": True, diff --git a/guides/advanced_index_actions.md b/guides/advanced_index_actions.md new file mode 100644 index 00000000..a3a0620e --- /dev/null +++ b/guides/advanced_index_actions.md @@ -0,0 +1,113 @@ +# Advanced Index Actions Guide +- [Advanced Index Actions](#advanced-index-actions) + - [Setup](#setup) + - [Api Actions](#api-actions) + - [Clear Index Cache](#clear-index-cache) + - [Flush Index](#flush-index) + - [Refresh Index](#refresh-index) + - [Open or Close Index](#open-or-close-index) + - [Force Merge Index](#force-merge-index) + - [Clone Index](#clone-index) + - [Split Index](#split-index) + - [Cleanup](#cleanup) + + +# Advanced Index Actions +In this guide, we will look at some advanced index actions that are not covered in the [Index Lifecycle](index_lifecycle.md) guide. 
+
+## Setup
+Let's create a client instance and an index named `movies`:
+
+```python
+from opensearchpy import OpenSearch
+client = OpenSearch(
+    hosts=['https://localhost:9200'],
+    use_ssl=True,
+    verify_certs=False,
+    http_auth=('admin', 'admin')
+)
+client.indices.create(index='movies')
+```
+
+## API Actions
+### Clear index cache
+You can clear the cache of an index or indices by using the `indices.clear_cache` API action. The following example clears the cache of the `movies` index:
+
+```python
+client.indices.clear_cache(index='movies')
+```
+
+By default, the `indices.clear_cache` API action clears all types of cache. To clear specific types of cache, pass the `query`, `fielddata`, or `request` parameter to the API action:
+
+```python
+client.indices.clear_cache(index='movies', query=True)
+client.indices.clear_cache(index='movies', fielddata=True, request=True)
+```
+
+### Flush index
+Sometimes you might want to flush an index or indices to make sure that all data in the transaction log is persisted to the index. To flush an index or indices, use the `indices.flush` API action. The following example flushes the `movies` index:
+
+```python
+client.indices.flush(index='movies')
+```
+
+### Refresh index
+You can refresh an index or indices to make sure that all changes are available for search. To refresh an index or indices, use the `indices.refresh` API action:
+
+```python
+client.indices.refresh(index='movies')
+```
+
+### Open or close index
+You can close an index to prevent read and write operations on the index. A closed index does not have to maintain certain data structures that an open index requires, reducing the memory and disk space the index consumes. The following example closes and reopens the `movies` index:
+
+```python
+client.indices.close(index='movies')
+client.indices.open(index='movies')
+```
+
+### Force merge index
+You can force merge an index or indices to reduce the number of segments in the index. This can be useful if an index contains a large number of small segments, since merging them reduces the index's memory footprint. Note that force merging is resource-intensive and is recommended only for read-only indices. The following example force merges the `movies` index:
+
+```python
+client.indices.forcemerge(index='movies')
+```
+
+### Clone index
+You can clone an index to create a new index with the same mappings, data, and most of the settings. The source index must be in a read-only state for cloning. The following example blocks write operations on the `movies` index, clones it into a new index named `movies_clone`, then re-enables writes:
+
+```python
+client.indices.put_settings(index='movies', body={'index': {'blocks': {'write': True}}})
+client.indices.clone(index='movies', target='movies_clone')
+client.indices.put_settings(index='movies', body={'index': {'blocks': {'write': False}}})
+```
+
+### Split index
+You can split an index into another index with more primary shards. The source index must be in a read-only state for splitting.
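+
+The target's primary shard count is constrained by the source index: it must be a whole multiple of the source's `number_of_shards` and a factor of its `number_of_routing_shards`. The following minimal sketch illustrates that arithmetic; the `can_split` helper is hypothetical and not part of the client:
+
+```python
+def can_split(source_shards: int, target_shards: int, routing_shards: int) -> bool:
+    # The target must be a whole multiple of the source shard count
+    # and must divide number_of_routing_shards evenly.
+    return target_shards % source_shards == 0 and routing_shards % target_shards == 0
+
+print(can_split(5, 10, 30))  # True: 10 is a multiple of 5 and a factor of 30
+print(can_split(5, 12, 30))  # False: 12 is neither
+```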
The following example creates the read-only `books` index with 5 primary shards and 30 routing shards (5 is a factor of 30), splits it into `bigger_books` with 10 primary shards (also a factor of 30), then re-enables writes:
+
+```python
+client.indices.create(
+    index='books',
+    body={ 'settings': {
+        'index': { 'number_of_shards': 5,
+            'number_of_routing_shards': 30,
+            'blocks': { 'write': True } } } })
+
+client.indices.split(
+    index='books',
+    target='bigger_books',
+    body={ 'settings': { 'index': { 'number_of_shards': 10 } } })
+
+client.indices.put_settings(index='books', body={ 'index': { 'blocks': { 'write': False } } })
+```
+
+## Cleanup
+
+Let's delete all the indices we created in this guide:
+
+```python
+client.indices.delete(index=['movies', 'books', 'movies_clone', 'bigger_books'])
+```
+
+# Sample Code
+See [advanced_index_actions_sample.py](/samples/advanced_index_actions/advanced_index_actions_sample.py) for a working sample of the concepts in this guide.
\ No newline at end of file
diff --git a/guides/auth.md b/guides/auth.md
index 4b314764..a07d3996 100644
--- a/guides/auth.md
+++ b/guides/auth.md
@@ -1,5 +1,6 @@
 - [Authentication](#authentication)
   - [IAM Authentication](#iam-authentication)
+  - [IAM Authentication with a Synchronous Client](#iam-authentication-with-a-synchronous-client)
   - [IAM Authentication with an Async Client](#iam-authentication-with-an-async-client)
   - [Kerberos](#kerberos)

@@ -9,24 +10,28 @@ OpenSearch allows you to use different methods for the authentication via `conne

 ## IAM Authentication

-Opensearch-py supports IAM-based authentication via `AWSV4SignerAuth`, which uses `RequestHttpConnection` as the transport class for communicating with OpenSearch clusters running in Amazon Managed OpenSearch and OpenSearch Serverless, and works in conjunction with [botocore](https://pypi.org/project/botocore/).
+This library supports IAM-based authentication when communicating with OpenSearch clusters running in Amazon Managed OpenSearch and OpenSearch Serverless.
+
+## IAM Authentication with a Synchronous Client
+
+For `Urllib3HttpConnection` use `Urllib3AWSV4SignerAuth`, and for `RequestsHttpConnection` use `RequestsAWSV4SignerAuth`.
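+
+For example, a `requests`-based client is configured as follows. This is a minimal sketch that mirrors the urllib3 example below; the endpoint is a placeholder:
+
+```python
+from opensearchpy import OpenSearch, RequestsHttpConnection, RequestsAWSV4SignerAuth
+import boto3
+
+host = '' # cluster endpoint, for example: my-test-domain.us-east-1.es.amazonaws.com
+region = 'us-west-2'
+service = 'es' # 'aoss' for OpenSearch Serverless
+credentials = boto3.Session().get_credentials()
+auth = RequestsAWSV4SignerAuth(credentials, region, service)
+
+client = OpenSearch(
+    hosts = [{'host': host, 'port': 443}],
+    http_auth = auth,
+    use_ssl = True,
+    verify_certs = True,
+    connection_class = RequestsHttpConnection,
+    pool_maxsize = 20
+)
+```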
 ```python
-from opensearchpy import OpenSearch, RequestsHttpConnection, AWSV4SignerAuth
+from opensearchpy import OpenSearch, Urllib3HttpConnection, Urllib3AWSV4SignerAuth
 import boto3

 host = '' # cluster endpoint, for example: my-test-domain.us-east-1.es.amazonaws.com
 region = 'us-west-2'
 service = 'es' # 'aoss' for OpenSearch Serverless
 credentials = boto3.Session().get_credentials()
-auth = AWSV4SignerAuth(credentials, region, service)
+auth = Urllib3AWSV4SignerAuth(credentials, region, service)

 client = OpenSearch(
     hosts = [{'host': host, 'port': 443}],
     http_auth = auth,
     use_ssl = True,
     verify_certs = True,
-    connection_class = RequestsHttpConnection,
+    connection_class = Urllib3HttpConnection,
     pool_maxsize = 20
 )

@@ -113,7 +118,6 @@ client = OpenSearch(
     ['htps://...'],
     use_ssl=True,
     verify_certs=True,
-    connection_class=RequestsHttpConnection,
     http_auth=HTTPKerberosAuth(mutual_authentication=OPTIONAL)
 )

diff --git a/guides/connection_classes.md b/guides/connection_classes.md
new file mode 100644
index 00000000..da7357fb
--- /dev/null
+++ b/guides/connection_classes.md
@@ -0,0 +1,81 @@
+- [Connection Classes](#connection-classes)
+  - [Selecting a Connection Class](#selecting-a-connection-class)
+    - [Urllib3HttpConnection](#urllib3httpconnection)
+    - [RequestsHttpConnection](#requestshttpconnection)
+    - [AsyncHttpConnection](#asynchttpconnection)
+  - [Connection Pooling](#connection-pooling)
+
+# Connection Classes
+
+The OpenSearch Python synchronous client supports both the `Urllib3HttpConnection` connection class (default) from the [urllib3](https://pypi.org/project/urllib3/) library, and `RequestsHttpConnection` from the [requests](https://pypi.org/project/requests/) library. We recommend you use the default unless your application is standardized on `requests`.
+
+The faster asynchronous client implements a class called `AsyncHttpConnection`, which uses [aiohttp](https://pypi.org/project/aiohttp/).
+
+## Selecting a Connection Class
+
+### Urllib3HttpConnection
+
+```python
+from opensearchpy import OpenSearch, Urllib3HttpConnection
+
+client = OpenSearch(
+    hosts = [{'host': 'localhost', 'port': 9200}],
+    http_auth = ('admin', 'admin'),
+    use_ssl = True,
+    verify_certs = False,
+    ssl_show_warn = False,
+    connection_class = Urllib3HttpConnection
+)
+```
+
+### RequestsHttpConnection
+
+```python
+from opensearchpy import OpenSearch, RequestsHttpConnection
+
+client = OpenSearch(
+    hosts = [{'host': 'localhost', 'port': 9200}],
+    http_auth = ('admin', 'admin'),
+    use_ssl = True,
+    verify_certs = False,
+    ssl_show_warn = False,
+    connection_class = RequestsHttpConnection
+)
+```
+
+### AsyncHttpConnection
+
+```python
+from opensearchpy import AsyncOpenSearch, AsyncHttpConnection
+
+async def main():
+    client = AsyncOpenSearch(
+        hosts = [{'host': 'localhost', 'port': 9200}],
+        http_auth = ('admin', 'admin'),
+        use_ssl = True,
+        verify_certs = False,
+        ssl_show_warn = False,
+        connection_class = AsyncHttpConnection
+    )
+```
+
+## Connection Pooling
+
+The OpenSearch Python client maintains a pool of connections for the `hosts` specified during initialization; in addition, the underlying HTTP library keeps its own pool of HTTP connections to each host. You can adjust the maximum size of the latter pool with `pool_maxsize`.
+
+If you don't set this value, each HTTP library falls back to its own default, which is typically `10`. Increasing the pool size may improve performance in multithreaded scenarios where many requests are in flight at once.
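+
+For example, a single client instance can be shared across a pool of worker threads that issue requests concurrently. The following is a minimal sketch, assuming a local cluster secured with the default `admin` credentials and a `movies` index that has a numeric `year` field:
+
+```python
+from concurrent.futures import ThreadPoolExecutor
+
+from opensearchpy import OpenSearch
+
+client = OpenSearch(
+    hosts = [{'host': 'localhost', 'port': 9200}],
+    http_auth = ('admin', 'admin'),
+    use_ssl = True,
+    verify_certs = False,
+    ssl_show_warn = False,
+    pool_maxsize = 8  # sized to match the number of worker threads below
+)
+
+def count_decade(year: int) -> int:
+    # Each worker borrows an HTTP connection from the shared pool.
+    query = {'query': {'range': {'year': {'gte': year, 'lt': year + 10}}}}
+    return client.count(index='movies', body=query)['count']
+
+with ThreadPoolExecutor(max_workers=8) as executor:
+    print(list(executor.map(count_decade, range(1950, 2030, 10))))
+```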
+ +The following example sets the number of connections in the connection pool to 12. + +```python +from opensearchpy import OpenSearch + +client = OpenSearch( + hosts = [{'host': 'localhost', 'port': 9200}], + http_auth = ('admin', 'admin'), + use_ssl = True, + verify_certs = False, + ssl_show_warn = False, + pool_maxsize = 12, +) +``` \ No newline at end of file diff --git a/guides/document_lifecycle.md b/guides/document_lifecycle.md new file mode 100644 index 00000000..fcad9e8c --- /dev/null +++ b/guides/document_lifecycle.md @@ -0,0 +1,182 @@ +# Document Lifecycle Guide +- [Document Lifecycle](#document-lifecycle) + - [Setup](#setup) + - [Document API Actions](#document-api-actions) + - [Create a new document with specified ID](#create-a-new-document-with-specified-id) + - [Create a new document with auto-generated ID](#create-a-new-document-with-auto-generated-id) + - [Get a document](#get-a-document) + - [Get multiple documents](#get-multiple-documents) + - [Check if a document exists](#check-if-a-document-exists) + - [Update a document](#update-a-document) + - [Update multiple documents by query](#update-multiple-documents-by-query) + - [Delete a document](#delete-a-document) + - [Delete multiple documents by query](#delete-multiple-documents-by-query) + - [Cleanup](#cleanup) + + +# Document Lifecycle +This guide covers OpenSearch Python Client API actions for Document Lifecycle. You'll learn how to create, read, update, and delete documents in your OpenSearch cluster. Whether you're new to OpenSearch or an experienced user, this guide provides the information you need to manage your document lifecycle effectively. + +## Setup +Assuming you have OpenSearch running locally on port 9200, you can create a client instance +with the following code: + +```python +from opensearchpy import OpenSearch +client = OpenSearch( + hosts=['https://localhost:9200'], + use_ssl=True, + verify_certs=False, + http_auth=('admin', 'admin') +) +``` + +Next, create an index named `movies` with the default settings: + +```python +index = 'movies' +if not client.indices.exists(index=index): + client.indices.create(index=index) +``` + +## Document API Actions + +### Create a new document with specified ID +To create a new document, use the `create` or `index` API action. The following code creates two new documents with IDs of `1` and `2`: + +```python +client.create(index=index, id=1, body={'title': 'Beauty and the Beast', 'year': 1991}) +client.create(index=index, id=2, body={'title': 'Beauty and the Beast - Live Action', 'year': 2017}) +``` + +Note that the `create` action is NOT idempotent. If you try to create a document with an ID that already exists, the request will fail: + +```python +try: + client.create(index=index, id=1, body={'title': 'Just Another Movie'}) +except Exception as e: + print(e) +``` + +The `index` action, on the other hand, is idempotent. If you try to index a document with an existing ID, the request will succeed and overwrite the existing document. Note that no new document will be created in this case. You can think of the `index` action as an upsert: + +```python +client.index(index=index, id=2, body={'title': 'Updated Title'}) +client.index(index=index, id=2, body={'title': 'The Lion King', 'year': 1994}) +``` + +### Create a new document with auto-generated ID +You can also create a new document with an auto-generated ID by omitting the `id` parameter. 
The following code creates a document with an auto-generated ID in the `movies` index:
+
+```python
+client.index(index=index, body={"title": "The Lion King 2", "year": 1998})
+```
+
+In this case, the ID of the created document is returned in the `_id` field of the response body:
+
+```python
+{
+  "_index": "movies",
+  "_type": "_doc",
+  "_id": "1",
+  "_version": 1,
+  "result": "created",
+  "_shards": {
+    "total": 2,
+    "successful": 1,
+    "failed": 0
+  },
+  "_seq_no": 0,
+  "_primary_term": 1
+}
+```
+
+### Get a document
+To get a document, use the `get` API action. The following code gets the document with ID `1` from the `movies` index:
+
+```python
+client.get(index=index, id=1)['_source']
+# OUTPUT: {'title': 'Beauty and the Beast', 'year': 1991}
+```
+
+You can also use the `_source_includes` and `_source_excludes` parameters to specify which fields to include or exclude in the response:
+
+```python
+client.get(index=index, id=1, _source_includes=['title'])['_source']
+# OUTPUT: {'title': 'Beauty and the Beast'}
+
+client.get(index=index, id=1, _source_excludes=['title'])['_source']
+# OUTPUT: {'year': 1991}
+```
+
+### Get multiple documents
+To get multiple documents, use the `mget` API action:
+
+```python
+client.mget(index=index, body={ 'docs': [{ '_id': 1 }, { '_id': 2 }] })['docs']
+```
+
+### Check if a document exists
+To check if a document exists, use the `exists` API action. The following code checks if the document with ID `1` exists in the `movies` index:
+
+```python
+client.exists(index=index, id=1)
+```
+
+### Update a document
+To update a document, use the `update` API action. The following code updates the `year` field of the document with ID `1` in the `movies` index:
+
+```python
+client.update(index=index, id=1, body={'doc': {'year': 1995}})
+```
+
+Alternatively, you can use the `script` parameter to update a document using a script. The following code increments the `year` field of the document with ID `1` by 5 using Painless, the default scripting language in OpenSearch:
+
+```python
+client.update(index=index, id=1, body={ 'script': { 'source': 'ctx._source.year += 5' } })
+```
+
+Note that while both the `update` and `index` actions perform updates, they are not the same. The `update` action is a partial update: it only changes the fields specified in the request body. The `index` action is a full update: it overwrites the entire document with the new one.
+
+### Update multiple documents by query
+
+To update documents that match a query, use the `update_by_query` API action. The following code decreases by 1 the `year` field of all documents with `year` greater than 2023:
+
+```python
+client.update_by_query(index=index, body={
+  'script': { 'source': 'ctx._source.year -= 1' },
+  'query': { 'range': { 'year': { 'gt': 2023 } } }
+})
+```
+
+### Delete a document
+To delete a document, use the `delete` API action. The following code deletes the document with ID `1`:
+
+```python
+client.delete(index=index, id=1)
+```
+
+The `delete` action is not idempotent by default. If you try to delete a document that does not exist, or delete the same document twice, you will get a Not Found (404) error. You can make the `delete` action idempotent by setting the `ignore` parameter to `404`:
+
+```python
+client.delete(index=index, id=1, ignore=404)
+```
+
+### Delete multiple documents by query
+To delete documents that match a query, use the `delete_by_query` API action.
The following code deletes all documents with `year` greater than 2023: + +```python +client.delete_by_query(index=index, body={ + 'query': { 'range': { 'year': { 'gt': 2023 } } } +}) +``` + +## Cleanup +To clean up the resources created in this guide, delete the `movies` index: + +```python +client.indices.delete(index=index) +``` + +# Sample Code +See [document_lifecycle_sample.py](/samples/document_lifecycle/document_lifecycle_sample.py) for a working sample of the concepts in this guide. \ No newline at end of file diff --git a/guides/index_template.md b/guides/index_template.md new file mode 100644 index 00000000..3afdd1dc --- /dev/null +++ b/guides/index_template.md @@ -0,0 +1,184 @@ +# Index Template +Index templates are a convenient way to define settings, mappings, and aliases for one or more indices when they are created. In this guide, you'll learn how to create an index template and apply it to an index. + +## Setup + +Assuming you have OpenSearch running locally on port 9200, you can create a client instance with the following code: +```python +from opensearchpy import OpenSearch +client = OpenSearch( + hosts=['https://localhost:9200'], + use_ssl=True, + verify_certs=False, + http_auth=('admin', 'admin') +) +``` + +## Index Template API Actions + +### Create an Index Template +You can create an index template to define default settings and mappings for indices of certain patterns. The following example creates an index template named `books` with default settings and mappings for indices of the `books-*` pattern: + +```python +client.indices.put_index_template( + name='books', + body={ + 'index_patterns': ['books-*'], + 'template': { + 'settings': { + 'index': { + 'number_of_shards': 3, + 'number_of_replicas': 0 + } + }, + 'mappings': { + 'properties': { + 'title': { 'type': 'text' }, + 'author': { 'type': 'text' }, + 'published_on': { 'type': 'date' }, + 'pages': { 'type': 'integer' } + } + } + } + } +) +``` + +Now, when you create an index that matches the `books-*` pattern, OpenSearch will automatically apply the template's settings and mappings to the index. Let's create an index named `books-nonfiction` and verify that its settings and mappings match those of the template: + +```python +client.indices.create(index='books-nonfiction') +print(client.indices.get(index='books-nonfiction')) +``` + +### Multiple Index Templates +If multiple index templates match the index's name, OpenSearch will apply the template with the highest priority. 
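+
+After creating the templates in the example below, you can preview the settings an index name would receive without actually creating the index. This is a sketch that assumes your client version exposes the `indices.simulate_index_template` API action:
+
+```python
+# Shows the resolved template (settings, mappings, aliases) that OpenSearch
+# would apply to an index named 'books-fiction-romance'.
+print(client.indices.simulate_index_template(name='books-fiction-romance'))
+```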
The following example creates two index templates, `books` and `books-fiction`, with different settings:
+
+```python
+client.indices.put_index_template(
+  name='books',
+  body={
+    'index_patterns': ['books-*'],
+    'priority': 0, # default priority
+    'template': {
+      'settings': {
+        'index': {
+          'number_of_shards': 3,
+          'number_of_replicas': 0
+        }
+      }
+    }
+  }
+)
+
+client.indices.put_index_template(
+  name='books-fiction',
+  body={
+    'index_patterns': ['books-fiction-*'],
+    'priority': 1, # higher priority than the `books` template
+    'template': {
+      'settings': {
+        'index': {
+          'number_of_shards': 1,
+          'number_of_replicas': 1
+        }
+      }
+    }
+  }
+)
+```
+
+When we create an index named `books-fiction-romance`, OpenSearch will apply the `books-fiction` template's settings to the index:
+
+```python
+client.indices.create(index='books-fiction-romance')
+print(client.indices.get(index='books-fiction-romance'))
+```
+
+### Composable Index Templates
+Composable index templates allow you to define reusable component templates and compose them into a final index template. The following example creates a component template named `books_mappings` with default mappings, then composes it into the `books` and `books-fiction` index templates:
+
+```python
+client.cluster.put_component_template(
+  name='books_mappings',
+  body={
+    'template': {
+      'mappings': {
+        'properties': {
+          'title': { 'type': 'text' },
+          'author': { 'type': 'text' },
+          'published_on': { 'type': 'date' },
+          'pages': { 'type': 'integer' }
+        }
+      }
+    }
+  }
+)
+
+client.indices.put_index_template(
+  name='books',
+  body={
+    'index_patterns': ['books-*'],
+    'composed_of': ['books_mappings'], # use the `books_mappings` component template
+    'priority': 0,
+    'template': {
+      'settings': {
+        'index': {
+          'number_of_shards': 3,
+          'number_of_replicas': 0
+        }
+      }
+    }
+  }
+)
+
+client.indices.put_index_template(
+  name='books-fiction',
+  body={
+    'index_patterns': ['books-fiction-*'],
+    'composed_of': ['books_mappings'], # use the `books_mappings` component template
+    'priority': 1,
+    'template': {
+      'settings': {
+        'index': {
+          'number_of_shards': 1,
+          'number_of_replicas': 1
+        }
+      }
+    }
+  }
+)
+```
+
+When we create an index named `books-fiction-horror`, OpenSearch will apply the `books-fiction` template's settings and the `books_mappings` component template's mappings to the index:
+
+```python
+client.indices.create(index='books-fiction-horror')
+print(client.indices.get(index='books-fiction-horror'))
+```
+
+### Get an Index Template
+You can get an index template with the `get_index_template` API action:
+
+```python
+print(client.indices.get_index_template(name='books'))
+```
+
+### Delete an Index Template
+You can delete an index template with the `delete_index_template` API action:
+
+```python
+client.indices.delete_index_template(name='books')
+```
+
+## Cleanup
+Let's delete all resources created in this guide:
+
+```python
+client.indices.delete(index='books-*')
+client.indices.delete_index_template(name='books-fiction')
+client.cluster.delete_component_template(name='books_mappings')
+```
+
+# Sample Code
+See [index_template_sample.py](/samples/index_template/index_template_sample.py) for a working sample of the concepts in this guide.
\ No newline at end of file
diff --git a/guides/json.md b/guides/json.md
new file mode 100644
index 00000000..edefa209
--- /dev/null
+++ b/guides/json.md
@@ -0,0 +1,66 @@
+- [Making Raw JSON REST Requests](#making-raw-json-rest-requests)
+  - [GET](#get)
+  - [PUT](#put)
+  - [POST](#post)
+  - [DELETE](#delete)
+
+# Making Raw JSON REST Requests
+
+The OpenSearch client implements many high-level REST DSLs that invoke OpenSearch APIs. However, you may find yourself in a situation that requires you to invoke an API that is not supported by the client. Use `client.transport.perform_request` to do so. See [samples/json](../samples/json) for a complete working sample.
+
+## GET
+
+The following example returns the server version information via `GET /`.
+
+```python
+info = client.transport.perform_request('GET', '/')
+print(f"Welcome to {info['version']['distribution']} {info['version']['number']}!")
+```
+
+Note that the client will parse the response as JSON when appropriate.
+
+## PUT
+
+The following example creates an index.
+
+```python
+index_body = {
+  'settings': {
+    'index': {
+      'number_of_shards': 4
+    }
+  }
+}
+
+client.transport.perform_request("PUT", "/movies", body=index_body)
+```
+
+Note that the client will raise errors automatically. For example, if the index already exists, an `opensearchpy.exceptions.RequestError(400, 'resource_already_exists_exception', ...)` will be raised.
+
+## POST
+
+The following example searches for a document.
+
+```python
+q = 'miller'
+
+query = {
+  'size': 5,
+  'query': {
+    'multi_match': {
+      'query': q,
+      'fields': ['title^2', 'director']
+    }
+  }
+}
+
+client.transport.perform_request("POST", "/movies/_search", body = query)
+```
+
+## DELETE
+
+The following example deletes an index.
+
+```python
+client.transport.perform_request("DELETE", "/movies")
+```
diff --git a/guides/plugins/knn.md b/guides/plugins/knn.md
index 7a3e6977..a7775c88 100644
--- a/guides/plugins/knn.md
+++ b/guides/plugins/knn.md
@@ -15,7 +15,7 @@ Short for k-nearest neighbors, the k-NN plugin enables users to search for the k
 In the following example we create a 5-dimensional k-NN index with random data. You can find a synchronous version of this working sample in [samples/knn/knn-basics.py](../../samples/knn/knn-basics.py) and an asynchronous one in [samples/knn/knn-async-basics.py](../../samples/knn/knn-async-basics.py).

 ```bash
-$ poetry run knn/knn-basics.py
+$ poetry run python knn/knn-basics.py

 Searching for [0.61, 0.05, 0.16, 0.75, 0.49] ...
 {'_index': 'my-index', '_id': '3', '_score': 0.9252405, '_source': {'values': [0.64, 0.3, 0.27, 0.68, 0.51]}}
@@ -96,7 +96,7 @@ for hit in results["hits"]["hits"]:
 In [the boolean-filter.py sample](../../samples/knn/knn-boolean-filter.py) we create a 5-dimensional k-NN index with random data and a `metadata` field that contains a book genre (e.g. `fiction`). The search query is a k-NN search filtered by genre. The filter clause is outside the k-NN query clause and is applied after the k-NN search.

 ```bash
-$ poetry run knn/knn-boolean-filter.py
+$ poetry run python knn/knn-boolean-filter.py

 Searching for [0.08, 0.42, 0.04, 0.76, 0.41] with the 'romance' genre ...

In [the lucene-filter.py sample](../../samples/knn/knn-efficient-filter.py) we implement the example in [the k-NN documentation](https://opensearch.org/docs/latest/search-plugins/knn/filter-search-knn/), which creates an index that uses the Lucene engine and HNSW as the method in the mapping, containing hotel location and parking data, then search for the top three hotels near the location with the coordinates `[5, 4]` that are rated between 8 and 10, inclusive, and provide parking. ```bash -$ poetry run knn/knn-efficient-filter.py +$ poetry run python knn/knn-efficient-filter.py {'_index': 'hotels-index', '_id': '3', '_score': 0.72992706, '_source': {'location': [4.9, 3.4], 'parking': 'true', 'rating': 9}} {'_index': 'hotels-index', '_id': '6', '_score': 0.3012048, '_source': {'location': [6.4, 3.4], 'parking': 'true', 'rating': 9}} diff --git a/guides/proxy.md b/guides/proxy.md index 5be7edf4..96b7d441 100644 --- a/guides/proxy.md +++ b/guides/proxy.md @@ -13,7 +13,6 @@ OpenSearch( hosts=["htps://..."], use_ssl=True, verify_certs=True, - connection_class=RequestsHttpConnection, trust_env=True, ) ``` diff --git a/noxfile.py b/noxfile.py index 3504ff75..e9189cc9 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,6 +26,8 @@ # under the License. +from typing import Any + import nox SOURCE_FILES = ( @@ -33,19 +36,22 @@ "opensearchpy/", "test_opensearchpy/", "utils/", + "samples/", + "benchmarks/", + "docs/", ) -@nox.session(python=["2.7", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11"]) -def test(session): +@nox.session(python=["3.6", "3.7", "3.8", "3.9", "3.10", "3.11"]) # type: ignore +def test(session: Any) -> None: session.install(".") session.install("-r", "dev-requirements.txt") session.run("python", "setup.py", "test") -@nox.session() -def format(session): +@nox.session() # type: ignore +def format(session: Any) -> None: session.install("black", "isort") session.run("isort", "--profile=black", *SOURCE_FILES) @@ -55,9 +61,21 @@ def format(session): lint(session) -@nox.session() -def lint(session): - session.install("flake8", "black", "mypy", "isort", "types-requests", "types-six") +@nox.session(python=["3.7"]) # type: ignore +def lint(session: Any) -> None: + session.install( + "flake8", + "black", + "mypy", + "isort", + "types-requests", + "types-six", + "types-simplejson", + "types-python-dateutil", + "types-PyYAML", + "types-mock", + "types-pytz", + ) session.run("isort", "--check", "--profile=black", *SOURCE_FILES) session.run("black", "--target-version=py33", "--check", *SOURCE_FILES) @@ -69,7 +87,7 @@ def lint(session): # Run mypy on the package and then the type examples separately for # the two different mypy use-cases, ourselves and our users. 
- session.run("mypy", "--strict", "opensearchpy/") + session.run("mypy", "--strict", *SOURCE_FILES) session.run("mypy", "--strict", "test_opensearchpy/test_types/sync_types.py") session.run("mypy", "--strict", "test_opensearchpy/test_types/async_types.py") @@ -80,10 +98,17 @@ def lint(session): session.run("mypy", "--strict", "test_opensearchpy/test_types/sync_types.py") -@nox.session() -def docs(session): +@nox.session() # type: ignore +def docs(session: Any) -> None: session.install(".") session.install( "-rdev-requirements.txt", "sphinx-rtd-theme", "sphinx-autodoc-typehints" ) session.run("python", "-m", "pip", "install", "sphinx-autodoc-typehints") + + +@nox.session() # type: ignore +def generate(session: Any) -> None: + session.install("-rdev-requirements.txt") + session.run("python", "utils/generate-api.py") + format(session) diff --git a/opensearchpy/__init__.py b/opensearchpy/__init__.py index 6669f179..e9ef6485 100644 --- a/opensearchpy/__init__.py +++ b/opensearchpy/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -30,21 +31,25 @@ import logging import re -import sys import warnings from ._version import __versionstr__ _major, _minor, _patch = [ - int(x) for x in re.search(r"^(\d+)\.(\d+)\.(\d+)", __versionstr__).groups() + int(x) for x in re.search(r"^(\d+)\.(\d+)\.(\d+)", __versionstr__).groups() # type: ignore ] + VERSION = __version__ = (_major, _minor, _patch) logger = logging.getLogger("opensearch") logger.addHandler(logging.NullHandler()) +from ._async.client import AsyncOpenSearch +from ._async.http_aiohttp import AIOHttpConnection, AsyncConnection +from ._async.transport import AsyncTransport from .client import OpenSearch from .connection import ( + AsyncHttpConnection, Connection, RequestsHttpConnection, Urllib3HttpConnection, @@ -71,7 +76,12 @@ UnknownDslObject, ValidationException, ) -from .helpers import AWSV4SignerAsyncAuth, AWSV4SignerAuth +from .helpers import ( + AWSV4SignerAsyncAuth, + AWSV4SignerAuth, + RequestsAWSV4SignerAuth, + Urllib3AWSV4SignerAuth, +) from .helpers.aggs import A from .helpers.analysis import analyzer, char_filter, normalizer, token_filter, tokenizer from .helpers.document import Document, InnerDoc, MetaField @@ -166,6 +176,8 @@ "OpenSearchWarning", "OpenSearchDeprecationWarning", "AWSV4SignerAuth", + "Urllib3AWSV4SignerAuth", + "RequestsAWSV4SignerAuth", "AWSV4SignerAsyncAuth", "A", "AttrDict", @@ -239,24 +251,10 @@ "normalizer", "token_filter", "tokenizer", + "AIOHttpConnection", + "AsyncConnection", + "AsyncTransport", + "AsyncOpenSearch", + "AsyncHttpConnection", + "__versionstr__", ] - -try: - # Asyncio only supported on Python 3.6+ - if sys.version_info < (3, 6): - raise ImportError - - from ._async.client import AsyncOpenSearch - from ._async.http_aiohttp import AIOHttpConnection, AsyncConnection - from ._async.transport import AsyncTransport - from .connection import AsyncHttpConnection - - __all__ += [ - "AIOHttpConnection", - "AsyncConnection", - "AsyncTransport", - "AsyncOpenSearch", - "AsyncHttpConnection", - ] -except (ImportError, SyntaxError): - pass diff --git a/opensearchpy/__init__.pyi b/opensearchpy/__init__.pyi deleted file mode 100644 index 01fccaec..00000000 --- a/opensearchpy/__init__.pyi +++ /dev/null @@ -1,138 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source 
license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import sys -from typing import Tuple - -from .client import OpenSearch as OpenSearch -from .connection import AsyncHttpConnection as AsyncHttpConnection -from .connection import Connection as Connection -from .connection import RequestsHttpConnection as RequestsHttpConnection -from .connection import Urllib3HttpConnection as Urllib3HttpConnection -from .connection import connections as connections -from .connection_pool import ConnectionPool as ConnectionPool -from .connection_pool import ConnectionSelector as ConnectionSelector -from .connection_pool import RoundRobinSelector as RoundRobinSelector -from .exceptions import AuthenticationException as AuthenticationException -from .exceptions import AuthorizationException as AuthorizationException -from .exceptions import ConflictError as ConflictError -from .exceptions import ConnectionError as ConnectionError -from .exceptions import ConnectionTimeout as ConnectionTimeout -from .exceptions import IllegalOperation as IllegalOperation -from .exceptions import ImproperlyConfigured as ImproperlyConfigured -from .exceptions import NotFoundError as NotFoundError -from .exceptions import OpenSearchDeprecationWarning as OpenSearchDeprecationWarning -from .exceptions import OpenSearchDslException as OpenSearchDslException -from .exceptions import OpenSearchException as OpenSearchException -from .exceptions import OpenSearchWarning as OpenSearchWarning -from .exceptions import RequestError as RequestError -from .exceptions import SerializationError as SerializationError -from .exceptions import SSLError as SSLError -from .exceptions import TransportError as TransportError -from .exceptions import UnknownDslObject as UnknownDslObject -from .exceptions import ValidationException as ValidationException -from .helpers.aggs import A as A -from .helpers.analysis import Analyzer, CharFilter, Normalizer, TokenFilter, Tokenizer -from .helpers.document import Document as Document -from .helpers.document import InnerDoc as InnerDoc -from .helpers.document import MetaField as MetaField -from .helpers.faceted_search import DateHistogramFacet as DateHistogramFacet -from .helpers.faceted_search import Facet as Facet -from .helpers.faceted_search import FacetedResponse as FacetedResponse -from .helpers.faceted_search import FacetedSearch as FacetedSearch -from .helpers.faceted_search import HistogramFacet as HistogramFacet -from .helpers.faceted_search import NestedFacet as NestedFacet -from .helpers.faceted_search import RangeFacet as RangeFacet -from .helpers.faceted_search import TermsFacet as TermsFacet -from .helpers.field import Binary as Binary -from .helpers.field import Boolean as 
Boolean -from .helpers.field import Byte as Byte -from .helpers.field import Completion as Completion -from .helpers.field import CustomField as CustomField -from .helpers.field import Date as Date -from .helpers.field import DateRange as DateRange -from .helpers.field import DenseVector as DenseVector -from .helpers.field import Double as Double -from .helpers.field import DoubleRange as DoubleRange -from .helpers.field import Field as Field -from .helpers.field import Float as Float -from .helpers.field import FloatRange as FloatRange -from .helpers.field import GeoPoint as GeoPoint -from .helpers.field import GeoShape as GeoShape -from .helpers.field import HalfFloat as HalfFloat -from .helpers.field import Integer as Integer -from .helpers.field import IntegerRange as IntegerRange -from .helpers.field import Ip as Ip -from .helpers.field import IpRange as IpRange -from .helpers.field import Join as Join -from .helpers.field import Keyword as Keyword -from .helpers.field import Long as Long -from .helpers.field import LongRange as LongRange -from .helpers.field import Murmur3 as Murmur3 -from .helpers.field import Nested as Nested -from .helpers.field import Object as Object -from .helpers.field import Percolator as Percolator -from .helpers.field import RangeField as RangeField -from .helpers.field import RankFeature as RankFeature -from .helpers.field import RankFeatures as RankFeatures -from .helpers.field import ScaledFloat as ScaledFloat -from .helpers.field import SearchAsYouType as SearchAsYouType -from .helpers.field import Short as Short -from .helpers.field import SparseVector as SparseVector -from .helpers.field import Text as Text -from .helpers.field import TokenCount as TokenCount -from .helpers.field import construct_field as construct_field -from .helpers.function import SF as SF -from .helpers.index import Index as Index -from .helpers.index import IndexTemplate as IndexTemplate -from .helpers.mapping import Mapping as Mapping -from .helpers.query import Q as Q -from .helpers.search import MultiSearch as MultiSearch -from .helpers.search import Search as Search -from .helpers.update_by_query import UpdateByQuery as UpdateByQuery -from .helpers.utils import AttrDict as AttrDict -from .helpers.utils import AttrList as AttrList -from .helpers.utils import DslBase as DslBase -from .helpers.wrappers import Range as Range -from .serializer import JSONSerializer as JSONSerializer -from .transport import Transport as Transport - -try: - if sys.version_info < (3, 6): - raise ImportError - - from ._async.client import AsyncOpenSearch as AsyncOpenSearch - from ._async.http_aiohttp import AIOHttpConnection as AIOHttpConnection - from ._async.http_aiohttp import AsyncConnection as AsyncConnection - from ._async.transport import AsyncTransport as AsyncTransport - from .helpers import AWSV4SignerAsyncAuth as AWSV4SignerAsyncAuth - from .helpers import AWSV4SignerAuth as AWSV4SignerAuth -except (ImportError, SyntaxError): - pass - -VERSION: Tuple[int, int, int] -__version__: Tuple[int, int, int] -__versionstr__: str diff --git a/opensearchpy/_async/__init__.py b/opensearchpy/_async/__init__.py index 7e52ae22..392fa5bd 100644 --- a/opensearchpy/_async/__init__.py +++ b/opensearchpy/_async/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/_extra_imports.py b/opensearchpy/_async/_extra_imports.py index 5fd19461..e19a11a9 100644 --- 
a/opensearchpy/_async/_extra_imports.py +++ b/opensearchpy/_async/_extra_imports.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/client/__init__.py b/opensearchpy/_async/client/__init__.py index 57f56b0f..279fda37 100644 --- a/opensearchpy/_async/client/__init__.py +++ b/opensearchpy/_async/client/__init__.py @@ -25,12 +25,25 @@ # specific language governing permissions and limitations # under the License. + +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + from __future__ import unicode_literals import logging +from typing import Any, Type from ..transport import AsyncTransport, TransportError from .cat import CatClient +from .client import Client from .cluster import ClusterClient from .dangling_indices import DanglingIndicesClient from .features import FeaturesClient @@ -39,15 +52,16 @@ from .nodes import NodesClient from .plugins import PluginsClient from .remote import RemoteClient +from .remote_store import RemoteStoreClient from .security import SecurityClient from .snapshot import SnapshotClient from .tasks import TasksClient -from .utils import SKIP_IN_PATH, _bulk_body, _make_path, _normalize_hosts, query_params +from .utils import SKIP_IN_PATH, _bulk_body, _make_path, query_params logger = logging.getLogger("opensearch") -class AsyncOpenSearch(object): +class AsyncOpenSearch(Client): """ OpenSearch client. Provides a straightforward mapping from Python to OpenSearch REST endpoints. @@ -172,7 +186,19 @@ def default(self, obj): """ - def __init__(self, hosts=None, transport_class=AsyncTransport, **kwargs): + # include PIT functions inside _patch.py + from ._patch import ( # type: ignore + create_point_in_time, + delete_point_in_time, + list_all_point_in_time, + ) + + def __init__( + self, + hosts: Any = None, + transport_class: Type[AsyncTransport] = AsyncTransport, + **kwargs: Any + ) -> None: """ :arg hosts: list of nodes, or a single node, we should connect to. Node should be a dictionary ({"host": "localhost", "port": 9200}), @@ -187,7 +213,7 @@ class as kwargs, or a string in the format of ``host[:port]`` which will be :class:`~opensearchpy.Transport` class and, subsequently, to the :class:`~opensearchpy.Connection` instances. 
""" - self.transport = transport_class(_normalize_hosts(hosts), **kwargs) + super().__init__(hosts, transport_class, **kwargs) # namespaced clients for compatibility with API names self.cat = CatClient(self) @@ -200,15 +226,16 @@ class as kwargs, or a string in the format of ``host[:port]`` which will be self.security = SecurityClient(self) self.snapshot = SnapshotClient(self) self.tasks = TasksClient(self) + self.remote_store = RemoteStoreClient(self) self.features = FeaturesClient(self) self.plugins = PluginsClient(self) - def __repr__(self): + def __repr__(self) -> Any: try: # get a list of all connections - cons = self.transport.hosts + cons: Any = self.transport.hosts # truncate to 5 if there are too many if len(cons) > 5: cons = cons[:5] + ["..."] @@ -217,21 +244,25 @@ def __repr__(self): # probably operating on custom transport and connection_pool, ignore return super(AsyncOpenSearch, self).__repr__() - async def __aenter__(self): + async def __aenter__(self) -> Any: if hasattr(self.transport, "_async_call"): await self.transport._async_call() return self - async def __aexit__(self, *_): + async def __aexit__(self, *_: Any) -> None: await self.close() - async def close(self): + async def close(self) -> None: """Closes the Transport and all internal connections""" await self.transport.close() # AUTO-GENERATED-API-DEFINITIONS # @query_params() - async def ping(self, params=None, headers=None): + async def ping( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns whether the cluster is running. @@ -244,7 +275,11 @@ async def ping(self, params=None, headers=None): return False @query_params() - async def info(self, params=None, headers=None): + async def info( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns basic information about the cluster. @@ -262,31 +297,38 @@ async def info(self, params=None, headers=None): "version_type", "wait_for_active_shards", ) - async def create(self, index, id, body, params=None, headers=None): + async def create( + self, + index: Any, + id: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates a new document in the index. Returns a 409 response when a document with a same ID already exists in the index. - :arg index: The name of the index - :arg id: Document ID + :arg index: Index name. + :arg id: Document ID. :arg body: The document :arg pipeline: The pipeline id to preprocess incoming documents - with + with. :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then - do nothing with refreshes. Valid choices: true, false, wait_for - :arg routing: Specific routing value - :arg timeout: Explicit operation timeout - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte + do nothing with refreshes. Valid choices are true, false, wait_for. + :arg routing: Routing value. + :arg timeout: Operation timeout. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the index operation. Defaults - to 1, meaning the primary shard only. 
Set to `all` for all shard copies, + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total - number of copies for the shard (number of replicas + 1) + number of copies for the shard (number of replicas + 1). Default is 1. """ for param in (index, id, body): if param in SKIP_IN_PATH: @@ -311,51 +353,54 @@ async def create(self, index, id, body, params=None, headers=None): "version_type", "wait_for_active_shards", ) - async def index(self, index, body, id=None, params=None, headers=None): + async def index( + self, + index: Any, + body: Any, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates a document in an index. - :arg index: The name of the index + :arg index: Index name. :arg body: The document - :arg id: Document ID - :arg if_primary_term: only perform the index operation if the - last operation that has changed the document has the specified primary - term - :arg if_seq_no: only perform the index operation if the last - operation that has changed the document has the specified sequence - number + :arg id: Document ID. + :arg if_primary_term: only perform the operation if the last + operation that has changed the document has the specified primary term. + :arg if_seq_no: only perform the operation if the last operation + that has changed the document has the specified sequence number. :arg op_type: Explicit operation type. Defaults to `index` for requests with an explicit document ID, and to `create`for requests - without an explicit document ID Valid choices: index, create + without an explicit document ID. Valid choices are index, create. :arg pipeline: The pipeline id to preprocess incoming documents - with + with. :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then - do nothing with refreshes. Valid choices: true, false, wait_for + do nothing with refreshes. Valid choices are true, false, wait_for. :arg require_alias: When true, requires destination to be an - alias. Default is false - :arg routing: Specific routing value - :arg timeout: Explicit operation timeout - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte + alias. Default is false. + :arg routing: Routing value. + :arg timeout: Operation timeout. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the index operation. Defaults - to 1, meaning the primary shard only. Set to `all` for all shard copies, + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total - number of copies for the shard (number of replicas + 1) + number of copies for the shard (number of replicas + 1). Default is 1. 
""" for param in (index, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") - doc_type = "_doc" - return await self.transport.perform_request( "POST" if id in SKIP_IN_PATH else "PUT", - _make_path(index, doc_type, id), + _make_path(index, "_doc", id), params=params, headers=headers, body=body, @@ -372,36 +417,42 @@ async def index(self, index, body, id=None, params=None, headers=None): "timeout", "wait_for_active_shards", ) - async def bulk(self, body, index=None, params=None, headers=None): + async def bulk( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to perform multiple index/update/delete operations in a single request. :arg body: The operation definition and data (action-data pairs), separated by newlines - :arg index: Default index for items which don't provide one + :arg index: Default index for items which don't provide one. :arg _source: True or false to return the _source field or not, or default list of fields to return, can be overridden on each sub- - request + request. :arg _source_excludes: Default list of fields to exclude from - the returned _source field, can be overridden on each sub-request + the returned _source field, can be overridden on each sub-request. :arg _source_includes: Default list of fields to extract and - return from the _source field, can be overridden on each sub-request + return from the _source field, can be overridden on each sub-request. :arg pipeline: The pipeline id to preprocess incoming documents - with + with. :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then - do nothing with refreshes. Valid choices: true, false, wait_for + do nothing with refreshes. Valid choices are true, false, wait_for. :arg require_alias: Sets require_alias for all incoming - documents. Defaults to unset (false) - :arg routing: Specific routing value - :arg timeout: Explicit operation timeout + documents. Default is false. + :arg routing: Routing value. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the bulk operation. Defaults - to 1, meaning the primary shard only. Set to `all` for all shard copies, + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total - number of copies for the shard (number of replicas + 1) + number of copies for the shard (number of replicas + 1). Default is 1. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -416,14 +467,20 @@ async def bulk(self, body, index=None, params=None, headers=None): ) @query_params() - async def clear_scroll(self, body=None, scroll_id=None, params=None, headers=None): + async def clear_scroll( + self, + body: Any = None, + scroll_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Explicitly clears the search context for a scroll. - :arg body: A comma-separated list of scroll IDs to clear if none + :arg body: Comma-separated list of scroll IDs to clear if none was specified via the scroll_id parameter - :arg scroll_id: A comma-separated list of scroll IDs to clear + :arg scroll_id: Comma-separated list of scroll IDs to clear. 
""" if scroll_id in SKIP_IN_PATH and body in SKIP_IN_PATH: raise ValueError("You need to supply scroll_id or body.") @@ -452,42 +509,49 @@ async def clear_scroll(self, body=None, scroll_id=None, params=None, headers=Non "routing", "terminate_after", ) - async def count(self, body=None, index=None, params=None, headers=None): + async def count( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns number of documents matching a query. - :arg body: A query to restrict the results specified with the + :arg body: Query to restrict the results specified with the Query DSL (optional) - :arg index: A comma-separated list of indices to restrict the - results + :arg index: Comma-separated list of indices to restrict the + results. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg analyze_wildcard: Specify whether wildcard and prefix - queries should be analyzed (default: false) - :arg analyzer: The analyzer to use for the query string + queries should be analyzed. Default is false. + :arg analyzer: The analyzer to use for the query string. :arg default_operator: The default operator for query string - query (AND or OR) Valid choices: AND, OR Default: OR + query (AND or OR). Valid choices are AND, OR. :arg df: The field to use as default where no field prefix is - given in the query string + given in the query string. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_throttled: Whether specified concrete, expanded or - aliased indices should be ignored when throttled + aliased indices should be ignored when throttled. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg lenient: Specify whether format-based query failures (such - as providing text to a numeric field) should be ignored + as providing text to a numeric field) should be ignored. :arg min_score: Include only documents with a specific `_score` - value in the result + value in the result. :arg preference: Specify the node or shard the operation should - be performed on (default: random) - :arg q: Query in the Lucene query string syntax - :arg routing: A comma-separated list of specific routing values - :arg terminate_after: The maximum count for each shard, upon - reaching which the query execution will terminate early + be performed on. Default is random. + :arg q: Query in the Lucene query string syntax. + :arg routing: Comma-separated list of specific routing values. + :arg terminate_after: The maximum number of documents to collect + for each shard, upon reaching which the query execution will terminate + early. """ return await self.transport.perform_request( "POST", @@ -507,42 +571,44 @@ async def count(self, body=None, index=None, params=None, headers=None): "version_type", "wait_for_active_shards", ) - async def delete(self, index, id, params=None, headers=None): + async def delete( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Removes a document from the index. 
- :arg index: The name of the index - :arg id: The document ID - :arg if_primary_term: only perform the delete operation if the - last operation that has changed the document has the specified primary - term - :arg if_seq_no: only perform the delete operation if the last - operation that has changed the document has the specified sequence - number + :arg index: Index name. + :arg id: Document ID. + :arg if_primary_term: only perform the operation if the last + operation that has changed the document has the specified primary term. + :arg if_seq_no: only perform the operation if the last operation + that has changed the document has the specified sequence number. :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then - do nothing with refreshes. Valid choices: true, false, wait_for - :arg routing: Specific routing value - :arg timeout: Explicit operation timeout - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte, force + do nothing with refreshes. Valid choices are true, false, wait_for. + :arg routing: Routing value. + :arg timeout: Operation timeout. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the delete operation. - Defaults to 1, meaning the primary shard only. Set to `all` for all - shard copies, otherwise set to any non-negative value less than or equal - to the total number of copies for the shard (number of replicas + 1) + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, + otherwise set to any non-negative value less than or equal to the total + number of copies for the shard (number of replicas + 1). Default is 1. """ for param in (index, id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") - doc_type = "_doc" - return await self.transport.perform_request( - "DELETE", _make_path(index, doc_type, id), params=params, headers=headers + "DELETE", _make_path(index, "_doc", id), params=params, headers=headers ) @query_params( @@ -580,81 +646,87 @@ async def delete(self, index, id, params=None, headers=None): "wait_for_active_shards", "wait_for_completion", ) - async def delete_by_query(self, index, body, params=None, headers=None): + async def delete_by_query( + self, + index: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes documents matching the provided query. - :arg index: A comma-separated list of index names to search; use - `_all` or empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg body: The search definition using the Query DSL :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. 
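A minimal sketch of `delete()` as annotated above (illustrative names; assumes the document exists on a local cluster):

    import asyncio
    from opensearchpy import AsyncOpenSearch

    async def main() -> None:
        client = AsyncOpenSearch(hosts=[{"host": "localhost", "port": 9200}])
        # Raises opensearchpy.exceptions.NotFoundError when the document is absent.
        resp = await client.delete(index="movies", id="1", refresh="wait_for")
        print(resp["result"])  # "deleted"
        await client.close()

    asyncio.run(main())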
+        :arg _source_excludes: List of fields to exclude from the
+            returned _source field.
+        :arg _source_includes: List of fields to extract and return from
+            the _source field.
         :arg allow_no_indices: Whether to ignore if a wildcard indices
             expression resolves into no concrete indices. (This includes `_all`
-            string or when no indices have been specified)
+            string or when no indices have been specified).
         :arg analyze_wildcard: Specify whether wildcard and prefix
-            queries should be analyzed (default: false)
-        :arg analyzer: The analyzer to use for the query string
-        :arg conflicts: What to do when the delete by query hits version
-            conflicts? Valid choices: abort, proceed Default: abort
+            queries should be analyzed. Default is false.
+        :arg analyzer: The analyzer to use for the query string.
+        :arg conflicts: What to do when the operation encounters version
+            conflicts. Valid choices are abort, proceed.
         :arg default_operator: The default operator for query string
-            query (AND or OR) Valid choices: AND, OR Default: OR
+            query (AND or OR). Valid choices are AND, OR.
         :arg df: The field to use as default where no field prefix is
-            given in the query string
+            given in the query string.
         :arg expand_wildcards: Whether to expand wildcard expression to
-            concrete indices that are open, closed or both. Valid choices: open,
-            closed, hidden, none, all Default: open
-        :arg from_: Starting offset (default: 0)
+            concrete indices that are open, closed or both. Valid choices are all,
+            open, closed, hidden, none.
+        :arg from_: Starting offset. Default is 0.
         :arg ignore_unavailable: Whether specified concrete indices
-            should be ignored when unavailable (missing or closed)
+            should be ignored when unavailable (missing or closed).
         :arg lenient: Specify whether format-based query failures (such
-            as providing text to a numeric field) should be ignored
+            as providing text to a numeric field) should be ignored.
         :arg max_docs: Maximum number of documents to process (default:
-            all documents)
+            all documents).
         :arg preference: Specify the node or shard the operation should
-            be performed on (default: random)
-        :arg q: Query in the Lucene query string syntax
-        :arg refresh: Should the effected indexes be refreshed?
+            be performed on. Default is random.
+        :arg q: Query in the Lucene query string syntax.
+        :arg refresh: Refresh the shard containing the document before
+            performing the operation.
         :arg request_cache: Specify if request cache should be used for
-            this request or not, defaults to index level setting
+            this request or not, defaults to index level setting.
         :arg requests_per_second: The throttle for this request in sub-
-            requests per second. -1 means no throttle.
-        :arg routing: A comma-separated list of specific routing values
+            requests per second. -1 means no throttle. Default is 0.
+        :arg routing: Comma-separated list of specific routing values.
         :arg scroll: Specify how long a consistent view of the index
-            should be maintained for scrolled search
-        :arg scroll_size: Size on the scroll request powering the delete
-            by query Default: 100
+            should be maintained for scrolled search.
+        :arg scroll_size: Size on the scroll request powering the
+            operation. Default is 100.
         :arg search_timeout: Explicit timeout for each search request.
             Defaults to no timeout.
-        :arg search_type: Search operation type Valid choices:
-            query_then_fetch, dfs_query_then_fetch
-        :arg size: Deprecated, please use `max_docs` instead
+        :arg search_type: Search operation type. Valid choices are
+            query_then_fetch, dfs_query_then_fetch.
+        :arg size: Deprecated, please use `max_docs` instead.
         :arg slices: The number of slices this task should be divided
             into. Defaults to 1, meaning the task isn't sliced into subtasks. Can be
-            set to `auto`. Default: 1
-        :arg sort: A comma-separated list of <field>:<direction> pairs
+            set to `auto`. Default is 1.
+        :arg sort: Comma-separated list of <field>:<direction> pairs.
         :arg stats: Specific 'tag' of the request for logging and
-            statistical purposes
+            statistical purposes.
         :arg terminate_after: The maximum number of documents to collect
             for each shard, upon reaching which the query execution will terminate
             early.
         :arg timeout: Time each individual bulk request should wait for
-            shards that are unavailable. Default: 1m
-        :arg version: Specify whether to return document version as part
-            of a hit
+            shards that are unavailable. Default is 1m.
+        :arg version: Whether to return document version as part of a
+            hit.
         :arg wait_for_active_shards: Sets the number of shard copies
-            that must be active before proceeding with the delete by query
-            operation. Defaults to 1, meaning the primary shard only. Set to `all`
-            for all shard copies, otherwise set to any non-negative value less than
-            or equal to the total number of copies for the shard (number of replicas
-            + 1)
-        :arg wait_for_completion: Should the request should block until
-            the delete by query is complete. Default: True
+            that must be active before proceeding with the operation. Defaults to 1,
+            meaning the primary shard only. Set to `all` for all shard copies,
+            otherwise set to any non-negative value less than or equal to the total
+            number of copies for the shard (number of replicas + 1). Default is 1.
+        :arg wait_for_completion: Should this request wait until the
+            operation has completed before returning. Default is True.
         """
         # from is a reserved word so it cannot be used, use from_ instead
         if "from_" in params:
@@ -673,15 +745,20 @@ async def delete_by_query(self, index, body, params=None, headers=None):
         )

     @query_params("requests_per_second")
-    async def delete_by_query_rethrottle(self, task_id, params=None, headers=None):
+    async def delete_by_query_rethrottle(
+        self,
+        task_id: Any,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Changes the number of requests per second for a particular Delete By Query
         operation.

-        :arg task_id: The task id to rethrottle
-        :arg requests_per_second: The throttle to set on this request in
-            floating sub-requests per second. -1 means set no throttle.
+        :arg task_id: The task id to rethrottle.
+        :arg requests_per_second: The throttle for this request in sub-
+            requests per second. -1 means no throttle.
         """
         if task_id in SKIP_IN_PATH:
             raise ValueError("Empty value passed for a required argument 'task_id'.")
@@ -693,16 +770,24 @@ async def delete_by_query_rethrottle(self, task_id, params=None, headers=None):
             headers=headers,
         )

-    @query_params("master_timeout", "cluster_manager_timeout", "timeout")
-    async def delete_script(self, id, params=None, headers=None):
+    @query_params("cluster_manager_timeout", "master_timeout", "timeout")
+    async def delete_script(
+        self,
+        id: Any,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Deletes a script.

-        :arg id: Script ID
-        :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master
-        :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager
-        :arg timeout: Explicit operation timeout
+        :arg id: Script ID.
+        :arg cluster_manager_timeout: Operation timeout for connection
+            to cluster-manager node.
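A hedged sketch of the `delete_by_query()` call documented above (illustrative query; the `conflicts` and `slices` values match the choices listed in the docstring):

    import asyncio
    from opensearchpy import AsyncOpenSearch

    async def main() -> None:
        client = AsyncOpenSearch(hosts=[{"host": "localhost", "port": 9200}])
        resp = await client.delete_by_query(
            index="movies",
            body={"query": {"range": {"year": {"lt": 1990}}}},
            conflicts="proceed",  # skip version conflicts instead of aborting
            slices="auto",        # let the cluster pick the number of subtasks
        )
        print(resp["deleted"], resp["version_conflicts"])
        await client.close()

    asyncio.run(main())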
+ :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") @@ -723,40 +808,44 @@ async def delete_script(self, id, params=None, headers=None): "version", "version_type", ) - async def exists(self, index, id, params=None, headers=None): + async def exists( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a document exists in an index. - :arg index: The name of the index - :arg id: The document ID + :arg index: Index name. + :arg id: Document ID. :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. :arg preference: Specify the node or shard the operation should - be performed on (default: random) + be performed on. Default is random. :arg realtime: Specify whether to perform the operation in - realtime or search mode + realtime or search mode. :arg refresh: Refresh the shard containing the document before - performing the operation - :arg routing: Specific routing value - :arg stored_fields: A comma-separated list of stored fields to - return in the response - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte, force + performing the operation. + :arg routing: Routing value. + :arg stored_fields: Comma-separated list of stored fields to + return. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. """ for param in (index, id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") - doc_type = "_doc" - return await self.transport.perform_request( - "HEAD", _make_path(index, doc_type, id), params=params, headers=headers + "HEAD", _make_path(index, "_doc", id), params=params, headers=headers ) @query_params( @@ -770,29 +859,35 @@ async def exists(self, index, id, params=None, headers=None): "version", "version_type", ) - async def exists_source(self, index, id, params=None, headers=None): + async def exists_source( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a document source exists in an index. - :arg index: The name of the index - :arg id: The document ID + :arg index: Index name. + :arg id: Document ID. :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. 
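A minimal sketch of the boolean `exists()` check annotated above (illustrative names, same local-cluster assumptions):

    import asyncio
    from opensearchpy import AsyncOpenSearch

    async def main() -> None:
        client = AsyncOpenSearch(hosts=[{"host": "localhost", "port": 9200}])
        # HEAD request; the client maps the 200/404 status to a boolean.
        found = await client.exists(index="movies", id="1")
        print(found)
        await client.close()

    asyncio.run(main())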
:arg preference: Specify the node or shard the operation should
-            be performed on (default: random)
+            be performed on. Default is random.
         :arg realtime: Specify whether to perform the operation in
-            realtime or search mode
+            realtime or search mode.
         :arg refresh: Refresh the shard containing the document before
-            performing the operation
-        :arg routing: Specific routing value
-        :arg version: Explicit version number for concurrency control
-        :arg version_type: Specific version type Valid choices:
-            internal, external, external_gte, force
+            performing the operation.
+        :arg routing: Routing value.
+        :arg version: Explicit version number for concurrency control.
+        :arg version_type: Specific version type. Valid choices are
+            internal, external, external_gte, force.
         """
         for param in (index, id):
             if param in SKIP_IN_PATH:
@@ -818,35 +913,42 @@ async def exists_source(self, index, id, params=None, headers=None):
         "routing",
         "stored_fields",
     )
-    async def explain(self, index, id, body=None, params=None, headers=None):
+    async def explain(
+        self,
+        index: Any,
+        id: Any,
+        body: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Returns information about why a specific document matches (or doesn't match) a query.

-        :arg index: The name of the index
-        :arg id: The document ID
+        :arg index: Index name.
+        :arg id: Document ID.
         :arg body: The query definition using the Query DSL
         :arg _source: True or false to return the _source field or not,
-            or a list of fields to return
-        :arg _source_excludes: A list of fields to exclude from the
-            returned _source field
-        :arg _source_includes: A list of fields to extract and return
-            from the _source field
+            or a list of fields to return.
+        :arg _source_excludes: List of fields to exclude from the
+            returned _source field.
+        :arg _source_includes: List of fields to extract and return from
+            the _source field.
         :arg analyze_wildcard: Specify whether wildcards and prefix
-            queries in the query string query should be analyzed (default: false)
-        :arg analyzer: The analyzer for the query string query
+            queries in the query string query should be analyzed. Default is false.
+        :arg analyzer: The analyzer to use for the query string.
         :arg default_operator: The default operator for query string
-            query (AND or OR) Valid choices: AND, OR Default: OR
-        :arg df: The default field for query string query (default:
-            _all)
+            query (AND or OR). Valid choices are AND, OR.
+        :arg df: The default field for query string query. Default is
+            _all.
         :arg lenient: Specify whether format-based query failures (such
-            as providing text to a numeric field) should be ignored
+            as providing text to a numeric field) should be ignored.
         :arg preference: Specify the node or shard the operation should
-            be performed on (default: random)
-        :arg q: Query in the Lucene query string syntax
-        :arg routing: Specific routing value
-        :arg stored_fields: A comma-separated list of stored fields to
-            return in the response
+            be performed on. Default is random.
+        :arg q: Query in the Lucene query string syntax.
+        :arg routing: Routing value.
+        :arg stored_fields: Comma-separated list of stored fields to
+            return.
""" for param in (index, id): if param in SKIP_IN_PATH: @@ -865,26 +967,32 @@ async def explain(self, index, id, body=None, params=None, headers=None): "ignore_unavailable", "include_unmapped", ) - async def field_caps(self, body=None, index=None, params=None, headers=None): + async def field_caps( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns the information about the capabilities of fields among multiple indices. :arg body: An index filter specified with the Query DSL - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg fields: A comma-separated list of field names + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg fields: Comma-separated list of field names. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg include_unmapped: Indicates whether unmapped fields should - be included in the response. + be included in the response. Default is false. """ return await self.transport.perform_request( "POST", @@ -906,51 +1014,63 @@ async def field_caps(self, body=None, index=None, params=None, headers=None): "version", "version_type", ) - async def get(self, index, id, params=None, headers=None): + async def get( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a document. - :arg index: The name of the index - :arg id: The document ID + :arg index: Index name. + :arg id: Document ID. :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. :arg preference: Specify the node or shard the operation should - be performed on (default: random) + be performed on. Default is random. :arg realtime: Specify whether to perform the operation in - realtime or search mode + realtime or search mode. :arg refresh: Refresh the shard containing the document before - performing the operation - :arg routing: Specific routing value - :arg stored_fields: A comma-separated list of stored fields to - return in the response - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte, force + performing the operation. + :arg routing: Routing value. + :arg stored_fields: Comma-separated list of stored fields to + return. + :arg version: Explicit version number for concurrency control. 
+ :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. """ for param in (index, id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") - doc_type = "_doc" - return await self.transport.perform_request( - "GET", _make_path(index, doc_type, id), params=params, headers=headers + "GET", _make_path(index, "_doc", id), params=params, headers=headers ) - @query_params("master_timeout", "cluster_manager_timeout") - async def get_script(self, id, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout") + async def get_script( + self, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a script. - :arg id: Script ID - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + :arg id: Script ID. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") @@ -970,29 +1090,35 @@ async def get_script(self, id, params=None, headers=None): "version", "version_type", ) - async def get_source(self, index, id, params=None, headers=None): + async def get_source( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns the source of a document. - :arg index: The name of the index - :arg id: The document ID + :arg index: Index name. + :arg id: Document ID. :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. :arg preference: Specify the node or shard the operation should - be performed on (default: random) + be performed on. Default is random. :arg realtime: Specify whether to perform the operation in - realtime or search mode + realtime or search mode. :arg refresh: Refresh the shard containing the document before - performing the operation - :arg routing: Specific routing value - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte, force + performing the operation. + :arg routing: Routing value. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. """ for param in (index, id): if param in SKIP_IN_PATH: @@ -1014,30 +1140,36 @@ async def get_source(self, index, id, params=None, headers=None): "routing", "stored_fields", ) - async def mget(self, body, index=None, params=None, headers=None): + async def mget( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to get multiple documents in one request. 
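A hedged sketch of `get()` with source filtering, as annotated above (the field list is illustrative):

    import asyncio
    from opensearchpy import AsyncOpenSearch

    async def main() -> None:
        client = AsyncOpenSearch(hosts=[{"host": "localhost", "port": 9200}])
        # Fetch a single document, trimming _source to the named fields.
        doc = await client.get(index="movies", id="1", _source_includes="title,year")
        print(doc["_source"])
        await client.close()

    asyncio.run(main())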
:arg body: Document identifiers; can be either `docs`
-            (containing full document information) or `ids` (when index and type is
-            provided in the URL.
-        :arg index: The name of the index
+            (containing full document information) or `ids` (when index is provided
+            in the URL).
+        :arg index: Index name.
         :arg _source: True or false to return the _source field or not,
-            or a list of fields to return
-        :arg _source_excludes: A list of fields to exclude from the
-            returned _source field
-        :arg _source_includes: A list of fields to extract and return
-            from the _source field
+            or a list of fields to return.
+        :arg _source_excludes: List of fields to exclude from the
+            returned _source field.
+        :arg _source_includes: List of fields to extract and return from
+            the _source field.
         :arg preference: Specify the node or shard the operation should
-            be performed on (default: random)
+            be performed on. Default is random.
         :arg realtime: Specify whether to perform the operation in
-            realtime or search mode
+            realtime or search mode.
         :arg refresh: Refresh the shard containing the document before
-            performing the operation
-        :arg routing: Specific routing value
-        :arg stored_fields: A comma-separated list of stored fields to
-            return in the response
+            performing the operation.
+        :arg routing: Routing value.
+        :arg stored_fields: Comma-separated list of stored fields to
+            return.
         """
         if body in SKIP_IN_PATH:
             raise ValueError("Empty value passed for a required argument 'body'.")
@@ -1059,37 +1191,44 @@ async def mget(self, body, index=None, params=None, headers=None):
         "search_type",
         "typed_keys",
     )
-    async def msearch(self, body, index=None, params=None, headers=None):
+    async def msearch(
+        self,
+        body: Any,
+        index: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Allows to execute several search operations in one request.

         :arg body: The request definitions (metadata-search request
             definition pairs), separated by newlines
-        :arg index: A comma-separated list of index names to use as
-            default
+        :arg index: Comma-separated list of indices to use as default.
         :arg ccs_minimize_roundtrips: Indicates whether network round-
             trips should be minimized as part of cross-cluster search requests
-            execution Default: true
+            execution. Default is True.
         :arg max_concurrent_searches: Controls the maximum number of
-            concurrent searches the multi search api will execute
+            concurrent searches the multi search api will execute.
         :arg max_concurrent_shard_requests: The number of concurrent
             shard requests each sub search executes concurrently per node. This
             value should be used to limit the impact of the search on the cluster in
-            order to limit the number of concurrent shard requests Default: 5
-        :arg pre_filter_shard_size: A threshold that enforces a pre-
-            filter roundtrip to prefilter search shards based on query rewriting if
-            the number of shards the search request expands to exceeds the
-            threshold. This filter roundtrip can limit the number of shards
-            significantly if for instance a shard can not match any documents based
-            on its rewrite method ie. if date filters are mandatory to match but the
-            shard bounds and the query are disjoint.
+            order to limit the number of concurrent shard requests. Default is 5.
+        :arg pre_filter_shard_size: Threshold that enforces a pre-filter
+            round-trip to prefilter search shards based on query rewriting if the
+            number of shards the search request expands to exceeds the threshold.
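A minimal sketch of the `mget()` call annotated above, using the short `ids` body form (the ids are illustrative):

    import asyncio
    from opensearchpy import AsyncOpenSearch

    async def main() -> None:
        client = AsyncOpenSearch(hosts=[{"host": "localhost", "port": 9200}])
        # One round-trip for several documents of the same index.
        resp = await client.mget(index="movies", body={"ids": ["1", "2", "3"]})
        for doc in resp["docs"]:
            print(doc["_id"], doc.get("found"))
        await client.close()

    asyncio.run(main())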
+ This filter round-trip can limit the number of shards significantly if + for instance a shard can not match any documents based on its rewrite + method ie. if date filters are mandatory to match but the shard bounds + and the query are disjoint. :arg rest_total_hits_as_int: Indicates whether hits.total should - be rendered as an integer or an object in the rest search response - :arg search_type: Search operation type Valid choices: - query_then_fetch, dfs_query_then_fetch + be rendered as an integer or an object in the rest search response. + Default is false. + :arg search_type: Search operation type. Valid choices are + query_then_fetch, query_and_fetch, dfs_query_then_fetch, + dfs_query_and_fetch. :arg typed_keys: Specify whether aggregation and suggester names - should be prefixed by their respective types in the response + should be prefixed by their respective types in the response. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -1110,26 +1249,33 @@ async def msearch(self, body, index=None, params=None, headers=None): "search_type", "typed_keys", ) - async def msearch_template(self, body, index=None, params=None, headers=None): + async def msearch_template( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to execute several search template operations in one request. :arg body: The request definitions (metadata-search request definition pairs), separated by newlines - :arg index: A comma-separated list of index names to use as - default + :arg index: Comma-separated list of indices to use as default. :arg ccs_minimize_roundtrips: Indicates whether network round- trips should be minimized as part of cross-cluster search requests - execution Default: true + execution. Default is True. :arg max_concurrent_searches: Controls the maximum number of - concurrent searches the multi search api will execute + concurrent searches the multi search api will execute. :arg rest_total_hits_as_int: Indicates whether hits.total should - be rendered as an integer or an object in the rest search response - :arg search_type: Search operation type Valid choices: - query_then_fetch, dfs_query_then_fetch + be rendered as an integer or an object in the rest search response. + Default is false. + :arg search_type: Search operation type. Valid choices are + query_then_fetch, query_and_fetch, dfs_query_then_fetch, + dfs_query_and_fetch. :arg typed_keys: Specify whether aggregation and suggester names - should be prefixed by their respective types in the response + should be prefixed by their respective types in the response. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -1157,7 +1303,13 @@ async def msearch_template(self, body, index=None, params=None, headers=None): "version", "version_type", ) - async def mtermvectors(self, body=None, index=None, params=None, headers=None): + async def mtermvectors( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns multiple termvectors in one request. @@ -1169,34 +1321,34 @@ async def mtermvectors(self, body=None, index=None, params=None, headers=None): :arg field_statistics: Specifies if document count, sum of document frequencies and sum of total term frequencies should be returned. Applies to all returned documents unless otherwise specified - in body "params" or "docs". 
Default: True
-        :arg fields: A comma-separated list of fields to return. Applies
-            to all returned documents unless otherwise specified in body "params" or
-            "docs".
-        :arg ids: A comma-separated list of documents ids. You must
-            define ids as parameter or set "ids" or "docs" in the request body
+            in body 'params' or 'docs'. Default is True.
+        :arg fields: Comma-separated list of fields to return. Applies
+            to all returned documents unless otherwise specified in body 'params' or
+            'docs'.
+        :arg ids: Comma-separated list of document ids. You must define
+            ids as parameter or set 'ids' or 'docs' in the request body.
         :arg offsets: Specifies if term offsets should be returned.
             Applies to all returned documents unless otherwise specified in body
-            "params" or "docs". Default: True
+            'params' or 'docs'. Default is True.
         :arg payloads: Specifies if term payloads should be returned.
             Applies to all returned documents unless otherwise specified in body
-            "params" or "docs". Default: True
+            'params' or 'docs'. Default is True.
         :arg positions: Specifies if term positions should be returned.
             Applies to all returned documents unless otherwise specified in body
-            "params" or "docs". Default: True
+            'params' or 'docs'. Default is True.
         :arg preference: Specify the node or shard the operation should
-            be performed on (default: random) .Applies to all returned documents
-            unless otherwise specified in body "params" or "docs".
+            be performed on. Applies to all returned documents unless otherwise
+            specified in body 'params' or 'docs'. Default is random.
         :arg realtime: Specifies if requests are real-time as opposed to
-            near-real-time (default: true).
-        :arg routing: Specific routing value. Applies to all returned
-            documents unless otherwise specified in body "params" or "docs".
+            near-real-time. Default is True.
+        :arg routing: Routing value. Applies to all returned documents
+            unless otherwise specified in body 'params' or 'docs'.
         :arg term_statistics: Specifies if total term frequency and
             document frequency should be returned. Applies to all returned documents
-            unless otherwise specified in body "params" or "docs".
-        :arg version: Explicit version number for concurrency control
-        :arg version_type: Specific version type Valid choices:
-            internal, external, external_gte, force
+            unless otherwise specified in body 'params' or 'docs'. Default is false.
+        :arg version: Explicit version number for concurrency control.
+        :arg version_type: Specific version type. Valid choices are
+            internal, external, external_gte, force.
         """
         path = _make_path(index, "_mtermvectors")
@@ -1204,18 +1356,28 @@ async def mtermvectors(self, body=None, index=None, params=None, headers=None):
             "POST", path, params=params, headers=headers, body=body
         )

-    @query_params("master_timeout", "cluster_manager_timeout", "timeout")
-    async def put_script(self, id, body, context=None, params=None, headers=None):
+    @query_params("cluster_manager_timeout", "master_timeout", "timeout")
+    async def put_script(
+        self,
+        id: Any,
+        body: Any,
+        context: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Creates or updates a script.

-        :arg id: Script ID
+        :arg id: Script ID.
         :arg body: The document
-        :arg context: Context name to compile script against
-        :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master
-        :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager
-        :arg timeout: Explicit operation timeout
+        :arg context: Script context.
+        :arg cluster_manager_timeout: Operation timeout for connection
+            to cluster-manager node.
+        :arg master_timeout (Deprecated: To promote inclusive language,
+            use 'cluster_manager_timeout' instead.): Operation timeout for
+            connection to master node.
+        :arg timeout: Operation timeout.
         """
         for param in (id, body):
             if param in SKIP_IN_PATH:
@@ -1232,31 +1394,32 @@ async def put_script(self, id, body, context=None, params=None, headers=None):
     @query_params(
         "allow_no_indices", "expand_wildcards", "ignore_unavailable", "search_type"
     )
-    async def rank_eval(self, body, index=None, params=None, headers=None):
+    async def rank_eval(
+        self,
+        body: Any,
+        index: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Allows to evaluate the quality of ranked search results over a set of typical
-        search queries
+        search queries.

-        .. warning::
-
-        This API is **experimental** so may include breaking changes
-        or be removed in a future version
-
         :arg body: The ranking evaluation search definition, including
             search requests, document ratings and ranking metric definition.
-        :arg index: A comma-separated list of index names to search; use
-            `_all` or empty string to perform the operation on all indices
+        :arg index: Comma-separated list of indices; use `_all` or empty
+            string to perform the operation on all indices.
         :arg allow_no_indices: Whether to ignore if a wildcard indices
             expression resolves into no concrete indices. (This includes `_all`
-            string or when no indices have been specified)
+            string or when no indices have been specified).
         :arg expand_wildcards: Whether to expand wildcard expression to
-            concrete indices that are open, closed or both. Valid choices: open,
-            closed, hidden, none, all Default: open
+            concrete indices that are open, closed or both. Valid choices are all,
+            open, closed, hidden, none.
         :arg ignore_unavailable: Whether specified concrete indices
-            should be ignored when unavailable (missing or closed)
-        :arg search_type: Search operation type Valid choices:
-            query_then_fetch, dfs_query_then_fetch
+            should be ignored when unavailable (missing or closed).
+        :arg search_type: Search operation type. Valid choices are
+            query_then_fetch, dfs_query_then_fetch.
         """
         if body in SKIP_IN_PATH:
             raise ValueError("Empty value passed for a required argument 'body'.")
@@ -1279,7 +1442,12 @@ async def rank_eval(self, body, index=None, params=None, headers=None):
         "wait_for_active_shards",
         "wait_for_completion",
     )
-    async def reindex(self, body, params=None, headers=None):
+    async def reindex(
+        self,
+        body: Any,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Allows to copy documents from one index to another, optionally filtering the
         source documents by a query, changing the destination index settings, or
@@ -1289,24 +1457,24 @@ async def reindex(self, body, params=None, headers=None):
         :arg body: The search definition using the Query DSL and the
             prototype for the index request.
         :arg max_docs: Maximum number of documents to process (default:
-            all documents)
-        :arg refresh: Should the affected indexes be refreshed?
-        :arg requests_per_second: The throttle to set on this request in
-            sub-requests per second. -1 means no throttle.
-        :arg scroll: Control how long to keep the search context alive
-            Default: 5m
+            all documents).
+        :arg refresh: Should the affected indexes be refreshed?
+        :arg requests_per_second: The throttle for this request in sub-
+            requests per second. -1 means no throttle. Default is 0.
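A hedged sketch of `put_script()` (and the matching `get_script()` annotated earlier); the script id and Painless source are illustrative:

    import asyncio
    from opensearchpy import AsyncOpenSearch

    async def main() -> None:
        client = AsyncOpenSearch(hosts=[{"host": "localhost", "port": 9200}])
        # Store a named script, then read it back.
        await client.put_script(
            id="year-boost",
            body={"script": {"lang": "painless", "source": "_score * doc['year'].value"}},
        )
        stored = await client.get_script(id="year-boost")
        print(stored["script"]["source"])
        await client.close()

    asyncio.run(main())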
+ :arg scroll: Specify how long a consistent view of the index + should be maintained for scrolled search. :arg slices: The number of slices this task should be divided into. Defaults to 1, meaning the task isn't sliced into subtasks. Can be - set to `auto`. Default: 1 + set to `auto`. Default is 1. :arg timeout: Time each individual bulk request should wait for - shards that are unavailable. Default: 1m + shards that are unavailable. Default is 1m. :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the reindex operation. - Defaults to 1, meaning the primary shard only. Set to `all` for all - shard copies, otherwise set to any non-negative value less than or equal - to the total number of copies for the shard (number of replicas + 1) - :arg wait_for_completion: Should the request should block until - the reindex is complete. Default: True + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, + otherwise set to any non-negative value less than or equal to the total + number of copies for the shard (number of replicas + 1). Default is 1. + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. Default is True. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -1316,14 +1484,19 @@ async def reindex(self, body, params=None, headers=None): ) @query_params("requests_per_second") - async def reindex_rethrottle(self, task_id, params=None, headers=None): + async def reindex_rethrottle( + self, + task_id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Changes the number of requests per second for a particular Reindex operation. - :arg task_id: The task id to rethrottle - :arg requests_per_second: The throttle to set on this request in - floating sub-requests per second. -1 means set no throttle. + :arg task_id: The task id to rethrottle. + :arg requests_per_second: The throttle for this request in sub- + requests per second. -1 means no throttle. """ if task_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'task_id'.") @@ -1337,14 +1510,18 @@ async def reindex_rethrottle(self, task_id, params=None, headers=None): @query_params() async def render_search_template( - self, body=None, id=None, params=None, headers=None - ): + self, + body: Any = None, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to use the Mustache language to pre-render a search definition. :arg body: The search definition template and its params - :arg id: The id of the stored search template + :arg id: The id of the stored search template. """ return await self.transport.perform_request( "POST", @@ -1355,15 +1532,15 @@ async def render_search_template( ) @query_params() - async def scripts_painless_execute(self, body=None, params=None, headers=None): + async def scripts_painless_execute( + self, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Allows an arbitrary script to be executed and a result to be returned - + Allows an arbitrary script to be executed and a result to be returned. - .. 
warning::
-
-        This API is **experimental** so may include breaking changes
-        or be removed in a future version

         :arg body: The script to execute
         """
         return await self.transport.perform_request(
@@ -1376,18 +1553,25 @@ async def scripts_painless_execute(self, body=None, params=None, headers=None):
         )

     @query_params("rest_total_hits_as_int", "scroll")
-    async def scroll(self, body=None, scroll_id=None, params=None, headers=None):
+    async def scroll(
+        self,
+        body: Any = None,
+        scroll_id: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Allows to retrieve a large number of results from a single search request.

         :arg body: The scroll ID if not passed by URL or query
             parameter.
-        :arg scroll_id: The scroll ID for scrolled search
+        :arg scroll_id: Scroll ID.
         :arg rest_total_hits_as_int: Indicates whether hits.total should
-            be rendered as an integer or an object in the rest search response
+            be rendered as an integer or an object in the rest search response.
+            Default is false.
         :arg scroll: Specify how long a consistent view of the index
-            should be maintained for scrolled search
+            should be maintained for scrolled search.
         """
         if scroll_id in SKIP_IN_PATH and body in SKIP_IN_PATH:
             raise ValueError("You need to supply scroll_id or body.")
@@ -1420,7 +1604,6 @@ async def scroll(self, body=None, scroll_id=None, params=None, headers=None):
         "ignore_unavailable",
         "lenient",
         "max_concurrent_shard_requests",
-        "min_compatible_shard_node",
         "pre_filter_shard_size",
         "preference",
         "q",
@@ -1445,107 +1628,111 @@ async def scroll(self, body=None, scroll_id=None, params=None, headers=None):
         "typed_keys",
         "version",
     )
-    async def search(self, body=None, index=None, params=None, headers=None):
+    async def search(
+        self,
+        body: Any = None,
+        index: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Returns results matching a query.

         :arg body: The search definition using the Query DSL
-        :arg index: A comma-separated list of index names to search; use
-            `_all` or empty string to perform the operation on all indices
+        :arg index: Comma-separated list of indices; use `_all` or empty
+            string to perform the operation on all indices.
         :arg _source: True or false to return the _source field or not,
-            or a list of fields to return
-        :arg _source_excludes: A list of fields to exclude from the
-            returned _source field
-        :arg _source_includes: A list of fields to extract and return
-            from the _source field
+            or a list of fields to return.
+        :arg _source_excludes: List of fields to exclude from the
+            returned _source field.
+        :arg _source_includes: List of fields to extract and return from
+            the _source field.
         :arg allow_no_indices: Whether to ignore if a wildcard indices
             expression resolves into no concrete indices. (This includes `_all`
-            string or when no indices have been specified)
+            string or when no indices have been specified).
         :arg allow_partial_search_results: Indicate if an error should
-            be returned if there is a partial search failure or timeout Default:
-            True
+            be returned if there is a partial search failure or timeout. Default is
+            True.
         :arg analyze_wildcard: Specify whether wildcard and prefix
-            queries should be analyzed (default: false)
-        :arg analyzer: The analyzer to use for the query string
+            queries should be analyzed. Default is false.
+        :arg analyzer: The analyzer to use for the query string.
         :arg batched_reduce_size: The number of shard results that
             should be reduced at once on the coordinating node.
This value should be used as a protection mechanism to reduce the memory overhead per search request if the potential number of shards in the request can be large. - Default: 512 + Default is 512. :arg ccs_minimize_roundtrips: Indicates whether network round- trips should be minimized as part of cross-cluster search requests - execution Default: true + execution. Default is True. :arg default_operator: The default operator for query string - query (AND or OR) Valid choices: AND, OR Default: OR + query (AND or OR). Valid choices are AND, OR. :arg df: The field to use as default where no field prefix is - given in the query string - :arg docvalue_fields: A comma-separated list of fields to return - as the docvalue representation of a field for each hit + given in the query string. + :arg docvalue_fields: Comma-separated list of fields to return + as the docvalue representation of a field for each hit. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg explain: Specify whether to return detailed information - about score computation as part of a hit - :arg from_: Starting offset (default: 0) + about score computation as part of a hit. + :arg from_: Starting offset. Default is 0. :arg ignore_throttled: Whether specified concrete, expanded or - aliased indices should be ignored when throttled + aliased indices should be ignored when throttled. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg lenient: Specify whether format-based query failures (such - as providing text to a numeric field) should be ignored + as providing text to a numeric field) should be ignored. :arg max_concurrent_shard_requests: The number of concurrent shard requests per node this search executes concurrently. This value should be used to limit the impact of the search on the cluster in order - to limit the number of concurrent shard requests Default: 5 - :arg min_compatible_shard_node: The minimum compatible version - that all shards involved in search should have for this request to be - successful - :arg pre_filter_shard_size: A threshold that enforces a pre- - filter roundtrip to prefilter search shards based on query rewriting if - the number of shards the search request expands to exceeds the - threshold. This filter roundtrip can limit the number of shards - significantly if for instance a shard can not match any documents based - on its rewrite method ie. if date filters are mandatory to match but the - shard bounds and the query are disjoint. + to limit the number of concurrent shard requests. Default is 5. + :arg pre_filter_shard_size: Threshold that enforces a pre-filter + round-trip to prefilter search shards based on query rewriting if the + number of shards the search request expands to exceeds the threshold. + This filter round-trip can limit the number of shards significantly if + for instance a shard can not match any documents based on its rewrite + method ie. if date filters are mandatory to match but the shard bounds + and the query are disjoint. :arg preference: Specify the node or shard the operation should - be performed on (default: random) - :arg q: Query in the Lucene query string syntax + be performed on. Default is random. 
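A minimal sketch of the `scroll()` loop annotated earlier, paired with `clear_scroll()` (the page size and keep-alive are illustrative):

    import asyncio
    from opensearchpy import AsyncOpenSearch

    async def main() -> None:
        client = AsyncOpenSearch(hosts=[{"host": "localhost", "port": 9200}])
        # Open a scrolled search, then keep pulling pages until one comes back empty.
        page = await client.search(
            index="movies", scroll="2m", size=100, body={"query": {"match_all": {}}}
        )
        scroll_id = page["_scroll_id"]
        while page["hits"]["hits"]:
            # process page["hits"]["hits"] here
            page = await client.scroll(body={"scroll_id": scroll_id, "scroll": "2m"})
            scroll_id = page["_scroll_id"]
        await client.clear_scroll(scroll_id=scroll_id)
        await client.close()

    asyncio.run(main())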
+        :arg q: Query in the Lucene query string syntax.
         :arg request_cache: Specify if request cache should be used for
-            this request or not, defaults to index level setting
+            this request or not, defaults to index level setting.
         :arg rest_total_hits_as_int: Indicates whether hits.total should
-            be rendered as an integer or an object in the rest search response
-        :arg routing: A comma-separated list of specific routing values
+            be rendered as an integer or an object in the rest search response.
+            Default is false.
+        :arg routing: Comma-separated list of specific routing values.
         :arg scroll: Specify how long a consistent view of the index
-            should be maintained for scrolled search
-        :arg search_type: Search operation type Valid choices:
-            query_then_fetch, dfs_query_then_fetch
+            should be maintained for scrolled search.
+        :arg search_type: Search operation type. Valid choices are
+            query_then_fetch, dfs_query_then_fetch.
         :arg seq_no_primary_term: Specify whether to return sequence
-            number and primary term of the last modification of each hit
-        :arg size: Number of hits to return (default: 10)
-        :arg sort: A comma-separated list of <field>:<direction> pairs
+            number and primary term of the last modification of each hit.
+        :arg size: Number of hits to return. Default is 10.
+        :arg sort: Comma-separated list of <field>:<direction> pairs.
         :arg stats: Specific 'tag' of the request for logging and
-            statistical purposes
-        :arg stored_fields: A comma-separated list of stored fields to
-            return as part of a hit
-        :arg suggest_field: Specify which field to use for suggestions
-        :arg suggest_mode: Specify suggest mode Valid choices: missing,
-            popular, always Default: missing
-        :arg suggest_size: How many suggestions to return in response
+            statistical purposes.
+        :arg stored_fields: Comma-separated list of stored fields to
+            return.
+        :arg suggest_field: Specify which field to use for suggestions.
+        :arg suggest_mode: Specify suggest mode. Valid choices are
+            missing, popular, always.
+        :arg suggest_size: How many suggestions to return in response.
         :arg suggest_text: The source text for which the suggestions
-            should be returned
+            should be returned.
         :arg terminate_after: The maximum number of documents to collect
             for each shard, upon reaching which the query execution will terminate
             early.
-        :arg timeout: Explicit operation timeout
+        :arg timeout: Operation timeout.
         :arg track_scores: Whether to calculate and return scores even
-            if they are not used for sorting
+            if they are not used for sorting.
         :arg track_total_hits: Indicate if the number of documents that
-            match the query should be tracked
+            match the query should be tracked.
         :arg typed_keys: Specify whether aggregation and suggester names
-            should be prefixed by their respective types in the response
-        :arg version: Specify whether to return document version as part
-            of a hit
+            should be prefixed by their respective types in the response.
+        :arg version: Whether to return document version as part of a
+            hit.
         """
         # from is a reserved word so it cannot be used, use from_ instead
         if "from_" in params:
@@ -1567,27 +1754,32 @@ async def search(self, body=None, index=None, params=None, headers=None):
         "preference",
         "routing",
     )
-    async def search_shards(self, index=None, params=None, headers=None):
+    async def search_shards(
+        self,
+        index: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Returns information about the indices and shards that a search request would
         be executed against.
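A hedged sketch of the `search()` call annotated above, including the `from_` spelling the wrapper expects (the query is illustrative):

    import asyncio
    from opensearchpy import AsyncOpenSearch

    async def main() -> None:
        client = AsyncOpenSearch(hosts=[{"host": "localhost", "port": 9200}])
        resp = await client.search(
            index="movies",
            body={"query": {"match": {"title": "moneyball"}}},
            size=5,
            from_=0,  # `from` is reserved in Python, hence the trailing underscore
        )
        print(resp["hits"]["total"]["value"])
        await client.close()

    asyncio.run(main())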
- :arg index: A comma-separated list of index names to search; use - `_all` or empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) + from cluster-manager node. Default is false. :arg preference: Specify the node or shard the operation should - be performed on (default: random) - :arg routing: Specific routing value + be performed on. Default is random. + :arg routing: Routing value. """ return await self.transport.perform_request( "GET", _make_path(index, "_search_shards"), params=params, headers=headers @@ -1608,41 +1800,49 @@ async def search_shards(self, index=None, params=None, headers=None): "search_type", "typed_keys", ) - async def search_template(self, body, index=None, params=None, headers=None): + async def search_template( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to use the Mustache language to pre-render a search definition. :arg body: The search definition template and its params - :arg index: A comma-separated list of index names to search; use - `_all` or empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg ccs_minimize_roundtrips: Indicates whether network round- trips should be minimized as part of cross-cluster search requests - execution Default: true + execution. Default is True. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg explain: Specify whether to return detailed information - about score computation as part of a hit + about score computation as part of a hit. :arg ignore_throttled: Whether specified concrete, expanded or - aliased indices should be ignored when throttled + aliased indices should be ignored when throttled. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg preference: Specify the node or shard the operation should - be performed on (default: random) - :arg profile: Specify whether to profile the query execution + be performed on. Default is random. 
+ :arg profile: Specify whether to profile the query execution. :arg rest_total_hits_as_int: Indicates whether hits.total should - be rendered as an integer or an object in the rest search response - :arg routing: A comma-separated list of specific routing values + be rendered as an integer or an object in the rest search response. + Default is false. + :arg routing: Comma-separated list of specific routing values. :arg scroll: Specify how long a consistent view of the index - should be maintained for scrolled search - :arg search_type: Search operation type Valid choices: - query_then_fetch, dfs_query_then_fetch + should be maintained for scrolled search. + :arg search_type: Search operation type. Valid choices are + query_then_fetch, query_and_fetch, dfs_query_then_fetch, + dfs_query_and_fetch. :arg typed_keys: Specify whether aggregation and suggester names - should be prefixed by their respective types in the response + should be prefixed by their respective types in the response. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -1668,7 +1868,14 @@ async def search_template(self, body, index=None, params=None, headers=None): "version", "version_type", ) - async def termvectors(self, index, body=None, id=None, params=None, headers=None): + async def termvectors( + self, + index: Any, + body: Any = None, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information and statistics about terms in the fields of a particular document. @@ -1677,28 +1884,28 @@ async def termvectors(self, index, body=None, id=None, params=None, headers=None :arg index: The index in which the document resides. :arg body: Define parameters and or supply a document to get termvectors for. See documentation. - :arg id: The id of the document, when not specified a doc param - should be supplied. + :arg id: Document ID. When not specified a doc param should be + supplied. :arg field_statistics: Specifies if document count, sum of document frequencies and sum of total term frequencies should be - returned. Default: True - :arg fields: A comma-separated list of fields to return. + returned. Default is True. + :arg fields: Comma-separated list of fields to return. :arg offsets: Specifies if term offsets should be returned. - Default: True + Default is True. :arg payloads: Specifies if term payloads should be returned. - Default: True + Default is True. :arg positions: Specifies if term positions should be returned. - Default: True + Default is True. :arg preference: Specify the node or shard the operation should - be performed on (default: random). + be performed on. Default is random. :arg realtime: Specifies if request is real-time as opposed to - near-real-time (default: true). - :arg routing: Specific routing value. + near-real-time. Default is True. + :arg routing: Routing value. :arg term_statistics: Specifies if total term frequency and - document frequency should be returned. - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte, force + document frequency should be returned. Default is false. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. 
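# A sketch of `search_template` with an inline Mustache source, assuming the
# hypothetical "my-index" with a "title" field; "query_string" is just the
# template parameter name chosen for this example.
from opensearchpy import AsyncOpenSearch


async def demo_search_template(client: AsyncOpenSearch) -> None:
    resp = await client.search_template(
        index="my-index",
        body={
            # Inline template; stored templates can be referenced by "id" instead.
            "source": {"query": {"match": {"title": "{{query_string}}"}}},
            "params": {"query_string": "hello world"},
        },
    )
    print(resp["hits"]["hits"])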
""" if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") @@ -1723,43 +1930,48 @@ async def termvectors(self, index, body=None, id=None, params=None, headers=None "timeout", "wait_for_active_shards", ) - async def update(self, index, id, body, params=None, headers=None): + async def update( + self, + index: Any, + id: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates a document with a script or partial document. - :arg index: The name of the index - :arg id: Document ID + :arg index: Index name. + :arg id: Document ID. :arg body: The request definition requires either `script` or partial `doc` :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field - :arg if_primary_term: only perform the update operation if the - last operation that has changed the document has the specified primary - term - :arg if_seq_no: only perform the update operation if the last - operation that has changed the document has the specified sequence - number - :arg lang: The script language (default: painless) + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. + :arg if_primary_term: only perform the operation if the last + operation that has changed the document has the specified primary term. + :arg if_seq_no: only perform the operation if the last operation + that has changed the document has the specified sequence number. + :arg lang: The script language. Default is painless. :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then - do nothing with refreshes. Valid choices: true, false, wait_for - :arg require_alias: When true, requires destination is an alias. - Default is false + do nothing with refreshes. Valid choices are true, false, wait_for. + :arg require_alias: When true, requires destination to be an + alias. Default is false. :arg retry_on_conflict: Specify how many times should the - operation be retried when a conflict occurs (default: 0) - :arg routing: Specific routing value - :arg timeout: Explicit operation timeout + operation be retried when a conflict occurs. Default is 0. + :arg routing: Routing value. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the update operation. - Defaults to 1, meaning the primary shard only. Set to `all` for all - shard copies, otherwise set to any non-negative value less than or equal - to the total number of copies for the shard (number of replicas + 1) + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, + otherwise set to any non-negative value less than or equal to the total + number of copies for the shard (number of replicas + 1). Default is 1. 
""" for param in (index, id, body): if param in SKIP_IN_PATH: @@ -1804,90 +2016,92 @@ async def update(self, index, id, body, params=None, headers=None): "terminate_after", "timeout", "version", - "version_type", "wait_for_active_shards", "wait_for_completion", ) - async def update_by_query(self, index, body=None, params=None, headers=None): + async def update_by_query( + self, + index: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Performs an update on every document in the index without changing the source, for example to pick up a mapping change. - :arg index: A comma-separated list of index names to search; use - `_all` or empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg body: The search definition using the Query DSL :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg analyze_wildcard: Specify whether wildcard and prefix - queries should be analyzed (default: false) - :arg analyzer: The analyzer to use for the query string - :arg conflicts: What to do when the update by query hits version - conflicts? Valid choices: abort, proceed Default: abort + queries should be analyzed. Default is false. + :arg analyzer: The analyzer to use for the query string. + :arg conflicts: What to do when the operation encounters version + conflicts?. Valid choices are abort, proceed. :arg default_operator: The default operator for query string - query (AND or OR) Valid choices: AND, OR Default: OR + query (AND or OR). Valid choices are AND, OR. :arg df: The field to use as default where no field prefix is - given in the query string + given in the query string. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg from_: Starting offset (default: 0) + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg from_: Starting offset. Default is 0. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg lenient: Specify whether format-based query failures (such - as providing text to a numeric field) should be ignored + as providing text to a numeric field) should be ignored. :arg max_docs: Maximum number of documents to process (default: - all documents) - :arg pipeline: Ingest pipeline to set on index requests made by - this action. (default: none) + all documents). + :arg pipeline: The pipeline id to preprocess incoming documents + with. 
        :arg preference: Specify the node or shard the operation should
-            be performed on (default: random)
-        :arg q: Query in the Lucene query string syntax
-        :arg refresh: Should the affected indexes be refreshed?
+            be performed on. Default is random.
+        :arg q: Query in the Lucene query string syntax.
+        :arg refresh: Should the affected indexes be refreshed?
        :arg request_cache: Specify if request cache should be used for
-            this request or not, defaults to index level setting
-        :arg requests_per_second: The throttle to set on this request in
-            sub-requests per second. -1 means no throttle.
-        :arg routing: A comma-separated list of specific routing values
+            this request or not, defaults to index level setting.
+        :arg requests_per_second: The throttle for this request in sub-
+            requests per second. -1 means no throttle. Default is 0.
+        :arg routing: Comma-separated list of specific routing values.
        :arg scroll: Specify how long a consistent view of the index
-            should be maintained for scrolled search
-        :arg scroll_size: Size on the scroll request powering the update
-            by query Default: 100
+            should be maintained for scrolled search.
+        :arg scroll_size: Size on the scroll request powering the
+            operation. Default is 100.
        :arg search_timeout: Explicit timeout for each search request.
            Defaults to no timeout.
-        :arg search_type: Search operation type Valid choices:
-            query_then_fetch, dfs_query_then_fetch
-        :arg size: Deprecated, please use `max_docs` instead
+        :arg search_type: Search operation type. Valid choices are
+            query_then_fetch, dfs_query_then_fetch.
+        :arg size: Deprecated, please use `max_docs` instead.
        :arg slices: The number of slices this task should be divided
            into. Defaults to 1, meaning the task isn't sliced into subtasks. Can be
-            set to `auto`. Default: 1
-        :arg sort: A comma-separated list of <field>:<direction> pairs
+            set to `auto`. Default is 1.
+        :arg sort: Comma-separated list of <field>:<direction> pairs.
        :arg stats: Specific 'tag' of the request for logging and
-            statistical purposes
+            statistical purposes.
        :arg terminate_after: The maximum number of documents to collect
            for each shard, upon reaching which the query execution will
            terminate early.
        :arg timeout: Time each individual bulk request should wait for
-            shards that are unavailable. Default: 1m
-        :arg version: Specify whether to return document version as part
-            of a hit
-        :arg version_type: Should the document increment the version
-            number (internal) on hit or not (reindex)
+            shards that are unavailable. Default is 1m.
+        :arg version: Whether to return document version as part of a
+            hit.
        :arg wait_for_active_shards: Sets the number of shard copies
-            that must be active before proceeding with the update by query
-            operation. Defaults to 1, meaning the primary shard only. Set to `all`
-            for all shard copies, otherwise set to any non-negative value less than
-            or equal to the total number of copies for the shard (number of replicas
-            + 1)
-        :arg wait_for_completion: Should the request should block until
-            the update by query operation is complete. Default: True
+            that must be active before proceeding with the operation. Defaults to 1,
+            meaning the primary shard only. Set to `all` for all shard copies,
+            otherwise set to any non-negative value less than or equal to the total
+            number of copies for the shard (number of replicas + 1). Default is 1.
+        :arg wait_for_completion: Whether this request should wait until
+            the operation has completed before returning. Default is True.
""" # from is a reserved word so it cannot be used, use from_ instead if "from_" in params: @@ -1905,15 +2119,20 @@ async def update_by_query(self, index, body=None, params=None, headers=None): ) @query_params("requests_per_second") - async def update_by_query_rethrottle(self, task_id, params=None, headers=None): + async def update_by_query_rethrottle( + self, + task_id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Changes the number of requests per second for a particular Update By Query operation. - :arg task_id: The task id to rethrottle - :arg requests_per_second: The throttle to set on this request in - floating sub-requests per second. -1 means set no throttle. + :arg task_id: The task id to rethrottle. + :arg requests_per_second: The throttle for this request in sub- + requests per second. -1 means no throttle. """ if task_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'task_id'.") @@ -1926,120 +2145,117 @@ async def update_by_query_rethrottle(self, task_id, params=None, headers=None): ) @query_params() - async def get_script_context(self, params=None, headers=None): + async def get_script_context( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns all script contexts. - - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version """ return await self.transport.perform_request( "GET", "/_script_context", params=params, headers=headers ) @query_params() - async def get_script_languages(self, params=None, headers=None): + async def get_script_languages( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Returns available script types, languages and contexts + Returns available script types, languages and contexts. - - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version """ return await self.transport.perform_request( "GET", "/_script_language", params=params, headers=headers ) - @query_params() - async def list_all_point_in_time(self, params=None, headers=None): + @query_params( + "allow_partial_pit_creation", + "expand_wildcards", + "keep_alive", + "preference", + "routing", + ) + async def create_pit( + self, + index: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Returns the list of point in times which are alive + Creates point in time context. + + + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. + :arg allow_partial_pit_creation: Allow if point in time can be + created with partial failures. + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg keep_alive: Specify the keep alive for point in time. + :arg preference: Specify the node or shard the operation should + be performed on. Default is random. + :arg routing: Comma-separated list of specific routing values. 
""" + if index in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'index'.") + return await self.transport.perform_request( - "GET", - _make_path("_search", "point_in_time", "_all"), + "POST", + _make_path(index, "_search", "point_in_time"), params=params, headers=headers, ) @query_params() - async def delete_point_in_time( - self, body=None, all=False, params=None, headers=None - ): + async def delete_all_pits( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Delete a point in time + Deletes all active point in time searches. - - :arg body: a point-in-time id to delete - :arg all: set it to `True` to delete all alive point in time. """ - path = ( - _make_path("_search", "point_in_time", "_all") - if all - else _make_path("_search", "point_in_time") - ) return await self.transport.perform_request( - "DELETE", path, params=params, headers=headers, body=body + "DELETE", "/_search/point_in_time/_all", params=params, headers=headers ) - @query_params( - "expand_wildcards", "ignore_unavailable", "keep_alive", "preference", "routing" - ) - async def create_point_in_time(self, index=None, params=None, headers=None): + @query_params() + async def delete_pit( + self, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Create a point in time that can be used in subsequent searches + Deletes one or more point in time searches based on the IDs passed. - :arg index: A comma-separated list of index names to create point - in time; use `_all` or empty string to perform the operation on all - indices - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg keep_alive: Specific the time to live for the point in time - :arg preference: Specify the node or shard the operation should - be performed on (default: random) - :arg routing: Specific routing value + :arg body: The point-in-time ids to be deleted """ return await self.transport.perform_request( - "POST", - _make_path(index, "_search", "point_in_time"), + "DELETE", + "/_search/point_in_time", params=params, headers=headers, + body=body, ) @query_params() - async def terms_enum(self, index, body=None, params=None, headers=None): + async def get_all_pits( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - The terms enum API can be used to discover terms in the index that begin with - the provided string. It is designed for low-latency look-ups used in auto- - complete scenarios. - + Lists all active point in time searches. - .. 
warning:: - - This API is **beta** so may include breaking changes - or be removed in a future version - - :arg index: A comma-separated list of index names to search; use - `_all` or empty string to perform the operation on all indices - :arg body: field name, string which is the prefix expected in - matching terms, timeout and size for max number of results """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'index'.") - return await self.transport.perform_request( - "POST", - _make_path(index, "_terms_enum"), - params=params, - headers=headers, - body=body, + "GET", "/_search/point_in_time/_all", params=params, headers=headers ) diff --git a/opensearchpy/_async/client/__init__.pyi b/opensearchpy/_async/client/__init__.pyi deleted file mode 100644 index 27a47ed9..00000000 --- a/opensearchpy/_async/client/__init__.pyi +++ /dev/null @@ -1,1133 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from __future__ import unicode_literals - -import logging -from typing import Any, Collection, MutableMapping, Optional, Tuple, Type, Union - -from ..transport import AsyncTransport -from .cat import CatClient -from .cluster import ClusterClient -from .dangling_indices import DanglingIndicesClient -from .features import FeaturesClient -from .indices import IndicesClient -from .ingest import IngestClient -from .nodes import NodesClient -from .remote import RemoteClient -from .security import SecurityClient -from .snapshot import SnapshotClient -from .tasks import TasksClient - -logger: logging.Logger - -class AsyncOpenSearch(object): - transport: AsyncTransport - - cat: CatClient - cluster: ClusterClient - features: FeaturesClient - indices: IndicesClient - ingest: IngestClient - nodes: NodesClient - remote: RemoteClient - security: SecurityClient - snapshot: SnapshotClient - tasks: TasksClient - def __init__( - self, - hosts: Any = ..., - transport_class: Type[AsyncTransport] = ..., - **kwargs: Any, - ) -> None: ... - def __repr__(self) -> str: ... - async def __aenter__(self) -> "AsyncOpenSearch": ... - async def __aexit__(self, *_: Any) -> None: ... - async def close(self) -> None: ... 
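# A sketch of the cluster-wide helpers: `get_all_pits` lists every active
# point in time and `delete_all_pits` closes them all (cluster assumed
# reachable, as in the earlier examples).
from opensearchpy import AsyncOpenSearch


async def demo_all_pits(client: AsyncOpenSearch) -> None:
    resp = await client.get_all_pits()
    print([p["pit_id"] for p in resp.get("pits", [])])
    await client.delete_all_pits()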
- # AUTO-GENERATED-API-DEFINITIONS # - async def ping( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - async def info( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def create( - self, - index: Any, - id: Any, - *, - body: Any, - pipeline: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def index( - self, - index: Any, - *, - body: Any, - id: Optional[Any] = ..., - if_primary_term: Optional[Any] = ..., - if_seq_no: Optional[Any] = ..., - op_type: Optional[Any] = ..., - pipeline: Optional[Any] = ..., - refresh: Optional[Any] = ..., - require_alias: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def bulk( - self, - *, - body: Any, - index: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - pipeline: Optional[Any] = ..., - refresh: Optional[Any] = ..., - require_alias: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def clear_scroll( - self, - *, - body: Optional[Any] = ..., - scroll_id: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def count( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_throttled: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - min_score: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - routing: Optional[Any] = ..., - terminate_after: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def delete( - self, - index: Any, - id: Any, - *, - if_primary_term: Optional[Any] = ..., - if_seq_no: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_by_query( - self, - index: Any, - *, - body: Any, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - conflicts: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - from_: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - max_docs: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - refresh: Optional[Any] = ..., - request_cache: Optional[Any] = ..., - requests_per_second: Optional[Any] = ..., - routing: Optional[Any] = ..., - scroll: Optional[Any] = ..., - scroll_size: Optional[Any] = ..., - search_timeout: Optional[Any] = ..., - search_type: Optional[Any] = ..., - size: Optional[Any] = ..., - slices: Optional[Any] = ..., - sort: Optional[Any] = ..., - stats: Optional[Any] = ..., - terminate_after: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_by_query_rethrottle( - self, - task_id: Any, - *, - requests_per_second: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def delete_script( - self, - id: Any, - *, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def exists( - self, - index: Any, - id: Any, - *, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - async def exists_source( - self, - index: Any, - id: Any, - *, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... 
- async def explain( - self, - index: Any, - id: Any, - *, - body: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - lenient: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - routing: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def field_caps( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - fields: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_unmapped: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get( - self, - index: Any, - id: Any, - *, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def get_script( - self, - id: Any, - *, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_source( - self, - index: Any, - id: Any, - *, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def mget( - self, - *, - body: Any, - index: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def msearch( - self, - *, - body: Any, - index: Optional[Any] = ..., - ccs_minimize_roundtrips: Optional[Any] = ..., - max_concurrent_searches: Optional[Any] = ..., - max_concurrent_shard_requests: Optional[Any] = ..., - pre_filter_shard_size: Optional[Any] = ..., - rest_total_hits_as_int: Optional[Any] = ..., - search_type: Optional[Any] = ..., - typed_keys: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def msearch_template( - self, - *, - body: Any, - index: Optional[Any] = ..., - ccs_minimize_roundtrips: Optional[Any] = ..., - max_concurrent_searches: Optional[Any] = ..., - rest_total_hits_as_int: Optional[Any] = ..., - search_type: Optional[Any] = ..., - typed_keys: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def mtermvectors( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - field_statistics: Optional[Any] = ..., - fields: Optional[Any] = ..., - ids: Optional[Any] = ..., - offsets: Optional[Any] = ..., - payloads: Optional[Any] = ..., - positions: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - routing: Optional[Any] = ..., - term_statistics: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def put_script( - self, - id: Any, - *, - body: Any, - context: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def rank_eval( - self, - *, - body: Any, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - search_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def reindex( - self, - *, - body: Any, - max_docs: Optional[Any] = ..., - refresh: Optional[Any] = ..., - requests_per_second: Optional[Any] = ..., - scroll: Optional[Any] = ..., - slices: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def reindex_rethrottle( - self, - task_id: Any, - *, - requests_per_second: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def render_search_template( - self, - *, - body: Optional[Any] = ..., - id: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def scripts_painless_execute( - self, - *, - body: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def scroll( - self, - *, - body: Optional[Any] = ..., - scroll_id: Optional[Any] = ..., - rest_total_hits_as_int: Optional[Any] = ..., - scroll: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def search( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - allow_partial_search_results: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - batched_reduce_size: Optional[Any] = ..., - ccs_minimize_roundtrips: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - docvalue_fields: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - explain: Optional[Any] = ..., - from_: Optional[Any] = ..., - ignore_throttled: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - max_concurrent_shard_requests: Optional[Any] = ..., - min_compatible_shard_node: Optional[Any] = ..., - pre_filter_shard_size: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - request_cache: Optional[Any] = ..., - rest_total_hits_as_int: Optional[Any] = ..., - routing: Optional[Any] = ..., - scroll: Optional[Any] = ..., - search_type: Optional[Any] = ..., - seq_no_primary_term: Optional[Any] = ..., - size: Optional[Any] = ..., - sort: Optional[Any] = ..., - stats: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - suggest_field: Optional[Any] = ..., - suggest_mode: Optional[Any] = ..., - suggest_size: Optional[Any] = ..., - suggest_text: Optional[Any] = ..., - terminate_after: Optional[Any] = ..., - timeout: Optional[Any] = ..., - track_scores: Optional[Any] = ..., - track_total_hits: Optional[Any] = ..., - typed_keys: Optional[Any] = ..., - version: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def search_shards( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - preference: Optional[Any] = ..., - routing: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def search_template( - self, - *, - body: Any, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - ccs_minimize_roundtrips: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - explain: Optional[Any] = ..., - ignore_throttled: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - preference: Optional[Any] = ..., - profile: Optional[Any] = ..., - rest_total_hits_as_int: Optional[Any] = ..., - routing: Optional[Any] = ..., - scroll: Optional[Any] = ..., - search_type: Optional[Any] = ..., - typed_keys: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def termvectors( - self, - index: Any, - *, - body: Optional[Any] = ..., - id: Optional[Any] = ..., - field_statistics: Optional[Any] = ..., - fields: Optional[Any] = ..., - offsets: Optional[Any] = ..., - payloads: Optional[Any] = ..., - positions: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - routing: Optional[Any] = ..., - term_statistics: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def update( - self, - index: Any, - id: Any, - *, - body: Any, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - if_primary_term: Optional[Any] = ..., - if_seq_no: Optional[Any] = ..., - lang: Optional[Any] = ..., - refresh: Optional[Any] = ..., - require_alias: Optional[Any] = ..., - retry_on_conflict: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def update_by_query( - self, - index: Any, - *, - body: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - conflicts: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - from_: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - max_docs: Optional[Any] = ..., - pipeline: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - refresh: Optional[Any] = ..., - request_cache: Optional[Any] = ..., - requests_per_second: Optional[Any] = ..., - routing: Optional[Any] = ..., - scroll: Optional[Any] = ..., - scroll_size: Optional[Any] = ..., - search_timeout: Optional[Any] = ..., - search_type: Optional[Any] = ..., - size: Optional[Any] = ..., - slices: Optional[Any] = ..., - sort: Optional[Any] = ..., - stats: Optional[Any] = ..., - terminate_after: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def update_by_query_rethrottle( - self, - task_id: Any, - *, - requests_per_second: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_script_context( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_script_languages( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def list_all_point_in_time( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_point_in_time( - self, - *, - body: Optional[Any] = ..., - all: Optional[bool] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def create_point_in_time( - self, - *, - index: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - keep_alive: Optional[Any] = ..., - preference: Optional[Any] = ..., - routing: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def terms_enum( - self, - index: Any, - *, - body: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/_async/client/_patch.py b/opensearchpy/_async/client/_patch.py new file mode 100644 index 00000000..cbf24e0b --- /dev/null +++ b/opensearchpy/_async/client/_patch.py @@ -0,0 +1,149 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + +import warnings +from typing import Any + +from .utils import SKIP_IN_PATH, query_params + + +@query_params() +async def list_all_point_in_time( + self: Any, params: Any = None, headers: Any = None +) -> Any: + """ + Returns the list of active point-in-time searches. + + .. warning:: + + This API will be removed in a future version. + Use the 'get_all_pits' API instead. + + """ + warnings.warn( + "The 'list_all_point_in_time' API is deprecated and will be removed in a future version. Use 'get_all_pits' API instead.", + DeprecationWarning, + ) + + return await self.get_all_pits(params=params, headers=headers) + + +@query_params( + "expand_wildcards", "ignore_unavailable", "keep_alive", "preference", "routing" +) +async def create_point_in_time( + self: Any, index: Any, params: Any = None, headers: Any = None +) -> Any: + """ + Create a point in time that can be used in subsequent searches. + + + :arg index: A comma-separated list of index names to open point + in time; use `_all` or empty string to perform the operation on all + indices + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both.
Valid choices: open, + closed, hidden, none, all Default: open + :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed) + :arg keep_alive: Specify the time to live for the point in time + :arg preference: Specify the node or shard the operation should + be performed on (default: random) + :arg routing: Specific routing value + + .. warning:: + + This API will be removed in a future version. + Use the 'create_pit' API instead. + + """ + warnings.warn( + "The 'create_point_in_time' API is deprecated and will be removed in a future version. Use 'create_pit' API instead.", + DeprecationWarning, + ) + + return await self.create_pit(index=index, params=params, headers=headers) + + +@query_params() +async def delete_point_in_time( + self: Any, + body: Any = None, + all: bool = False, + params: Any = None, + headers: Any = None, +) -> Any: + """ + Delete a point in time. + + + :arg body: A point-in-time ID to delete. + :arg all: Set to `True` to delete all active points in time. + + .. warning:: + + This API will be removed in a future version. + Use the 'delete_all_pits' or 'delete_pit' API instead. + + """ + warnings.warn( + "The 'delete_point_in_time' API is deprecated and will be removed in a future version. Use 'delete_all_pits' or 'delete_pit' API instead.", + DeprecationWarning, + ) + + if all: + return await self.delete_all_pits(params=params, headers=headers) + else: + return await self.delete_pit(body=body, params=params, headers=headers) + + +@query_params() +async def health_check(self: Any, params: Any = None, headers: Any = None) -> Any: + """ + Checks to see if the Security plugin is up and running. + + .. warning:: + + This API will be removed in a future version. + Use the 'health' API instead. + + """ + warnings.warn( + "The 'health_check' API in security client is deprecated and will be removed in a future version. Use 'health' API instead.", + DeprecationWarning, + ) + + return await self.health(params=params, headers=headers) + + +@query_params() +async def update_audit_config( + self: Any, body: Any, params: Any = None, headers: Any = None +) -> Any: + """ + Updates the audit configuration via a PUT call. + + .. warning:: + + This API will be removed in a future version. + Use the 'update_audit_configuration' API instead. + + """ + warnings.warn( + "The 'update_audit_config' API in security client is deprecated and will be removed in a future version. Use 'update_audit_configuration' API instead.", + DeprecationWarning, + ) + + if body in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'body'.") + + return await self.update_audit_configuration( + params=params, headers=headers, body=body + ) diff --git a/opensearchpy/_async/client/cat.py b/opensearchpy/_async/client/cat.py index f7268308..4310511c 100644 --- a/opensearchpy/_async/client/cat.py +++ b/opensearchpy/_async/client/cat.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,71 +26,106 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST.
+# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + +from typing import Any + from .utils import NamespacedClient, _make_path, query_params class CatClient(NamespacedClient): @query_params("expand_wildcards", "format", "h", "help", "local", "s", "v") - async def aliases(self, name=None, params=None, headers=None): + async def aliases( + self, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Shows information about currently configured aliases to indices including filter and routing infos. - :arg name: A comma-separated list of alias names to return + :arg name: Comma-separated list of alias names. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: all - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) + from cluster-manager node. Default is false. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( "GET", _make_path("_cat", "aliases", name), params=params, headers=headers ) + @query_params() + async def all_pit_segments( + self, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Lists all active point-in-time segments. + + """ + return await self.transport.perform_request( + "GET", "/_cat/pit_segments/_all", params=params, headers=headers + ) + @query_params( "bytes", + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", "v", ) - async def allocation(self, node_id=None, params=None, headers=None): + async def allocation( + self, + node_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Provides a snapshot of how many shards are allocated to each data node and how much disk space they are using. - :arg node_id: A comma-separated list of node IDs or names to - limit the returned information - :arg bytes: The unit in which to display byte values Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg node_id: Comma-separated list of node IDs or names to limit + the returned information. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. 
+ :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( "GET", @@ -98,57 +134,139 @@ async def allocation(self, node_id=None, params=None, headers=None): headers=headers, ) + @query_params( + "cluster_manager_timeout", + "format", + "h", + "help", + "local", + "master_timeout", + "s", + "v", + ) + async def cluster_manager( + self, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Returns information about the cluster-manager node. + + + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. + :arg local: Return local information, do not retrieve the state + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg s: Comma-separated list of column names or column aliases + to sort by. + :arg v: Verbose mode. Display column headers. Default is false. + """ + return await self.transport.perform_request( + "GET", "/_cat/cluster_manager", params=params, headers=headers + ) + @query_params("format", "h", "help", "s", "v") - async def count(self, index=None, params=None, headers=None): + async def count( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Provides quick access to the document count of the entire cluster, or individual indices. - :arg index: A comma-separated list of index names to limit the - returned information - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg index: Comma-separated list of indices to limit the + returned information. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. Default is false. 
""" return await self.transport.perform_request( "GET", _make_path("_cat", "count", index), params=params, headers=headers ) + @query_params("bytes", "format", "h", "help", "s", "v") + async def fielddata( + self, + fields: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Shows how much heap memory is currently being used by fielddata on every data + node in the cluster. + + + :arg fields: Comma-separated list of fields to return in the + output. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. + :arg s: Comma-separated list of column names or column aliases + to sort by. + :arg v: Verbose mode. Display column headers. Default is false. + """ + return await self.transport.perform_request( + "GET", + _make_path("_cat", "fielddata", fields), + params=params, + headers=headers, + ) + @query_params("format", "h", "help", "s", "time", "ts", "v") - async def health(self, params=None, headers=None): + async def health( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a concise representation of the cluster health. - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid - choices: d, h, m, s, ms, micros, nanos - :arg ts: Set to false to disable timestamping Default: True - :arg v: Verbose mode. Display column headers + to sort by. + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg ts: Set to false to disable timestamping. Default is True. + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( "GET", "/_cat/health", params=params, headers=headers ) @query_params("help", "s") - async def help(self, params=None, headers=None): + async def help( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns help for the Cat APIs. - :arg help: Return help information + :arg help: Return help information. Default is false. :arg s: Comma-separated list of column names or column aliases - to sort by + to sort by. """ return await self.transport.perform_request( "GET", "/_cat", params=params, headers=headers @@ -156,6 +274,7 @@ async def help(self, params=None, headers=None): @query_params( "bytes", + "cluster_manager_timeout", "expand_wildcards", "format", "h", @@ -164,488 +283,581 @@ async def help(self, params=None, headers=None): "include_unloaded_segments", "local", "master_timeout", - "cluster_manager_timeout", "pri", "s", "time", "v", ) - async def indices(self, index=None, params=None, headers=None): + async def indices( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about indices: number of primaries and replicas, document counts, disk size, ... 
- :arg index: A comma-separated list of index names to limit the - returned information - :arg bytes: The unit in which to display byte values Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb + :arg index: Comma-separated list of indices to limit the + returned information. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: all - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg health: A health status ("green", "yellow", or "red" to - filter only indices matching the specified health status Valid choices: - green, yellow, red - :arg help: Return help information + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg health: Health status ('green', 'yellow', or 'red') to + filter only indices matching the specified health status. Valid choices + are green, yellow, red. + :arg help: Return help information. Default is false. :arg include_unloaded_segments: If set to true segment stats will include stats for segments that are not currently loaded into - memory + memory. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg pri: Set to true to return stats only for primary shards + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg pri: Set to true to return stats only for primary shards. + Default is false. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers + to sort by. + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( "GET", _make_path("_cat", "indices", index), params=params, headers=headers ) @query_params( + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", "v", ) - async def master(self, params=None, headers=None): + async def master( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Returns information about the master node. + Returns information about the cluster-manager node. - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. 
+ :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. Default is false. """ from warnings import warn - warn("Deprecated: use `cluster_manager` instead") + warn( + "Deprecated: To promote inclusive language, please use '/_cat/cluster_manager' instead." + ) return await self.transport.perform_request( "GET", "/_cat/master", params=params, headers=headers ) - @query_params("format", "h", "help", "local", "cluster_manager", "s", "v") - async def cluster_manager(self, params=None, headers=None): + @query_params( + "cluster_manager_timeout", + "format", + "h", + "help", + "local", + "master_timeout", + "s", + "v", + ) + async def nodeattrs( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Returns information about the cluster_manager node. + Returns information about custom node attributes. - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( - "GET", "/_cat/cluster_manager", params=params, headers=headers + "GET", "/_cat/nodeattrs", params=params, headers=headers ) @query_params( "bytes", + "cluster_manager_timeout", "format", "full_id", "h", "help", - "include_unloaded_segments", "local", "master_timeout", - "cluster_manager_timeout", "s", "time", "v", ) - async def nodes(self, params=None, headers=None): + async def nodes( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns basic statistics about performance of cluster nodes. - :arg bytes: The unit in which to display byte values Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb - :arg format: a short version of the Accept header, e.g. json, - yaml + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. 
+ :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. :arg full_id: Return the full node ID instead of the shortened - version (default: false) - :arg h: Comma-separated list of column names to display - :arg help: Return help information - :arg include_unloaded_segments: If set to true segment stats - will include stats for segments that are not currently loaded into - memory - :arg local: Calculate the selected nodes using the local cluster - state rather than the state from master node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + version. Default is false. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. + :arg local (Deprecated: This parameter does not cause this API + to act locally.): Return local information, do not retrieve the state + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers + to sort by. + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( "GET", "/_cat/nodes", params=params, headers=headers ) @query_params( - "active_only", "bytes", "detailed", "format", "h", "help", "s", "time", "v" - ) - async def recovery(self, index=None, params=None, headers=None): - """ - Returns information about index shard recoveries, both on-going completed. - - - :arg index: Comma-separated list or wildcard expression of index - names to limit the returned information - :arg active_only: If `true`, the response only includes ongoing - shard recoveries - :arg bytes: The unit in which to display byte values Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb - :arg detailed: If `true`, the response includes detailed - information about shard recoveries - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information - :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers - """ - return await self.transport.perform_request( - "GET", _make_path("_cat", "recovery", index), params=params, headers=headers - ) - - @query_params( - "bytes", + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", "time", "v", ) - async def shards(self, index=None, params=None, headers=None): + async def pending_tasks( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Provides a detailed view of shard allocation on nodes. + Returns a concise representation of the cluster pending tasks. 
- :arg index: A comma-separated list of index names to limit the - returned information - :arg bytes: The unit in which to display byte values Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers + to sort by. + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( - "GET", _make_path("_cat", "shards", index), params=params, headers=headers + "GET", "/_cat/pending_tasks", params=params, headers=headers ) - @query_params("bytes", "format", "h", "help", "s", "v") - async def segments(self, index=None, params=None, headers=None): + @query_params() + async def pit_segments( + self, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Provides low-level information about the segments in the shards of an index. + List segments for one or several PITs. - :arg index: A comma-separated list of index names to limit the - returned information - :arg bytes: The unit in which to display byte values Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information - :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers """ return await self.transport.perform_request( - "GET", _make_path("_cat", "segments", index), params=params, headers=headers + "GET", "/_cat/pit_segments", params=params, headers=headers, body=body ) @query_params( + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", - "time", "v", ) - async def pending_tasks(self, params=None, headers=None): + async def plugins( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Returns a concise representation of the cluster pending tasks. + Returns information about installed plugins across nodes. - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node.
+ :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( - "GET", "/_cat/pending_tasks", params=params, headers=headers + "GET", "/_cat/plugins", params=params, headers=headers ) @query_params( + "active_only", "bytes", "detailed", "format", "h", "help", "s", "time", "v" + ) + async def recovery( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Returns information about index shard recoveries, both on-going and completed. + + + :arg index: Comma-separated list or wildcard expression of index + names to limit the returned information. + :arg active_only: If `true`, the response only includes ongoing + shard recoveries. Default is false. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. + :arg detailed: If `true`, the response includes detailed + information about shard recoveries. Default is false. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. + :arg s: Comma-separated list of column names or column aliases + to sort by. + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. + """ + return await self.transport.perform_request( + "GET", _make_path("_cat", "recovery", index), params=params, headers=headers + ) + + @query_params( + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", "v", ) - async def repositories( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Returns cluster-wide thread pool statistics per node. By default the active, - queue and rejected statistics are returned for all thread pools. + Returns information about snapshot repositories registered in the cluster. - :arg thread_pool_patterns: A comma-separated list of regular- - expressions to filter the thread pools in the output - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml.
+ :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg size: The multiplier in which to display values Valid - choices: , k, m, g, t, p - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( - "GET", - _make_path("_cat", "thread_pool", thread_pool_patterns), - params=params, - headers=headers, + "GET", "/_cat/repositories", params=params, headers=headers ) - @query_params("bytes", "format", "h", "help", "s", "v") - async def fielddata(self, fields=None, params=None, headers=None): + @query_params( + "active_only", + "bytes", + "completed_only", + "detailed", + "format", + "h", + "help", + "s", + "shards", + "time", + "v", + ) + async def segment_replication( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Shows how much heap memory is currently being used by fielddata on every data - node in the cluster. + Returns information about both on-going and latest completed Segment + Replication events. - :arg fields: A comma-separated list of fields to return in the - output - :arg bytes: The unit in which to display byte values Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg index: Comma-separated list or wildcard expression of index + names to limit the returned information. + :arg active_only: If `true`, the response only includes ongoing + segment replication events. Default is false. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. + :arg completed_only: If `true`, the response only includes + latest completed segment replication events. Default is false. + :arg detailed: If `true`, the response includes detailed + information about segment replications. Default is false. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg shards: Comma-separated list of shards to display. + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. 
""" return await self.transport.perform_request( "GET", - _make_path("_cat", "fielddata", fields), + _make_path("_cat", "segment_replication", index), params=params, headers=headers, ) @query_params( + "bytes", + "cluster_manager_timeout", "format", "h", "help", - "include_bootstrap", - "local", "master_timeout", - "cluster_manager_timeout", "s", "v", ) - async def plugins(self, params=None, headers=None): + async def segments( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Returns information about installed plugins across nodes node. + Provides low-level information about the segments in the shards of an index. - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information - :arg include_bootstrap: Include bootstrap plugins in the - response - :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + :arg index: Comma-separated list of indices to limit the + returned information. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( - "GET", "/_cat/plugins", params=params, headers=headers + "GET", _make_path("_cat", "segments", index), params=params, headers=headers ) @query_params( + "bytes", + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", + "time", "v", ) - async def nodeattrs(self, params=None, headers=None): + async def shards( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Returns information about custom node attributes. + Provides a detailed view of shard allocation on nodes. - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg index: Comma-separated list of indices to limit the + returned information. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. 
:arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( - "GET", "/_cat/nodeattrs", params=params, headers=headers + "GET", _make_path("_cat", "shards", index), params=params, headers=headers ) @query_params( + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", + "size", "v", ) - async def repositories(self, params=None, headers=None): + async def thread_pool( + self, + thread_pool_patterns: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Returns information about snapshot repositories registered in the cluster. + Returns cluster-wide thread pool statistics per node. By default the active, + queue and rejected statistics are returned for all thread pools. - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg thread_pool_patterns: Comma-separated list of regular- + expressions to filter the thread pools in the output. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from master node - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg size: The multiplier in which to display values. + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( - "GET", "/_cat/repositories", params=params, headers=headers + "GET", + _make_path("_cat", "thread_pool", thread_pool_patterns), + params=params, + headers=headers, ) @query_params( + "cluster_manager_timeout", "format", "h", "help", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "s", "time", "v", ) - async def snapshots(self, repository=None, params=None, headers=None): + async def snapshots( + self, + repository: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns all snapshots in a specific repository. 
- :arg repository: Name of repository from which to fetch the - snapshot information - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information - :arg ignore_unavailable: Set to true to ignore unavailable - snapshots - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + :arg repository: Comma-separated list of repository names. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. + :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed). Default is + false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers + to sort by. + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( "GET", @@ -666,101 +878,76 @@ async def snapshots(self, repository=None, params=None, headers=None): "time", "v", ) - async def tasks(self, params=None, headers=None): + async def tasks( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about the tasks currently executing on one or more nodes in the cluster. - :arg actions: A comma-separated list of actions that should be + :arg actions: Comma-separated list of actions that should be returned. Leave empty to return all. - :arg detailed: Return detailed task information (default: false) - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information - :arg nodes: A comma-separated list of node IDs or names to limit + :arg detailed: Return detailed task information. Default is + false. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. + :arg nodes: Comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the - node you're connecting to, leave empty to get information from all nodes + node you're connecting to, leave empty to get information from all + nodes. :arg parent_task_id: Return tasks with specified parent task id (node_id:task_number). Set to -1 to return all. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers + to sort by. + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. 
""" return await self.transport.perform_request( "GET", "/_cat/tasks", params=params, headers=headers ) @query_params( + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", "v", ) - async def templates(self, name=None, params=None, headers=None): + async def templates( + self, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about existing templates. - :arg name: A pattern that returned template names must match - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg name: The name of the template. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. Default is false. """ return await self.transport.perform_request( "GET", _make_path("_cat", "templates", name), params=params, headers=headers ) - - @query_params( - "allow_no_match", "format", "from_", "h", "help", "s", "size", "time", "v" - ) - async def transforms(self, transform_id=None, params=None, headers=None): - """ - Gets configuration and usage information about transforms. - - - :arg transform_id: The id of the transform for which to get - stats. '_all' or '*' implies all transforms - :arg allow_no_match: Whether to ignore if a wildcard expression - matches no transforms. (This includes `_all` string or when no - transforms have been specified) - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg from_: skips a number of transform configs, defaults to 0 - :arg h: Comma-separated list of column names to display - :arg help: Return help information - :arg s: Comma-separated list of column names or column aliases - to sort by - :arg size: specifies a max number of transforms to get, defaults - to 100 - :arg time: The unit in which to display time values Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. 
Display column headers - """ - # from is a reserved word so it cannot be used, use from_ instead - if "from_" in params: - params["from"] = params.pop("from_") - - return await self.transport.perform_request( - "GET", - _make_path("_cat", "transforms", transform_id), - params=params, - headers=headers, - ) diff --git a/opensearchpy/_async/client/cat.pyi b/opensearchpy/_async/client/cat.pyi deleted file mode 100644 index c53d43bc..00000000 --- a/opensearchpy/_async/client/cat.pyi +++ /dev/null @@ -1,555 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class CatClient(NamespacedClient): - async def aliases( - self, - *, - name: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def allocation( - self, - *, - node_id: Optional[Any] = ..., - bytes: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
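# The regenerated async cat client earlier in this diff keeps the old calling
# convention while adding inline type annotations. A quick sketch against a
# local cluster; the "movies" index is illustrative.
from opensearchpy import AsyncOpenSearch

async def cluster_overview() -> None:
    client = AsyncOpenSearch(hosts=["https://localhost:9200"])
    print(await client.cat.health(format="json", v=True))
    print(await client.cat.indices(index="movies", bytes="kb", format="json"))
    # Inclusive-language replacement for the deprecated cat.master():
    print(await client.cat.cluster_manager(format="json"))
    await client.close()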
- async def count( - self, - *, - index: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def health( - self, - *, - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - ts: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def help( - self, - *, - help: Optional[Any] = ..., - s: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def indices( - self, - *, - index: Optional[Any] = ..., - bytes: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - health: Optional[Any] = ..., - help: Optional[Any] = ..., - include_unloaded_segments: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pri: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
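# cat.master() in the regenerated cat.py earlier in this diff still works but
# emits a warning pointing at /_cat/cluster_manager. A sketch that surfaces it
# during migration testing; matching on "cluster_manager" is an assumption
# based on the warning text in this change.
import warnings

from opensearchpy import AsyncOpenSearch

async def surface_master_deprecation() -> None:
    client = AsyncOpenSearch(hosts=["https://localhost:9200"])
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        await client.cat.master(format="json")
    assert any("cluster_manager" in str(w.message) for w in caught)
    await client.close()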
- async def master( - self, - *, - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def cluster_manager( - self, - *, - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def nodes( - self, - *, - bytes: Optional[Any] = ..., - format: Optional[Any] = ..., - full_id: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - include_unloaded_segments: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def recovery( - self, - *, - index: Optional[Any] = ..., - active_only: Optional[Any] = ..., - bytes: Optional[Any] = ..., - detailed: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def shards( - self, - *, - index: Optional[Any] = ..., - bytes: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def segments( - self, - *, - index: Optional[Any] = ..., - bytes: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def pending_tasks( - self, - *, - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def thread_pool( - self, - *, - thread_pool_patterns: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - size: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def fielddata( - self, - *, - fields: Optional[Any] = ..., - bytes: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def plugins( - self, - *, - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - include_bootstrap: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def nodeattrs( - self, - *, - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def repositories( - self, - *, - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def snapshots( - self, - *, - repository: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def tasks( - self, - *, - actions: Optional[Any] = ..., - detailed: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - nodes: Optional[Any] = ..., - parent_task_id: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def templates( - self, - *, - name: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def transforms( - self, - *, - transform_id: Optional[Any] = ..., - allow_no_match: Optional[Any] = ..., - format: Optional[Any] = ..., - from_: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - size: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
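With the `cat.pyi` stub above deleted, the type annotations now live inline in `cat.py` (see the `transforms` method at the top of this section, which maps the `from_` keyword onto the reserved word `from`). A minimal sketch of a call site under these changes; the host, port, and the choice to print the response are illustrative assumptions, not part of the diff:

```python
import asyncio

from opensearchpy import AsyncOpenSearch


async def main() -> None:
    # Assumed local test cluster; adjust connection details as needed.
    client = AsyncOpenSearch(hosts=[{"host": "localhost", "port": 9200}])
    try:
        # `from` is a reserved word in Python, so the client accepts `from_`
        # and rewrites it to `from` before issuing GET /_cat/transforms.
        print(await client.cat.transforms(format="json", from_=0, size=10))
    finally:
        await client.close()


asyncio.run(main())
```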
diff --git a/opensearchpy/_async/client/client.py b/opensearchpy/_async/client/client.py new file mode 100644 index 00000000..7f0b67c6 --- /dev/null +++ b/opensearchpy/_async/client/client.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + +from typing import Any, Optional, Type + +from opensearchpy.client.utils import _normalize_hosts +from opensearchpy.transport import Transport + + +class Client(object): +    """ +    A generic async OpenSearch client. +    """ + +    def __init__( +        self, +        hosts: Optional[str] = None, +        transport_class: Type[Transport] = Transport, +        **kwargs: Any +    ) -> None: +        """ +        :arg hosts: list of nodes, or a single node, we should connect to. +            Node should be a dictionary ({"host": "localhost", "port": 9200}), +            the entire dictionary will be passed to the :class:`~opensearchpy.Connection` +            class as kwargs, or a string in the format of ``host[:port]`` which will be +            translated to a dictionary automatically. If no value is given the +            :class:`~opensearchpy.Connection` class defaults will be used. + +        :arg transport_class: :class:`~opensearchpy.Transport` subclass to use. + +        :arg kwargs: any additional arguments will be passed on to the +            :class:`~opensearchpy.Transport` class and, subsequently, to the +            :class:`~opensearchpy.Connection` instances. +        """ +        self.transport = transport_class(_normalize_hosts(hosts), **kwargs) diff --git a/opensearchpy/_async/client/cluster.py b/opensearchpy/_async/client/cluster.py index 174245cb..905853e9 100644 --- a/opensearchpy/_async/client/cluster.py +++ b/opensearchpy/_async/client/cluster.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,16 +26,29 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + +from typing import Any + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class ClusterClient(NamespacedClient): @query_params( +        "awareness_attribute", +        "cluster_manager_timeout", "expand_wildcards", "level", "local", "master_timeout", -        "cluster_manager_timeout", "timeout", "wait_for_active_shards", "wait_for_events", @@ -43,37 +57,46 @@ class ClusterClient(NamespacedClient): "wait_for_nodes", "wait_for_status", ) -    async def health(self, index=None, params=None, headers=None): +    async def health( +        self, +        index: Any = None, +        params: Any = None, +        headers: Any = None, +    ) -> Any: """ Returns basic information about the health of the cluster. -        :arg index: Limit the information returned to a specific index +        :arg index: Limit the information returned to specific indices. +        :arg awareness_attribute: The awareness attribute for which the +            health is required.
+ :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: all - :arg level: Specify the level of detail for returned information - Valid choices: cluster, indices, shards Default: cluster + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg level: Specify the level of detail for returned + information. Valid choices are cluster, indices, shards, + awareness_attributes. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Wait until the specified number of - shards is active + shards is active. :arg wait_for_events: Wait until all currently queued events - with the given priority are processed Valid choices: immediate, urgent, - high, normal, low, languid + with the given priority are processed. Valid choices are immediate, + urgent, high, normal, low, languid. :arg wait_for_no_initializing_shards: Whether to wait until - there are no initializing shards in the cluster + there are no initializing shards in the cluster. :arg wait_for_no_relocating_shards: Whether to wait until there - are no relocating shards in the cluster + are no relocating shards in the cluster. :arg wait_for_nodes: Wait until the specified number of nodes is - available - :arg wait_for_status: Wait until cluster is in a specific state - Valid choices: green, yellow, red + available. + :arg wait_for_status: Wait until cluster is in a specific state. + Valid choices are green, yellow, red. """ return await self.transport.perform_request( "GET", @@ -82,17 +105,24 @@ async def health(self, index=None, params=None, headers=None): headers=headers, ) - @query_params("local", "master_timeout", "cluster_manager_timeout") - async def pending_tasks(self, params=None, headers=None): + @query_params("cluster_manager_timeout", "local", "master_timeout") + async def pending_tasks( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a list of any cluster-level changes (e.g. create index, update mapping, allocate or fail shard) which have not yet been executed. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. 
""" return await self.transport.perform_request( "GET", "/_cluster/pending_tasks", params=params, headers=headers @@ -100,43 +130,52 @@ async def pending_tasks(self, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "flat_settings", "ignore_unavailable", "local", "master_timeout", - "cluster_manager_timeout", "wait_for_metadata_version", "wait_for_timeout", ) - async def state(self, metric=None, index=None, params=None, headers=None): + async def state( + self, + metric: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a comprehensive information about the state of the cluster. :arg metric: Limit the information returned to the specified - metrics Valid choices: _all, blocks, metadata, nodes, routing_table, - routing_nodes, master_node, version - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + metrics. Valid choices are _all, blocks, metadata, nodes, routing_table, + routing_nodes, master_node, cluster_manager_node, version. + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg flat_settings: Return settings in flat format (default: - false) + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg flat_settings: Return settings in flat format. Default is + false. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg wait_for_metadata_version: Wait for the metadata version to - be equal or greater than the specified metadata version + be equal or greater than the specified metadata version. :arg wait_for_timeout: The maximum time to wait for - wait_for_metadata_version before timing out + wait_for_metadata_version before timing out. """ if index and metric in SKIP_IN_PATH: metric = "_all" @@ -149,18 +188,23 @@ async def state(self, metric=None, index=None, params=None, headers=None): ) @query_params("flat_settings", "timeout") - async def stats(self, node_id=None, params=None, headers=None): + async def stats( + self, + node_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns high-level overview of cluster statistics. 
- :arg node_id: A comma-separated list of node IDs or names to - limit the returned information; use `_local` to return information from - the node you're connecting to, leave empty to get information from all - nodes - :arg flat_settings: Return settings in flat format (default: - false) - :arg timeout: Explicit operation timeout + :arg node_id: Comma-separated list of node IDs or names to limit + the returned information; use `_local` to return information from the + node you're connecting to, leave empty to get information from all + nodes. + :arg flat_settings: Return settings in flat format. Default is + false. + :arg timeout: Operation timeout. """ return await self.transport.perform_request( "GET", @@ -172,83 +216,99 @@ async def stats(self, node_id=None, params=None, headers=None): ) @query_params( + "cluster_manager_timeout", "dry_run", "explain", "master_timeout", - "cluster_manager_timeout", "metric", "retry_failed", "timeout", ) - async def reroute(self, body=None, params=None, headers=None): + async def reroute( + self, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to manually change the allocation of individual shards in the cluster. :arg body: The definition of `commands` to perform (`move`, `cancel`, `allocate`) + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg dry_run: Simulate the operation only and return the - resulting state + resulting state. :arg explain: Return an explanation of why the commands can or - cannot be executed - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + cannot be executed. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg metric: Limit the information returned to the specified - metrics. Defaults to all but metadata Valid choices: _all, blocks, - metadata, nodes, routing_table, master_node, version + metrics. Defaults to all but metadata. :arg retry_failed: Retries allocation of shards that are blocked - due to too many subsequent allocation failures - :arg timeout: Explicit operation timeout + due to too many subsequent allocation failures. + :arg timeout: Operation timeout. """ return await self.transport.perform_request( "POST", "/_cluster/reroute", params=params, headers=headers, body=body ) @query_params( + "cluster_manager_timeout", "flat_settings", "include_defaults", "master_timeout", - "cluster_manager_timeout", "timeout", ) - async def get_settings(self, params=None, headers=None): + async def get_settings( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns cluster settings. - :arg flat_settings: Return settings in flat format (default: - false) + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg flat_settings: Return settings in flat format. Default is + false. :arg include_defaults: Whether to return all default clusters - setting. - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + setting. Default is false. 
+ :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ return await self.transport.perform_request( "GET", "/_cluster/settings", params=params, headers=headers ) @query_params( - "flat_settings", "master_timeout", "cluster_manager_timeout", "timeout" + "cluster_manager_timeout", "flat_settings", "master_timeout", "timeout" ) - async def put_settings(self, body, params=None, headers=None): + async def put_settings( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates the cluster settings. :arg body: The settings to be updated. Can be either `transient` or `persistent` (survives cluster restart). - :arg flat_settings: Return settings in flat format (default: - false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg flat_settings: Return settings in flat format. Default is + false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -258,7 +318,11 @@ async def put_settings(self, body, params=None, headers=None): ) @query_params() - async def remote_info(self, params=None, headers=None): + async def remote_info( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns the information about configured remote clusters. @@ -268,7 +332,12 @@ async def remote_info(self, params=None, headers=None): ) @query_params("include_disk_info", "include_yes_decisions") - async def allocation_explain(self, body=None, params=None, headers=None): + async def allocation_explain( + self, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Provides explanations for shard allocations in the cluster. @@ -276,9 +345,9 @@ async def allocation_explain(self, body=None, params=None, headers=None): :arg body: The index, shard, and primary flag to explain. Empty means 'explain the first unassigned shard' :arg include_disk_info: Return information about disk usage and - shard sizes (default: false) + shard sizes. Default is false. :arg include_yes_decisions: Return 'YES' decisions in - explanation (default: false) + explanation. Default is false. """ return await self.transport.perform_request( "POST", @@ -288,16 +357,24 @@ async def allocation_explain(self, body=None, params=None, headers=None): body=body, ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") - async def delete_component_template(self, name, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "timeout") + async def delete_component_template( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Deletes a component template + Deletes a component template. 
-        :arg name: The name of the template
-        :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master
-        :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager
-        :arg timeout: Explicit operation timeout
+        :arg name: The name of the template.
+        :arg cluster_manager_timeout: Operation timeout for connection
+            to cluster-manager node.
+        :arg master_timeout (Deprecated: To promote inclusive language,
+            use 'cluster_manager_timeout' instead.): Operation timeout for
+            connection to master node.
+        :arg timeout: Operation timeout.
         """
         if name in SKIP_IN_PATH:
             raise ValueError("Empty value passed for a required argument 'name'.")
@@ -309,19 +386,25 @@ async def delete_component_template(self, name, params=None, headers=None):
             headers=headers,
         )

-    @query_params("local", "master_timeout", "cluster_manager_timeout")
-    async def get_component_template(self, name=None, params=None, headers=None):
+    @query_params("cluster_manager_timeout", "local", "master_timeout")
+    async def get_component_template(
+        self,
+        name: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
-        Returns one or more component templates
+        Returns one or more component templates.

-        :arg name: The comma separated names of the component templates
+        :arg name: Comma-separated names of the component templates.
+        :arg cluster_manager_timeout: Operation timeout for connection
+            to cluster-manager node.
         :arg local: Return local information, do not retrieve the state
-            from cluster_manager node (default: false)
-        :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection
-            to master node
-        :arg cluster_manager_timeout: Explicit operation timeout for connection
-            to cluster_manager node
+            from cluster-manager node. Default is false.
+        :arg master_timeout (Deprecated: To promote inclusive language,
+            use 'cluster_manager_timeout' instead.): Operation timeout for
+            connection to master node.
         """
         return await self.transport.perform_request(
             "GET",
@@ -330,19 +413,28 @@ async def get_component_template(self, name=None, params=None, headers=None):
             headers=headers,
         )

-    @query_params("create", "master_timeout", "cluster_manager_timeout", "timeout")
-    async def put_component_template(self, name, body, params=None, headers=None):
+    @query_params("cluster_manager_timeout", "create", "master_timeout", "timeout")
+    async def put_component_template(
+        self,
+        name: Any,
+        body: Any,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
-        Creates or updates a component template
+        Creates or updates a component template.

-        :arg name: The name of the template
+        :arg name: The name of the template.
         :arg body: The template definition
+        :arg cluster_manager_timeout: Operation timeout for connection
+            to cluster-manager node.
         :arg create: Whether the index template should only be added if
-            new or can also replace an existing one
-            :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master
-        :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager
-        :arg timeout: Explicit operation timeout
+            new or can also replace an existing one. Default is false.
+        :arg master_timeout (Deprecated: To promote inclusive language,
+            use 'cluster_manager_timeout' instead.): Operation timeout for
+            connection to master node.
+        :arg timeout: Operation timeout.
""" for param in (name, body): if param in SKIP_IN_PATH: @@ -356,19 +448,25 @@ async def put_component_template(self, name, body, params=None, headers=None): body=body, ) - @query_params("local", "master_timeout", "cluster_manager_timeout") - async def exists_component_template(self, name, params=None, headers=None): + @query_params("cluster_manager_timeout", "local", "master_timeout") + async def exists_component_template( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Returns information about whether a particular component template exist + Returns information about whether a particular component template exist. - :arg name: The name of the template + :arg name: The name of the template. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -381,14 +479,18 @@ async def exists_component_template(self, name, params=None, headers=None): ) @query_params("wait_for_removal") - async def delete_voting_config_exclusions(self, params=None, headers=None): + async def delete_voting_config_exclusions( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Clears cluster voting config exclusions. :arg wait_for_removal: Specifies whether to wait for all excluded nodes to be removed from the cluster before clearing the voting - configuration exclusions list. Default: True + configuration exclusions list. Default is True. """ return await self.transport.perform_request( "DELETE", @@ -398,19 +500,166 @@ async def delete_voting_config_exclusions(self, params=None, headers=None): ) @query_params("node_ids", "node_names", "timeout") - async def post_voting_config_exclusions(self, params=None, headers=None): + async def post_voting_config_exclusions( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates the cluster voting config exclusions by node ids or node names. - :arg node_ids: A comma-separated list of the persistent ids of - the nodes to exclude from the voting configuration. If specified, you - may not also specify ?node_names. - :arg node_names: A comma-separated list of the names of the + :arg node_ids: Comma-separated list of the persistent ids of the nodes to exclude from the voting configuration. If specified, you may - not also specify ?node_ids. - :arg timeout: Explicit operation timeout Default: 30s + not also specify ?node_names. + :arg node_names: Comma-separated list of the names of the nodes + to exclude from the voting configuration. If specified, you may not also + specify ?node_ids. + :arg timeout: Operation timeout. """ return await self.transport.perform_request( "POST", "/_cluster/voting_config_exclusions", params=params, headers=headers ) + + @query_params() + async def delete_decommission_awareness( + self, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Delete any existing decommission. 
+ + """ + return await self.transport.perform_request( + "DELETE", + "/_cluster/decommission/awareness/", + params=params, + headers=headers, + ) + + @query_params() + async def delete_weighted_routing( + self, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Delete weighted shard routing weights. + + """ + return await self.transport.perform_request( + "DELETE", + "/_cluster/routing/awareness/weights", + params=params, + headers=headers, + ) + + @query_params() + async def get_decommission_awareness( + self, + awareness_attribute_name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Get details and status of decommissioned attribute. + + + :arg awareness_attribute_name: Awareness attribute name. + """ + if awareness_attribute_name in SKIP_IN_PATH: + raise ValueError( + "Empty value passed for a required argument 'awareness_attribute_name'." + ) + + return await self.transport.perform_request( + "GET", + _make_path( + "_cluster", + "decommission", + "awareness", + awareness_attribute_name, + "_status", + ), + params=params, + headers=headers, + ) + + @query_params() + async def get_weighted_routing( + self, + attribute: Any, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Fetches weighted shard routing weights. + + + :arg attribute: Awareness attribute name. + """ + if attribute in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'attribute'.") + + return await self.transport.perform_request( + "GET", + _make_path("_cluster", "routing", "awareness", attribute, "weights"), + params=params, + headers=headers, + ) + + @query_params() + async def put_decommission_awareness( + self, + awareness_attribute_name: Any, + awareness_attribute_value: Any, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Decommissions an awareness attribute. + + + :arg awareness_attribute_name: Awareness attribute name. + :arg awareness_attribute_value: Awareness attribute value. + """ + for param in (awareness_attribute_name, awareness_attribute_value): + if param in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument.") + + return await self.transport.perform_request( + "PUT", + _make_path( + "_cluster", + "decommission", + "awareness", + awareness_attribute_name, + awareness_attribute_value, + ), + params=params, + headers=headers, + ) + + @query_params() + async def put_weighted_routing( + self, + attribute: Any, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Updates weighted shard routing weights. + + + :arg attribute: Awareness attribute name. + """ + if attribute in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'attribute'.") + + return await self.transport.perform_request( + "PUT", + _make_path("_cluster", "routing", "awareness", attribute, "weights"), + params=params, + headers=headers, + ) diff --git a/opensearchpy/_async/client/cluster.pyi b/opensearchpy/_async/client/cluster.pyi deleted file mode 100644 index f2cd948c..00000000 --- a/opensearchpy/_async/client/cluster.pyi +++ /dev/null @@ -1,344 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. 
See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class ClusterClient(NamespacedClient): - async def health( - self, - *, - index: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - level: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - wait_for_events: Optional[Any] = ..., - wait_for_no_initializing_shards: Optional[Any] = ..., - wait_for_no_relocating_shards: Optional[Any] = ..., - wait_for_nodes: Optional[Any] = ..., - wait_for_status: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def pending_tasks( - self, - *, - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def state( - self, - *, - metric: Optional[Any] = ..., - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - wait_for_metadata_version: Optional[Any] = ..., - wait_for_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def stats( - self, - *, - node_id: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def reroute( - self, - *, - body: Optional[Any] = ..., - dry_run: Optional[Any] = ..., - explain: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - metric: Optional[Any] = ..., - retry_failed: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_settings( - self, - *, - flat_settings: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def put_settings( - self, - *, - body: Any, - flat_settings: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def remote_info( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def allocation_explain( - self, - *, - body: Optional[Any] = ..., - include_disk_info: Optional[Any] = ..., - include_yes_decisions: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_component_template( - self, - name: Any, - *, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def get_component_template( - self, - *, - name: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def put_component_template( - self, - name: Any, - *, - body: Any, - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def exists_component_template( - self, - name: Any, - *, - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - async def delete_voting_config_exclusions( - self, - *, - wait_for_removal: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def post_voting_config_exclusions( - self, - *, - node_ids: Optional[Any] = ..., - node_names: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/_async/client/dangling_indices.py b/opensearchpy/_async/client/dangling_indices.py index ff9e533f..6bc9a343 100644 --- a/opensearchpy/_async/client/dangling_indices.py +++ b/opensearchpy/_async/client/dangling_indices.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,24 +26,44 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + +from typing import Any + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class DanglingIndicesClient(NamespacedClient): @query_params( - "accept_data_loss", "master_timeout", "cluster_manager_timeout", "timeout" + "accept_data_loss", "cluster_manager_timeout", "master_timeout", "timeout" ) - async def delete_dangling_index(self, index_uuid, params=None, headers=None): + async def delete_dangling_index( + self, + index_uuid: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Deletes the specified dangling index + Deletes the specified dangling index. - :arg index_uuid: The UUID of the dangling index + :arg index_uuid: The UUID of the dangling index. :arg accept_data_loss: Must be set to true in order to delete - the dangling index - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + the dangling index. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. 
""" if index_uuid in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index_uuid'.") @@ -55,19 +76,27 @@ async def delete_dangling_index(self, index_uuid, params=None, headers=None): ) @query_params( - "accept_data_loss", "master_timeout", "cluster_manager_timeout", "timeout" + "accept_data_loss", "cluster_manager_timeout", "master_timeout", "timeout" ) - async def import_dangling_index(self, index_uuid, params=None, headers=None): + async def import_dangling_index( + self, + index_uuid: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Imports the specified dangling index + Imports the specified dangling index. - :arg index_uuid: The UUID of the dangling index + :arg index_uuid: The UUID of the dangling index. :arg accept_data_loss: Must be set to true in order to import - the dangling index - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + the dangling index. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if index_uuid in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index_uuid'.") @@ -77,7 +106,11 @@ async def import_dangling_index(self, index_uuid, params=None, headers=None): ) @query_params() - async def list_dangling_indices(self, params=None, headers=None): + async def list_dangling_indices( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns all dangling indices. diff --git a/opensearchpy/_async/client/dangling_indices.pyi b/opensearchpy/_async/client/dangling_indices.pyi deleted file mode 100644 index c9bb9ec1..00000000 --- a/opensearchpy/_async/client/dangling_indices.pyi +++ /dev/null @@ -1,89 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class DanglingIndicesClient(NamespacedClient): - async def delete_dangling_index( - self, - index_uuid: Any, - *, - accept_data_loss: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def import_dangling_index( - self, - index_uuid: Any, - *, - accept_data_loss: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def list_dangling_indices( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/_async/client/features.py b/opensearchpy/_async/client/features.py index 7922f955..1b69aa04 100644 --- a/opensearchpy/_async/client/features.py +++ b/opensearchpy/_async/client/features.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,12 +26,14 @@ # under the License. 
+from typing import Any + from .utils import NamespacedClient, query_params class FeaturesClient(NamespacedClient): @query_params("master_timeout", "cluster_manager_timeout") - async def get_features(self, params=None, headers=None): + async def get_features(self, params: Any = None, headers: Any = None) -> Any: """ Gets a list of features which can be included in snapshots using the feature_states field when creating a snapshot @@ -46,7 +49,7 @@ async def get_features(self, params=None, headers=None): ) @query_params() - async def reset_features(self, params=None, headers=None): + async def reset_features(self, params: Any = None, headers: Any = None) -> Any: """ Resets the internal state of features, usually by deleting system indices diff --git a/opensearchpy/_async/client/features.pyi b/opensearchpy/_async/client/features.pyi deleted file mode 100644 index 96acb588..00000000 --- a/opensearchpy/_async/client/features.pyi +++ /dev/null @@ -1,65 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class FeaturesClient(NamespacedClient): - async def get_features( - self, - *, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def reset_features( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
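With the `.pyi` stubs above deleted and the annotations merged inline, type checkers read signatures directly from the generated modules. A minimal sketch (assumes a reachable cluster; the helper name is invented):

    from opensearchpy import AsyncOpenSearch

    async def show_cluster_state(client: AsyncOpenSearch) -> None:
        # Both methods are annotated inline now, so mypy needs no separate stubs.
        features = await client.features.get_features()
        dangling = await client.dangling_indices.list_dangling_indices()
        print(features, dangling)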
diff --git a/opensearchpy/_async/client/indices.py b/opensearchpy/_async/client/indices.py index d58a3fb5..a4ef8b5b 100644 --- a/opensearchpy/_async/client/indices.py +++ b/opensearchpy/_async/client/indices.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,12 +26,30 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + +from typing import Any + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class IndicesClient(NamespacedClient): @query_params() - async def analyze(self, body=None, index=None, params=None, headers=None): + async def analyze( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Performs the analysis process on a text and return the tokens breakdown of the text. @@ -38,7 +57,7 @@ async def analyze(self, body=None, index=None, params=None, headers=None): :arg body: Define analyzer/tokenizer parameters and the text on which the analysis should be performed - :arg index: The name of the index to scope the operation + :arg index: The name of the index to scope the operation. """ return await self.transport.perform_request( "POST", @@ -49,21 +68,26 @@ async def analyze(self, body=None, index=None, params=None, headers=None): ) @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable") - async def refresh(self, index=None, params=None, headers=None): + async def refresh( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Performs the refresh operation in one or more indices. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). """ return await self.transport.perform_request( "POST", _make_path(index, "_refresh"), params=params, headers=headers @@ -76,49 +100,63 @@ async def refresh(self, index=None, params=None, headers=None): "ignore_unavailable", "wait_if_ongoing", ) - async def flush(self, index=None, params=None, headers=None): + async def flush( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Performs the flush operation on one or more indices. 
- :arg index: A comma-separated list of index names; use `_all` or - empty string for all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg force: Whether a flush should be forced even if it is not necessarily needed i.e. if no changes will be committed to the index. This is useful if transaction log IDs should be incremented even if no uncommitted changes are present. (This setting can be considered as - internal) + internal). :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg wait_if_ongoing: If set to true the flush operation will block until the flush can be executed if another flush operation is - already executing. The default is true. If set to false the flush will - be skipped iff if another flush operation is already running. + already executing. If set to false the flush will be skipped if + another flush operation is already running. Default is True. """ return await self.transport.perform_request( "POST", _make_path(index, "_flush"), params=params, headers=headers ) @query_params( - "master_timeout", "cluster_manager_timeout", "timeout", "wait_for_active_shards" + "cluster_manager_timeout", "master_timeout", "timeout", "wait_for_active_shards" ) - async def create(self, index, body=None, params=None, headers=None): + async def create( + self, + index: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates an index with optional settings and mappings. - :arg index: The name of the index + :arg index: Index name. :arg body: The configuration for the index (`settings` and `mappings`) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Set the number of active shards to wait for before the operation returns. """ @@ -130,20 +168,30 @@ async def create(self, index, body=None, params=None, headers=None): ) @query_params( - "master_timeout", "cluster_manager_timeout", "timeout", "wait_for_active_shards" + "cluster_manager_timeout", "master_timeout", "timeout", "wait_for_active_shards" ) - async def clone(self, index, target, body=None, params=None, headers=None): + async def clone( + self, + index: Any, + target: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Clones an index + Clones an index.
- :arg index: The name of the source index to clone - :arg target: The name of the target index to clone into + :arg index: The name of the source index to clone. + :arg target: The name of the target index. :arg body: The configuration for the target index (`settings` and `aliases`) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Set the number of active shards to wait for on the cloned index before the operation returns. """ @@ -161,35 +209,45 @@ async def clone(self, index, target, body=None, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "flat_settings", "ignore_unavailable", "include_defaults", "local", "master_timeout", - "cluster_manager_timeout", ) - async def get(self, index, params=None, headers=None): + async def get( + self, + index: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about one or more indices. - :arg index: A comma-separated list of index names - :arg allow_no_indices: Ignore if a wildcard expression resolves - to no concrete indices (default: false) - :arg expand_wildcards: Whether wildcard expressions should get - expanded to open or closed indices (default: open) Valid choices: open, - closed, hidden, none, all Default: open - :arg flat_settings: Return settings in flat format (default: - false) - :arg ignore_unavailable: Ignore unavailable indexes (default: - false) + :arg index: Comma-separated list of indices. + :arg allow_no_indices: Whether to ignore if a wildcard indices + expression resolves into no concrete indices. (This includes `_all` + string or when no indices have been specified). Default is false. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg flat_settings: Return settings in flat format. Default is + false. + :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed). Default is + false. :arg include_defaults: Whether to return all default setting for - each of the indices. + each of the indices. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. 
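A sketch of the renamed timeout parameter in use (index name, timeout value, and helper name are illustrative; `master_timeout` is still accepted but deprecated in favor of the spelling below):

    async def describe_index(client, name):
        # cluster_manager_timeout bounds the wait for the cluster-manager node.
        return await client.indices.get(
            index=name,
            cluster_manager_timeout="30s",
        )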
""" if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") @@ -200,30 +258,38 @@ async def get(self, index, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "timeout", "wait_for_active_shards", ) - async def open(self, index, params=None, headers=None): + async def open( + self, + index: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Opens an index. - :arg index: A comma separated list of indices to open + :arg index: Comma-separated list of indices to open. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: closed + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + should be ignored when unavailable (missing or closed). + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Sets the number of active shards to wait for before the operation returns. """ @@ -236,34 +302,40 @@ async def open(self, index, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "timeout", "wait_for_active_shards", ) - async def close(self, index, params=None, headers=None): + async def close( + self, + index: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Closes an index. - :arg index: A comma separated list of indices to close + :arg index: Comma-separated list of indices to close. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + should be ignored when unavailable (missing or closed). 
+ :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Sets the number of active shards to - wait for before the operation returns. Set to `index-setting` to wait - according to the index setting `index.write.wait_for_active_shards`, or - `all` to wait for all shards, or an integer. Defaults to `0`. + wait for before the operation returns. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") @@ -274,29 +346,39 @@ async def close(self, index, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "timeout", ) - async def delete(self, index, params=None, headers=None): + async def delete( + self, + index: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes an index. - :arg index: A comma-separated list of indices to delete; use - `_all` or `*` string to delete all indices - :arg allow_no_indices: Ignore if a wildcard expression resolves - to no concrete indices (default: false) - :arg expand_wildcards: Whether wildcard expressions should get - expanded to open or closed indices (default: open) Valid choices: open, - closed, hidden, none, all Default: open - :arg ignore_unavailable: Ignore unavailable indexes (default: - false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + :arg index: Comma-separated list of indices to delete; use + `_all` or `*` string to delete all indices. + :arg allow_no_indices: Whether to ignore if a wildcard indices + expression resolves into no concrete indices. (This includes `_all` + string or when no indices have been specified). Default is false. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed). Default is + false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") @@ -313,25 +395,32 @@ async def delete(self, index, params=None, headers=None): "include_defaults", "local", ) - async def exists(self, index, params=None, headers=None): + async def exists( + self, + index: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a particular index exists. 
- :arg index: A comma-separated list of index names - :arg allow_no_indices: Ignore if a wildcard expression resolves - to no concrete indices (default: false) - :arg expand_wildcards: Whether wildcard expressions should get - expanded to open or closed indices (default: open) Valid choices: open, - closed, hidden, none, all Default: open - :arg flat_settings: Return settings in flat format (default: - false) - :arg ignore_unavailable: Ignore unavailable indexes (default: - false) + :arg index: Comma-separated list of indices. + :arg allow_no_indices: Whether to ignore if a wildcard indices + expression resolves into no concrete indices. (This includes `_all` + string or when no indices have been specified). Default is false. + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg flat_settings: Return settings in flat format. Default is + false. + :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed). Default is + false. :arg include_defaults: Whether to return all default setting for - each of the indices. + each of the indices. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) + from cluster-manager node. Default is false. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") @@ -342,39 +431,50 @@ async def exists(self, index, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "timeout", "write_index_only", ) - async def put_mapping(self, body, index=None, params=None, headers=None): + async def put_mapping( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates the index mappings. :arg body: The mapping definition - :arg index: A comma-separated list of index names the mapping - should be added to (supports wildcards); use `_all` or omit to add the - mapping on all indices. + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + should be ignored when unavailable (missing or closed). + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. 
:arg write_index_only: When true, applies mappings only to the - write index of an alias or data stream + write index of an alias or data stream. Default is false. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") + if index in SKIP_IN_PATH: + index = "_all" + return await self.transport.perform_request( "PUT", _make_path(index, "_mapping"), @@ -385,36 +485,42 @@ async def put_mapping(self, body, index=None, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "ignore_unavailable", "local", "master_timeout", - "cluster_manager_timeout", ) - async def get_mapping(self, index=None, params=None, headers=None): + async def get_mapping( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns mappings for one or more indices. - :arg index: A comma-separated list of index names + :arg index: Comma-separated list of indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + should be ignored when unavailable (missing or closed). + :arg local (Deprecated: This parameter is a no-op and field + mappings are always retrieved locally.): Return local information, do + not retrieve the state from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return await self.transport.perform_request( - "GET", - _make_path(index, "_mapping"), - params=params, - headers=headers, + "GET", _make_path(index, "_mapping"), params=params, headers=headers ) @query_params( @@ -424,25 +530,31 @@ async def get_mapping(self, index=None, params=None, headers=None): "include_defaults", "local", ) - async def get_field_mapping(self, fields, index=None, params=None, headers=None): + async def get_field_mapping( + self, + fields: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns mapping for one or more fields. - :arg fields: A comma-separated list of fields - :arg index: A comma-separated list of index names + :arg fields: Comma-separated list of fields. + :arg index: Comma-separated list of indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. 
Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg include_defaults: Whether the default mapping values should - be returned as well + be returned as well. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) + from cluster-manager node. Default is false. """ if fields in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'fields'.") @@ -454,21 +566,30 @@ async def get_field_mapping(self, fields, index=None, params=None, headers=None) headers=headers, ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") - async def put_alias(self, index, name, body=None, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "timeout") + async def put_alias( + self, + index: Any, + name: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates an alias. - :arg index: A comma-separated list of index names the alias - should point to (supports wildcards); use `_all` to perform the - operation on all indices. - :arg name: The name of the alias to be created or updated + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. + :arg name: The name of the alias to be created or updated. :arg body: The settings for the alias, such as `routing` or `filter` - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit timestamp for the document + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ for param in (index, name): if param in SKIP_IN_PATH: @@ -483,24 +604,29 @@ async def put_alias(self, index, name, body=None, params=None, headers=None): ) @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable", "local") - async def exists_alias(self, name, index=None, params=None, headers=None): + async def exists_alias( + self, + name: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a particular alias exists. - :arg name: A comma-separated list of alias names to return - :arg index: A comma-separated list of index names to filter - aliases + :arg name: Comma-separated list of alias names. + :arg index: Comma-separated list of indices to filter aliases. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: all + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. 
:arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) + from cluster-manager node. Default is false. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -510,39 +636,52 @@ async def exists_alias(self, name, index=None, params=None, headers=None): ) @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable", "local") - async def get_alias(self, index=None, name=None, params=None, headers=None): + async def get_alias( + self, + index: Any = None, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns an alias. - :arg index: A comma-separated list of index names to filter - aliases - :arg name: A comma-separated list of alias names to return + :arg index: Comma-separated list of indices to filter aliases. + :arg name: Comma-separated list of alias names. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: all + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) + from cluster-manager node. Default is false. """ return await self.transport.perform_request( "GET", _make_path(index, "_alias", name), params=params, headers=headers ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") - async def update_aliases(self, body, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "timeout") + async def update_aliases( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates index aliases. :arg body: The definition of `actions` to perform - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Request timeout + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. 
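A sketch of an atomic alias swap using the `actions` body this method expects (index and alias names are invented for illustration):

    async def promote_index(client):
        # Both actions are applied in a single atomic cluster update.
        await client.indices.update_aliases(
            body={
                "actions": [
                    {"remove": {"index": "logs-old", "alias": "logs-live"}},
                    {"add": {"index": "logs-new", "alias": "logs-live"}},
                ]
            }
        )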
""" if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -551,19 +690,28 @@ async def update_aliases(self, body, params=None, headers=None): "POST", "/_aliases", params=params, headers=headers, body=body ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") - async def delete_alias(self, index, name, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "timeout") + async def delete_alias( + self, + index: Any, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes an alias. - :arg index: A comma-separated list of index names (supports - wildcards); use `_all` for all indices - :arg name: A comma-separated list of aliases to delete (supports + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. + :arg name: Comma-separated list of aliases to delete (supports wildcards); use `_all` to delete all aliases for the specified indices. - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit timestamp for the document + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ for param in (index, name): if param in SKIP_IN_PATH: @@ -573,21 +721,30 @@ async def delete_alias(self, index, name, params=None, headers=None): "DELETE", _make_path(index, "_alias", name), params=params, headers=headers ) - @query_params("create", "master_timeout", "cluster_manager_timeout", "order") - async def put_template(self, name, body, params=None, headers=None): + @query_params("cluster_manager_timeout", "create", "master_timeout", "order") + async def put_template( + self, + name: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates an index template. - :arg name: The name of the template + :arg name: The name of the template. :arg body: The template definition + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg create: Whether the index template should only be added if - new or can also replace an existing one - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + new or can also replace an existing one. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg order: The order for this template when merging multiple matching ones (higher numbers are merged later, overriding the lower - numbers) + numbers). 
""" for param in (name, body): if param in SKIP_IN_PATH: @@ -601,21 +758,27 @@ async def put_template(self, name, body, params=None, headers=None): body=body, ) - @query_params("flat_settings", "local", "master_timeout", "cluster_manager_timeout") - async def exists_template(self, name, params=None, headers=None): + @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") + async def exists_template( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a particular index template exists. - :arg name: The comma separated names of the index templates - :arg flat_settings: Return settings in flat format (default: - false) + :arg name: Comma-separated names of the index templates. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg flat_settings: Return settings in flat format. Default is + false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -624,36 +787,50 @@ async def exists_template(self, name, params=None, headers=None): "HEAD", _make_path("_template", name), params=params, headers=headers ) - @query_params("flat_settings", "local", "master_timeout", "cluster_manager_timeout") - async def get_template(self, name=None, params=None, headers=None): + @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") + async def get_template( + self, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns an index template. - :arg name: The comma separated names of the index templates - :arg flat_settings: Return settings in flat format (default: - false) + :arg name: Comma-separated names of the index templates. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg flat_settings: Return settings in flat format. Default is + false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return await self.transport.perform_request( "GET", _make_path("_template", name), params=params, headers=headers ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") - async def delete_template(self, name, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "timeout") + async def delete_template( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes an index template. 
- :arg name: The name of the template - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + :arg name: The name of the template. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -664,38 +841,47 @@ async def delete_template(self, name, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "flat_settings", "ignore_unavailable", "include_defaults", "local", "master_timeout", - "cluster_manager_timeout", ) - async def get_settings(self, index=None, name=None, params=None, headers=None): + async def get_settings( + self, + index: Any = None, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns settings for one or more indices. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices - :arg name: The name of the settings that should be included + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. + :arg name: Comma-separated list of settings. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: all - :arg flat_settings: Return settings in flat format (default: - false) + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg flat_settings: Return settings in flat format. Default is + false. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg include_defaults: Whether to return all default setting for - each of the indices. + each of the indices. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. 
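A sketch of reading settings back in flattened form (the index pattern is illustrative):

    async def dump_settings(client):
        # flat_settings=True yields keys such as "index.number_of_shards"
        # instead of nested objects.
        return await client.indices.get_settings(
            index="logs-*",
            flat_settings=True,
        )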
""" return await self.transport.perform_request( "GET", _make_path(index, "_settings", name), params=params, headers=headers @@ -703,38 +889,47 @@ async def get_settings(self, index=None, name=None, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "flat_settings", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "preserve_existing", "timeout", ) - async def put_settings(self, body, index=None, params=None, headers=None): + async def put_settings( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates the index settings. :arg body: The index settings to be updated - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg flat_settings: Return settings in flat format (default: - false) + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg flat_settings: Return settings in flat format. Default is + false. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + should be ignored when unavailable (missing or closed). + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg preserve_existing: Whether to update existing settings. If - set to `true` existing settings on an index remain unchanged, the - default is `false` - :arg timeout: Explicit operation timeout + set to `true` existing settings on an index remain unchanged. Default is + false. + :arg timeout: Operation timeout. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -757,43 +952,46 @@ async def put_settings(self, body, index=None, params=None, headers=None): "include_segment_file_sizes", "include_unloaded_segments", "level", - "types", ) - async def stats(self, index=None, metric=None, params=None, headers=None): + async def stats( + self, + index: Any = None, + metric: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Provides statistics on operations happening in an index. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg metric: Limit the information returned the specific - metrics. 
Valid choices: _all, completion, docs, fielddata, query_cache, - flush, get, indexing, merge, request_cache, refresh, search, segments, - store, warmer, suggest - :arg completion_fields: A comma-separated list of fields for - `fielddata` and `suggest` index metric (supports wildcards) + metrics. Valid choices are _all, store, indexing, get, search, merge, + flush, refresh, query_cache, fielddata, docs, warmer, completion, + segments, translog, suggest, request_cache, recovery. + :arg completion_fields: Comma-separated list of fields for + `fielddata` and `suggest` index metric (supports wildcards). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg fielddata_fields: A comma-separated list of fields for - `fielddata` index metric (supports wildcards) - :arg fields: A comma-separated list of fields for `fielddata` - and `completion` index metric (supports wildcards) + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg fielddata_fields: Comma-separated list of fields for + `fielddata` index metric (supports wildcards). + :arg fields: Comma-separated list of fields for `fielddata` and + `completion` index metric (supports wildcards). :arg forbid_closed_indices: If set to false stats will also be collected from closed indices if explicitly specified or if - expand_wildcards expands to closed indices Default: True - :arg groups: A comma-separated list of search groups for - `search` index metric + expand_wildcards expands to closed indices. Default is True. + :arg groups: Comma-separated list of search groups for `search` + index metric. :arg include_segment_file_sizes: Whether to report the aggregated disk usage of each one of the Lucene index files (only - applies if segment stats are requested) + applies if segment stats are requested). Default is false. :arg include_unloaded_segments: If set to true segment stats will include stats for segments that are not currently loaded into - memory + memory. Default is false. :arg level: Return stats aggregated at cluster, index or shard - level Valid choices: cluster, indices, shards Default: indices - :arg types: A comma-separated list of document types for the - `indexing` index metric + level. Valid choices are cluster, indices, shards. """ return await self.transport.perform_request( "GET", _make_path(index, "_stats", metric), params=params, headers=headers ) @@ -802,22 +1000,28 @@ @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "verbose" ) - async def segments(self, index=None, params=None, headers=None): + async def segments( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Provides low-level information about segments in a Lucene index. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both.
Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg verbose: Includes detailed memory usage by Lucene. + should be ignored when unavailable (missing or closed). + :arg verbose: Includes detailed memory usage by Lucene. Default + is false. """ return await self.transport.perform_request( "GET", _make_path(index, "_segments"), params=params, headers=headers @@ -837,36 +1041,41 @@ async def segments(self, index=None, params=None, headers=None): "q", "rewrite", ) - async def validate_query(self, body=None, index=None, params=None, headers=None): + async def validate_query( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows a user to validate a potentially expensive query without executing it. :arg body: The query definition specified with the Query DSL - :arg index: A comma-separated list of index names to restrict - the operation; use `_all` or empty string to perform the operation on - all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg all_shards: Execute validation on all shards instead of one - random shard per index + random shard per index. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg analyze_wildcard: Specify whether wildcard and prefix - queries should be analyzed (default: false) - :arg analyzer: The analyzer to use for the query string + queries should be analyzed. Default is false. + :arg analyzer: The analyzer to use for the query string. :arg default_operator: The default operator for query string - query (AND or OR) Valid choices: AND, OR Default: OR + query (AND or OR). Valid choices are AND, OR. :arg df: The field to use as default where no field prefix is - given in the query string + given in the query string. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg explain: Return detailed information about the error + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg explain: Return detailed information about the error. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg lenient: Specify whether format-based query failures (such - as providing text to a numeric field) should be ignored - :arg q: Query in the Lucene query string syntax + as providing text to a numeric field) should be ignored. + :arg q: Query in the Lucene query string syntax. :arg rewrite: Provide a more detailed explanation showing the actual Lucene query that will be executed. 
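A sketch of pre-flighting a query without running it (index and query are invented):

    async def query_is_valid(client) -> bool:
        resp = await client.indices.validate_query(
            index="logs-*",
            body={"query": {"match": {"message": "timeout"}}},
            explain=True,  # include details about why an invalid query failed
        )
        return resp["valid"]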
""" @@ -887,43 +1096,53 @@ async def validate_query(self, body=None, index=None, params=None, headers=None) "query", "request", ) - async def clear_cache(self, index=None, params=None, headers=None): + async def clear_cache( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Clears all or specific caches for one or more indices. - :arg index: A comma-separated list of index name to limit the - operation + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg fielddata: Clear field data - :arg fields: A comma-separated list of fields to clear when - using the `fielddata` parameter (default: all) + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg fielddata: Clear field data. + :arg fields: Comma-separated list of fields to clear when using + the `fielddata` parameter (default: all). :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg query: Clear query caches - :arg request: Clear request cache + should be ignored when unavailable (missing or closed). + :arg query: Clear query caches. + :arg request: Clear request cache. """ return await self.transport.perform_request( "POST", _make_path(index, "_cache", "clear"), params=params, headers=headers ) @query_params("active_only", "detailed") - async def recovery(self, index=None, params=None, headers=None): + async def recovery( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about ongoing index shard recoveries. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg active_only: Display only those recoveries that are - currently on-going + currently on-going. Default is false. :arg detailed: Whether to display detailed information about - shard recovery + shard recovery. Default is false. """ return await self.transport.perform_request( "GET", _make_path(index, "_recovery"), params=params, headers=headers @@ -936,46 +1155,56 @@ async def recovery(self, index=None, params=None, headers=None): "only_ancient_segments", "wait_for_completion", ) - async def upgrade(self, index=None, params=None, headers=None): + async def upgrade( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - DEPRECATED Upgrades to the current version of Lucene. + The _upgrade API is no longer useful and will be removed. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. 
(This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg only_ancient_segments: If true, only ancient (an older - Lucene major release) segments will be upgraded - :arg wait_for_completion: Specify whether the request should - block until the all segments are upgraded (default: false) + Lucene major release) segments will be upgraded. + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. Default is false. """ return await self.transport.perform_request( "POST", _make_path(index, "_upgrade"), params=params, headers=headers ) @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable") - async def get_upgrade(self, index=None, params=None, headers=None): + async def get_upgrade( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - DEPRECATED Returns a progress status of current upgrade. + The _upgrade API is no longer useful and will be removed. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). """ return await self.transport.perform_request( "GET", _make_path(index, "_upgrade"), params=params, headers=headers @@ -984,24 +1213,28 @@ async def get_upgrade(self, index=None, params=None, headers=None): @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "status" ) - async def shard_stores(self, index=None, params=None, headers=None): + async def shard_stores( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Provides store information for shard copies of indices. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. 
Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg status: A comma-separated list of statuses used to filter - on shards to get store information for Valid choices: green, yellow, - red, all + should be ignored when unavailable (missing or closed). + :arg status: Comma-separated list of statuses used to filter on + shards to get store information for. """ return await self.transport.perform_request( "GET", _make_path(index, "_shard_stores"), params=params, headers=headers @@ -1015,53 +1248,68 @@ async def shard_stores(self, index=None, params=None, headers=None): "max_num_segments", "only_expunge_deletes", ) - async def forcemerge(self, index=None, params=None, headers=None): + async def forcemerge( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Performs the force merge operation on one or more indices. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg flush: Specify whether the index should be flushed after - performing the operation (default: true) + performing the operation. Default is True. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg max_num_segments: The number of segments the index should - be merged into (default: dynamic) + be merged into (default: dynamic). :arg only_expunge_deletes: Specify whether the operation should - only expunge deleted documents + only expunge deleted documents. """ return await self.transport.perform_request( "POST", _make_path(index, "_forcemerge"), params=params, headers=headers ) @query_params( + "cluster_manager_timeout", "copy_settings", "master_timeout", - "cluster_manager_timeout", "timeout", "wait_for_active_shards", ) - async def shrink(self, index, target, body=None, params=None, headers=None): + async def shrink( + self, + index: Any, + target: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allow to shrink an existing index into a new index with fewer primary shards. - :arg index: The name of the source index to shrink - :arg target: The name of the target index to shrink into + :arg index: The name of the source index to shrink. + :arg target: The name of the target index. :arg body: The configuration for the target index (`settings` and `aliases`) + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. 
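# A sketch of shard_stores and forcemerge, assuming a read-mostly index named
# "archive-2023" that no longer receives writes (force-merging an actively
# written index is generally discouraged).
from opensearchpy import AsyncOpenSearch

async def compact_archive(client: AsyncOpenSearch) -> None:
    stores = await client.indices.shard_stores(index="archive-2023")
    print(list(stores["indices"]))
    # Merge down to a single segment; `flush` defaults to true after the merge.
    await client.indices.forcemerge(index="archive-2023", max_num_segments=1)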
:arg copy_settings: whether or not to copy settings from the - source index (defaults to false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + source index. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Set the number of active shards to wait for on the shrunken index before the operation returns. """ @@ -1078,27 +1326,37 @@ async def shrink(self, index, target, body=None, params=None, headers=None): ) @query_params( + "cluster_manager_timeout", "copy_settings", "master_timeout", - "cluster_manager_timeout", "timeout", "wait_for_active_shards", ) - async def split(self, index, target, body=None, params=None, headers=None): + async def split( + self, + index: Any, + target: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows you to split an existing index into a new index with more primary shards. - :arg index: The name of the source index to split - :arg target: The name of the target index to split into + :arg index: The name of the source index to split. + :arg target: The name of the target index. :arg body: The configuration for the target index (`settings` and `aliases`) + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg copy_settings: whether or not to copy settings from the - source index (defaults to false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + source index. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Set the number of active shards to wait for on the split index before the operation returns. """ @@ -1115,30 +1373,38 @@ async def split(self, index, target, body=None, params=None, headers=None): ) @query_params( + "cluster_manager_timeout", "dry_run", "master_timeout", - "cluster_manager_timeout", "timeout", "wait_for_active_shards", ) async def rollover( - self, alias, body=None, new_index=None, params=None, headers=None - ): + self, + alias: Any, + body: Any = None, + new_index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates an alias to point to a new index when the existing index is considered to be too large or too old. - :arg alias: The name of the alias to rollover + :arg alias: The name of the alias to rollover. :arg body: The conditions that need to be met for executing rollover - :arg new_index: The name of the rollover index + :arg new_index: The name of the rollover index. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg dry_run: If set to true the rollover action will only be - validated but not actually performed even if a condition matches.
The - default is false - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + validated but not actually performed even if a condition matches. + Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Set the number of active shards to wait for on the newly created rollover index before the operation returns. @@ -1154,133 +1420,45 @@ async def rollover( body=body, ) - @query_params( - "allow_no_indices", - "expand_wildcards", - "ignore_unavailable", - "master_timeout", - "cluster_manager_timeout", - "timeout", - "wait_for_active_shards", - ) - async def freeze(self, index, params=None, headers=None): - """ - Freezes an index. A frozen index has almost no overhead on the cluster (except - for maintaining its metadata in memory) and is read-only. - - - :arg index: The name of the index to freeze - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: closed - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout - :arg wait_for_active_shards: Sets the number of active shards to - wait for before the operation returns. - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'index'.") - - return await self.transport.perform_request( - "POST", _make_path(index, "_freeze"), params=params, headers=headers - ) - - @query_params( - "allow_no_indices", - "expand_wildcards", - "ignore_unavailable", - "master_timeout", - "cluster_manager_timeout", - "timeout", - "wait_for_active_shards", - ) - async def unfreeze(self, index, params=None, headers=None): - """ - Unfreezes an index. When a frozen index is unfrozen, the index goes through the - normal recovery process and becomes writeable again. - - - :arg index: The name of the index to unfreeze - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: closed - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout - :arg wait_for_active_shards: Sets the number of active shards to - wait for before the operation returns. 
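# A combined sketch of the shrink and rollover APIs documented above. All
# names and conditions are assumptions; shrink additionally requires that the
# source index be read-only with a copy of every shard on one node.
from opensearchpy import AsyncOpenSearch

async def resize_and_roll(client: AsyncOpenSearch) -> None:
    await client.indices.shrink(
        index="logs-000001",
        target="logs-000001-shrunk",
        body={"settings": {"index.number_of_shards": 1}},
    )
    # Validate rollover conditions without actually rolling the alias over.
    await client.indices.rollover(
        alias="logs-write",
        body={"conditions": {"max_age": "7d", "max_docs": 10_000_000}},
        dry_run=True,
    )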
- """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'index'.") - - return await self.transport.perform_request( - "POST", _make_path(index, "_unfreeze"), params=params, headers=headers - ) - - @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable") - async def reload_search_analyzers(self, index, params=None, headers=None): - """ - Reloads an index's search analyzers and their resources. - - - :arg index: A comma-separated list of index names to reload - analyzers for - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'index'.") - - return await self.transport.perform_request( - "GET", - _make_path(index, "_reload_search_analyzers"), - params=params, - headers=headers, - ) - @query_params() - async def create_data_stream(self, name, params=None, headers=None): + async def create_data_stream( + self, + name: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Creates a data stream + Creates or updates a data stream. - :arg name: The name of the data stream + :arg name: The name of the data stream. + :arg body: The data stream definition """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return await self.transport.perform_request( - "PUT", _make_path("_data_stream", name), params=params, headers=headers + "PUT", + _make_path("_data_stream", name), + params=params, + headers=headers, + body=body, ) - @query_params("expand_wildcards") - async def delete_data_stream(self, name, params=None, headers=None): + @query_params() + async def delete_data_stream( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes a data stream. - :arg name: A comma-separated list of data streams to delete; use - `*` to delete all data streams - :arg expand_wildcards: Whether wildcard expressions should get - expanded to open or closed indices (default: open) Valid choices: open, - closed, hidden, none, all Default: open + :arg name: Comma-separated list of data streams; use `_all` or + empty string to perform the operation on all data streams. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -1289,16 +1467,24 @@ async def delete_data_stream(self, name, params=None, headers=None): "DELETE", _make_path("_data_stream", name), params=params, headers=headers ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") - async def delete_index_template(self, name, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "timeout") + async def delete_index_template( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes an index template. 
- :arg name: The name of the template - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + :arg name: The name of the template. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -1310,21 +1496,27 @@ async def delete_index_template(self, name, params=None, headers=None): headers=headers, ) - @query_params("flat_settings", "local", "master_timeout", "cluster_manager_timeout") - async def exists_index_template(self, name, params=None, headers=None): + @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") + async def exists_index_template( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a particular index template exists. - :arg name: The name of the template - :arg flat_settings: Return settings in flat format (default: - false) + :arg name: The name of the template. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg flat_settings: Return settings in flat format. Default is + false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -1333,40 +1525,55 @@ async def exists_index_template(self, name, params=None, headers=None): "HEAD", _make_path("_index_template", name), params=params, headers=headers ) - @query_params("flat_settings", "local", "master_timeout", "cluster_manager_timeout") - async def get_index_template(self, name=None, params=None, headers=None): + @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") + async def get_index_template( + self, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns an index template. - :arg name: The comma separated names of the index templates - :arg flat_settings: Return settings in flat format (default: - false) + :arg name: Comma-separated names of the index templates. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg flat_settings: Return settings in flat format. Default is + false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. 
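# A sketch of exists_index_template and get_index_template from above; the
# template name is an assumption.
from opensearchpy import AsyncOpenSearch

async def show_template(client: AsyncOpenSearch) -> None:
    if await client.indices.exists_index_template(name="logs-template"):
        tmpl = await client.indices.get_index_template(name="logs-template")
        print(tmpl["index_templates"][0]["name"])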
+ :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return await self.transport.perform_request( "GET", _make_path("_index_template", name), params=params, headers=headers ) - @query_params("cause", "create", "master_timeout", "cluster_manager_timeout") - async def put_index_template(self, name, body, params=None, headers=None): + @query_params("cause", "cluster_manager_timeout", "create", "master_timeout") + async def put_index_template( + self, + name: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates an index template. - :arg name: The name of the template + :arg name: The name of the template. :arg body: The template definition :arg cause: User defined reason for creating/updating the index - template + template. Default is false. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg create: Whether the index template should only be added if - new or can also replace an existing one - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + new or can also replace an existing one. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ for param in (name, body): if param in SKIP_IN_PATH: @@ -1380,24 +1587,33 @@ async def put_index_template(self, name, body, params=None, headers=None): body=body, ) - @query_params("cause", "create", "master_timeout", "cluster_manager_timeout") - async def simulate_index_template(self, name, body=None, params=None, headers=None): + @query_params("cause", "cluster_manager_timeout", "create", "master_timeout") + async def simulate_index_template( + self, + name: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Simulate matching the given index name against the index templates in the - system + system. :arg name: The name of the index (it must be a concrete index - name) + name). :arg body: New index template definition, which will be included in the simulation, as if it already exists in the system :arg cause: User defined reason for dry-run creating the new - template for simulation purposes + template for simulation purposes. Default is false. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg create: Whether the index template we optionally defined in the body should only be dry-run added if new or can also replace an - existing one - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + existing one. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. 
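# A sketch of put_index_template from above; the composable template body is
# illustrative, not taken from this change.
from opensearchpy import AsyncOpenSearch

async def install_template(client: AsyncOpenSearch) -> None:
    await client.indices.put_index_template(
        name="logs-template",
        body={
            "index_patterns": ["logs-*"],
            "template": {"settings": {"number_of_shards": 1}},
        },
        create=True,  # fail rather than silently replace an existing template
    )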
""" if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -1410,38 +1626,49 @@ async def simulate_index_template(self, name, body=None, params=None, headers=No body=body, ) - @query_params("expand_wildcards") - async def get_data_stream(self, name=None, params=None, headers=None): + @query_params() + async def get_data_stream( + self, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns data streams. - :arg name: A comma-separated list of data streams to get; use - `*` to get all data streams - :arg expand_wildcards: Whether wildcard expressions should get - expanded to open or closed indices (default: open) Valid choices: open, - closed, hidden, none, all Default: open + :arg name: Comma-separated list of data streams; use `_all` or + empty string to perform the operation on all data streams. """ return await self.transport.perform_request( "GET", _make_path("_data_stream", name), params=params, headers=headers ) - @query_params("cause", "create", "master_timeout", "cluster_manager_timeout") - async def simulate_template(self, body=None, name=None, params=None, headers=None): + @query_params("cause", "cluster_manager_timeout", "create", "master_timeout") + async def simulate_template( + self, + body: Any = None, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Simulate resolving the given template name or body + Simulate resolving the given template name or body. :arg body: New index template definition to be simulated, if no index template name is specified - :arg name: The name of the index template + :arg name: The name of the template. :arg cause: User defined reason for dry-run creating the new - template for simulation purposes + template for simulation purposes. Default is false. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg create: Whether the index template we optionally defined in the body should only be dry-run added if new or can also replace an - existing one - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + existing one. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return await self.transport.perform_request( "POST", @@ -1452,21 +1679,21 @@ async def simulate_template(self, body=None, name=None, params=None, headers=Non ) @query_params("expand_wildcards") - async def resolve_index(self, name, params=None, headers=None): + async def resolve_index( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Returns information about any matching indices, aliases, and data streams - - - .. warning:: + Returns information about any matching indices, aliases, and data streams. - This API is **experimental** so may include breaking changes - or be removed in a future version - :arg name: A comma-separated list of names or wildcard - expressions - :arg expand_wildcards: Whether wildcard expressions should get - expanded to open or closed indices (default: open) Valid choices: open, - closed, hidden, none, all Default: open + :arg name: Comma-separated list of names or wildcard + expressions. + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. 
Valid choices are all, + open, closed, hidden, none. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -1477,31 +1704,40 @@ async def resolve_index(self, name, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "timeout", ) - async def add_block(self, index, block, params=None, headers=None): + async def add_block( + self, + index: Any, + block: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Adds a block to an index. - :arg index: A comma separated list of indices to add a block to + :arg index: Comma-separated list of indices to add a block to. :arg block: The block to add (one of read, write, read_only or - metadata) + metadata). :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + should be ignored when unavailable (missing or closed). + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ for param in (index, block): if param in SKIP_IN_PATH: @@ -1512,13 +1748,18 @@ async def add_block(self, index, block, params=None, headers=None): ) @query_params() - async def data_streams_stats(self, name=None, params=None, headers=None): + async def data_streams_stats( + self, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Provides statistics on operations happening in a data stream. - :arg name: A comma-separated list of data stream names; use - `_all` or empty string to perform the operation on all data streams + :arg name: Comma-separated list of data streams; use `_all` or + empty string to perform the operation on all data streams. 
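# A sketch chaining resolve_index and add_block from above; the wildcard
# expression and the choice of the `write` block are assumptions.
from opensearchpy import AsyncOpenSearch

async def block_writes(client: AsyncOpenSearch) -> None:
    resolved = await client.indices.resolve_index(name="logs-*")
    for concrete in resolved["indices"]:
        # Reject write operations on each matching concrete index.
        await client.indices.add_block(index=concrete["name"], block="write")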
""" return await self.transport.perform_request( "GET", @@ -1526,115 +1767,3 @@ async def data_streams_stats(self, name=None, params=None, headers=None): params=params, headers=headers, ) - - @query_params() - async def promote_data_stream(self, name, params=None, headers=None): - """ - Promotes a data stream from a replicated data stream managed by CCR to a - regular data stream - - - :arg name: The name of the data stream - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'name'.") - - return await self.transport.perform_request( - "POST", - _make_path("_data_stream", "_promote", name), - params=params, - headers=headers, - ) - - @query_params() - async def migrate_to_data_stream(self, name, params=None, headers=None): - """ - Migrates an alias to a data stream - - - :arg name: The name of the alias to migrate - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'name'.") - - return await self.transport.perform_request( - "POST", - _make_path("_data_stream", "_migrate", name), - params=params, - headers=headers, - ) - - @query_params( - "allow_no_indices", - "expand_wildcards", - "flush", - "ignore_unavailable", - "run_expensive_tasks", - ) - async def disk_usage(self, index, params=None, headers=None): - """ - Analyzes the disk usage of each field of an index or data stream - - - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version - - :arg index: Comma-separated list of indices or data streams to - analyze the disk usage - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg flush: Whether flush or not before analyzing the index disk - usage. Defaults to true - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg run_expensive_tasks: Must be set to [true] in order for the - task to be performed. Defaults to false. - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'index'.") - - return await self.transport.perform_request( - "POST", _make_path(index, "_disk_usage"), params=params, headers=headers - ) - - @query_params( - "allow_no_indices", "expand_wildcards", "fields", "ignore_unavailable" - ) - async def field_usage_stats(self, index, params=None, headers=None): - """ - Returns the field usage stats for each field of an index - - - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version - - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. 
Valid choices: open, - closed, hidden, none, all Default: open - :arg fields: A comma-separated list of fields to include in the - stats if only a subset of fields should be returned (supports wildcards) - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'index'.") - - return await self.transport.perform_request( - "GET", - _make_path(index, "_field_usage_stats"), - params=params, - headers=headers, - ) diff --git a/opensearchpy/_async/client/indices.pyi b/opensearchpy/_async/client/indices.pyi deleted file mode 100644 index 53f6d87f..00000000 --- a/opensearchpy/_async/client/indices.pyi +++ /dev/null @@ -1,1254 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class IndicesClient(NamespacedClient): - async def analyze( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def refresh( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def flush( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - force: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - wait_if_ongoing: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def create( - self, - index: Any, - *, - body: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def clone( - self, - index: Any, - target: Any, - *, - body: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def open( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def close( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def exists( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - local: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... 
- async def put_mapping( - self, - *, - body: Any, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - write_index_only: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_mapping( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_field_mapping( - self, - fields: Any, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - local: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def put_alias( - self, - index: Any, - name: Any, - *, - body: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def exists_alias( - self, - name: Any, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - async def get_alias( - self, - *, - index: Optional[Any] = ..., - name: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def update_aliases( - self, - *, - body: Any, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_alias( - self, - index: Any, - name: Any, - *, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def put_template( - self, - name: Any, - *, - body: Any, - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - order: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def exists_template( - self, - name: Any, - *, - flat_settings: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - async def get_template( - self, - *, - name: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_template( - self, - name: Any, - *, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def get_settings( - self, - *, - index: Optional[Any] = ..., - name: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def put_settings( - self, - *, - body: Any, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - preserve_existing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def stats( - self, - *, - index: Optional[Any] = ..., - metric: Optional[Any] = ..., - completion_fields: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - fielddata_fields: Optional[Any] = ..., - fields: Optional[Any] = ..., - forbid_closed_indices: Optional[Any] = ..., - groups: Optional[Any] = ..., - include_segment_file_sizes: Optional[Any] = ..., - include_unloaded_segments: Optional[Any] = ..., - level: Optional[Any] = ..., - types: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def segments( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - verbose: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def validate_query( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - all_shards: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - explain: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - q: Optional[Any] = ..., - rewrite: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def clear_cache( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - fielddata: Optional[Any] = ..., - fields: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - query: Optional[Any] = ..., - request: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def recovery( - self, - *, - index: Optional[Any] = ..., - active_only: Optional[Any] = ..., - detailed: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def upgrade( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - only_ancient_segments: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_upgrade( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def flush_synced( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def shard_stores( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - status: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def forcemerge( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flush: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - max_num_segments: Optional[Any] = ..., - only_expunge_deletes: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def shrink( - self, - index: Any, - target: Any, - *, - body: Optional[Any] = ..., - copy_settings: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def split( - self, - index: Any, - target: Any, - *, - body: Optional[Any] = ..., - copy_settings: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def rollover( - self, - alias: Any, - *, - body: Optional[Any] = ..., - new_index: Optional[Any] = ..., - dry_run: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
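# ---------------------------------------------------------------------------
# [Editor's note - illustrative sketch, not part of the diff.] The forcemerge,
# shrink, split, and rollover stubs deleted above are the index maintenance
# APIs. A hedged rollover example, assuming "logs-write" is an alias that
# points at a single write index (alias name is made up):
# ---------------------------------------------------------------------------
import asyncio

from opensearchpy import AsyncOpenSearch


async def main() -> None:
    client = AsyncOpenSearch(hosts=[{"host": "localhost", "port": 9200}])
    try:
        # Roll the alias over to a fresh index once either condition is met;
        # dry_run reports what would happen without performing the rollover.
        result = await client.indices.rollover(
            alias="logs-write",
            body={"conditions": {"max_age": "7d", "max_docs": 1_000_000}},
            dry_run=True,
        )
        print(result["conditions"])
    finally:
        await client.close()


asyncio.run(main())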
- async def freeze( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def unfreeze( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def reload_search_analyzers( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def create_data_stream( - self, - name: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def delete_data_stream( - self, - name: Any, - *, - expand_wildcards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_index_template( - self, - name: Any, - *, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def exists_index_template( - self, - name: Any, - *, - flat_settings: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - async def get_index_template( - self, - *, - name: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def put_index_template( - self, - name: Any, - *, - body: Any, - cause: Optional[Any] = ..., - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def simulate_index_template( - self, - name: Any, - *, - body: Optional[Any] = ..., - cause: Optional[Any] = ..., - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get_data_stream( - self, - *, - name: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def simulate_template( - self, - *, - body: Optional[Any] = ..., - name: Optional[Any] = ..., - cause: Optional[Any] = ..., - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def resolve_index( - self, - name: Any, - *, - expand_wildcards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def add_block( - self, - index: Any, - block: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def data_streams_stats( - self, - *, - name: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def promote_data_stream( - self, - name: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def migrate_to_data_stream( - self, - name: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
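# ---------------------------------------------------------------------------
# [Editor's note - illustrative sketch, not part of the diff.] The data-stream
# stubs removed above (create_data_stream, data_streams_stats,
# delete_data_stream, ...) map onto the usual lifecycle: an index template
# whose pattern matches the stream name, and which declares a `data_stream`
# object, must exist before creation. Sketch assumes a local dev cluster;
# the template and stream names are made up:
# ---------------------------------------------------------------------------
import asyncio

from opensearchpy import AsyncOpenSearch


async def main() -> None:
    client = AsyncOpenSearch(hosts=[{"host": "localhost", "port": 9200}])
    try:
        # A data stream requires a matching template with a data_stream object.
        await client.indices.put_index_template(
            name="logs-template",
            body={"index_patterns": ["logs-app*"], "data_stream": {}},
        )
        await client.indices.create_data_stream(name="logs-app")
        stats = await client.indices.data_streams_stats(name="logs-app")
        print(stats)
        await client.indices.delete_data_stream(name="logs-app")
    finally:
        await client.close()


asyncio.run(main())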
- async def disk_usage( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flush: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - run_expensive_tasks: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def field_usage_stats( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - fields: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/_async/client/ingest.py b/opensearchpy/_async/client/ingest.py index dfc8eb1c..2f8cff27 100644 --- a/opensearchpy/_async/client/ingest.py +++ b/opensearchpy/_async/client/ingest.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,42 +26,65 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + +from typing import Any + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class IngestClient(NamespacedClient): - @query_params("master_timeout", "cluster_manager_timeout", "summary") - async def get_pipeline(self, id=None, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout") + async def get_pipeline( + self, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a pipeline. - :arg id: Comma separated list of pipeline ids. Wildcards - supported - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg summary: Return pipelines without their definitions - (default: false) + :arg id: Comma-separated list of pipeline ids. Wildcards + supported. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. 
+ :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return await self.transport.perform_request( "GET", _make_path("_ingest", "pipeline", id), params=params, headers=headers ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") - async def put_pipeline(self, id, body, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "timeout") + async def put_pipeline( + self, + id: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates a pipeline. - :arg id: Pipeline ID + :arg id: Pipeline ID. :arg body: The ingest definition - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ for param in (id, body): if param in SKIP_IN_PATH: @@ -74,18 +98,24 @@ async def put_pipeline(self, id, body, params=None, headers=None): body=body, ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") - async def delete_pipeline(self, id, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "timeout") + async def delete_pipeline( + self, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes a pipeline. - :arg id: Pipeline ID - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + :arg id: Pipeline ID. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") @@ -98,15 +128,21 @@ async def delete_pipeline(self, id, params=None, headers=None): ) @query_params("verbose") - async def simulate(self, body, id=None, params=None, headers=None): + async def simulate( + self, + body: Any, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to simulate a pipeline with example documents. :arg body: The simulate definition - :arg id: Pipeline ID + :arg id: Pipeline ID. :arg verbose: Verbose mode. Display data output for each - processor in executed pipeline + processor in executed pipeline. Default is false. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -120,7 +156,11 @@ async def simulate(self, body, id=None, params=None, headers=None): ) @query_params() - async def processor_grok(self, params=None, headers=None): + async def processor_grok( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a list of the built-in patterns. 
@@ -128,13 +168,3 @@ async def processor_grok(self, params=None, headers=None): return await self.transport.perform_request( "GET", "/_ingest/processor/grok", params=params, headers=headers ) - - @query_params() - async def geo_ip_stats(self, params=None, headers=None): - """ - Returns statistical information about geoip databases - - """ - return await self.transport.perform_request( - "GET", "/_ingest/geoip/stats", params=params, headers=headers - ) diff --git a/opensearchpy/_async/client/ingest.pyi b/opensearchpy/_async/client/ingest.pyi deleted file mode 100644 index 7e498b6c..00000000 --- a/opensearchpy/_async/client/ingest.pyi +++ /dev/null @@ -1,143 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class IngestClient(NamespacedClient): - async def get_pipeline( - self, - *, - id: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - summary: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def put_pipeline( - self, - id: Any, - *, - body: Any, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
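# ---------------------------------------------------------------------------
# [Editor's note - illustrative sketch, not part of the diff.] The rewritten
# IngestClient above keeps the same surface: put_pipeline stores a pipeline
# definition, and simulate runs sample documents through it (verbose shows
# per-processor output). Assumes a local dev cluster; the pipeline id and
# field names are made up:
# ---------------------------------------------------------------------------
import asyncio

from opensearchpy import AsyncOpenSearch


async def main() -> None:
    client = AsyncOpenSearch(hosts=[{"host": "localhost", "port": 9200}])
    try:
        await client.ingest.put_pipeline(
            id="lowercase-user",
            body={
                "description": "lowercase the user field",
                "processors": [{"lowercase": {"field": "user"}}],
            },
        )
        # Dry-run two-line check: feed a sample doc through the pipeline.
        result = await client.ingest.simulate(
            body={"docs": [{"_source": {"user": "Alice"}}]},
            id="lowercase-user",
            verbose=True,
        )
        print(result["docs"][0])
    finally:
        await client.close()


asyncio.run(main())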
- async def delete_pipeline( - self, - id: Any, - *, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def simulate( - self, - *, - body: Any, - id: Optional[Any] = ..., - verbose: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def processor_grok( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def geo_ip_stats( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/_async/client/nodes.py b/opensearchpy/_async/client/nodes.py index d437fd17..36146fad 100644 --- a/opensearchpy/_async/client/nodes.py +++ b/opensearchpy/_async/client/nodes.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,24 +26,40 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
+# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + +from typing import Any + from .utils import NamespacedClient, _make_path, query_params class NodesClient(NamespacedClient): @query_params("timeout") async def reload_secure_settings( - self, body=None, node_id=None, params=None, headers=None - ): + self, + body: Any = None, + node_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Reloads secure settings. - :arg body: An object containing the password for the - opensearch keystore - :arg node_id: A comma-separated list of node IDs to span the + :arg body: An object containing the password for the opensearch + keystore + :arg node_id: Comma-separated list of node IDs to span the reload/reinit call. Should stay empty because reloading usually involves all cluster nodes. - :arg timeout: Explicit operation timeout + :arg timeout: Operation timeout. """ return await self.transport.perform_request( "POST", @@ -53,21 +70,27 @@ async def reload_secure_settings( ) @query_params("flat_settings", "timeout") - async def info(self, node_id=None, metric=None, params=None, headers=None): + async def info( + self, + node_id: Any = None, + metric: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about nodes in the cluster. - :arg node_id: A comma-separated list of node IDs or names to - limit the returned information; use `_local` to return information from - the node you're connecting to, leave empty to get information from all - nodes - :arg metric: A comma-separated list of metrics you wish - returned. Leave empty to return all. Valid choices: settings, os, - process, jvm, thread_pool, transport, http, plugins, ingest - :arg flat_settings: Return settings in flat format (default: - false) - :arg timeout: Explicit operation timeout + :arg node_id: Comma-separated list of node IDs or names to limit + the returned information; use `_local` to return information from the + node you're connecting to, leave empty to get information from all + nodes. + :arg metric: Comma-separated list of metrics you wish returned. + Leave empty to return all. Valid choices are settings, os, process, jvm, + thread_pool, transport, http, plugins, ingest. + :arg flat_settings: Return settings in flat format. Default is + false. + :arg timeout: Operation timeout. """ return await self.transport.perform_request( "GET", _make_path("_nodes", node_id, metric), params=params, headers=headers @@ -79,49 +102,50 @@ async def info(self, node_id=None, metric=None, params=None, headers=None): "fields", "groups", "include_segment_file_sizes", - "include_unloaded_segments", "level", "timeout", "types", ) async def stats( - self, node_id=None, metric=None, index_metric=None, params=None, headers=None - ): + self, + node_id: Any = None, + metric: Any = None, + index_metric: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns statistical information about nodes in the cluster. 
- :arg node_id: A comma-separated list of node IDs or names to - limit the returned information; use `_local` to return information from - the node you're connecting to, leave empty to get information from all - nodes + :arg node_id: Comma-separated list of node IDs or names to limit + the returned information; use `_local` to return information from the + node you're connecting to, leave empty to get information from all + nodes. :arg metric: Limit the information returned to the specified - metrics Valid choices: _all, breaker, fs, http, indices, jvm, os, - process, thread_pool, transport, discovery, indexing_pressure + metrics. Valid choices are _all, breaker, fs, http, indices, jvm, os, + process, thread_pool, transport, discovery, indexing_pressure. :arg index_metric: Limit the information returned for `indices` metric to the specific index metrics. Isn't used if `indices` (or `all`) - metric isn't specified. Valid choices: _all, completion, docs, - fielddata, query_cache, flush, get, indexing, merge, request_cache, - refresh, search, segments, store, warmer, suggest - :arg completion_fields: A comma-separated list of fields for - `fielddata` and `suggest` index metric (supports wildcards) - :arg fielddata_fields: A comma-separated list of fields for - `fielddata` index metric (supports wildcards) - :arg fields: A comma-separated list of fields for `fielddata` - and `completion` index metric (supports wildcards) - :arg groups: A comma-separated list of search groups for - `search` index metric + metric isn't specified. Valid choices are _all, store, indexing, get, + search, merge, flush, refresh, query_cache, fielddata, docs, warmer, + completion, segments, translog, suggest, request_cache, recovery. + :arg completion_fields: Comma-separated list of fields for + `fielddata` and `suggest` index metric (supports wildcards). + :arg fielddata_fields: Comma-separated list of fields for + `fielddata` index metric (supports wildcards). + :arg fields: Comma-separated list of fields for `fielddata` and + `completion` index metric (supports wildcards). + :arg groups: Comma-separated list of search groups for `search` + index metric. :arg include_segment_file_sizes: Whether to report the aggregated disk usage of each one of the Lucene index files (only - applies if segment stats are requested) - :arg include_unloaded_segments: If set to true segment stats - will include stats for segments that are not currently loaded into - memory + applies if segment stats are requested). Default is false. :arg level: Return indices stats aggregated at index, node or - shard level Valid choices: indices, node, shards Default: node - :arg timeout: Explicit operation timeout - :arg types: A comma-separated list of document types for the - `indexing` index metric + shard level. Valid choices are indices, node, shards. + :arg timeout: Operation timeout. + :arg types: Comma-separated list of document types for the + `indexing` index metric. """ return await self.transport.perform_request( "GET", @@ -133,26 +157,31 @@ async def stats( @query_params( "doc_type", "ignore_idle_threads", "interval", "snapshots", "threads", "timeout" ) - async def hot_threads(self, node_id=None, params=None, headers=None): + async def hot_threads( + self, + node_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about hot threads on each node in the cluster. 
- :arg node_id: A comma-separated list of node IDs or names to - limit the returned information; use `_local` to return information from - the node you're connecting to, leave empty to get information from all - nodes - :arg doc_type: The type to sample (default: cpu) Valid choices: - cpu, wait, block + :arg node_id: Comma-separated list of node IDs or names to limit + the returned information; use `_local` to return information from the + node you're connecting to, leave empty to get information from all + nodes. + :arg doc_type: The type to sample. Valid choices are cpu, wait, + block. :arg ignore_idle_threads: Don't show threads that are in known- idle places, such as waiting on a socket select or pulling from an empty - task queue (default: true) - :arg interval: The interval for the second sampling of threads - :arg snapshots: Number of samples of thread stacktrace (default: - 10) + task queue. Default is True. + :arg interval: The interval for the second sampling of threads. + :arg snapshots: Number of samples of thread stacktrace. Default + is 10. :arg threads: Specify the number of threads to provide - information for (default: 3) - :arg timeout: Explicit operation timeout + information for. Default is 3. + :arg timeout: Operation timeout. """ # type is a reserved word so it cannot be used, use doc_type instead if "doc_type" in params: @@ -166,18 +195,24 @@ async def hot_threads(self, node_id=None, params=None, headers=None): ) @query_params("timeout") - async def usage(self, node_id=None, metric=None, params=None, headers=None): + async def usage( + self, + node_id: Any = None, + metric: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns low-level information about REST actions usage on nodes. - :arg node_id: A comma-separated list of node IDs or names to - limit the returned information; use `_local` to return information from - the node you're connecting to, leave empty to get information from all - nodes + :arg node_id: Comma-separated list of node IDs or names to limit + the returned information; use `_local` to return information from the + node you're connecting to, leave empty to get information from all + nodes. :arg metric: Limit the information returned to the specified - metrics Valid choices: _all, rest_actions - :arg timeout: Explicit operation timeout + metrics. Valid choices are _all, rest_actions. + :arg timeout: Operation timeout. """ return await self.transport.perform_request( "GET", diff --git a/opensearchpy/_async/client/nodes.pyi b/opensearchpy/_async/client/nodes.pyi deleted file mode 100644 index 5f108df4..00000000 --- a/opensearchpy/_async/client/nodes.pyi +++ /dev/null @@ -1,140 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class NodesClient(NamespacedClient): - async def reload_secure_settings( - self, - *, - body: Optional[Any] = ..., - node_id: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def info( - self, - *, - node_id: Optional[Any] = ..., - metric: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def stats( - self, - *, - node_id: Optional[Any] = ..., - metric: Optional[Any] = ..., - index_metric: Optional[Any] = ..., - completion_fields: Optional[Any] = ..., - fielddata_fields: Optional[Any] = ..., - fields: Optional[Any] = ..., - groups: Optional[Any] = ..., - include_segment_file_sizes: Optional[Any] = ..., - include_unloaded_segments: Optional[Any] = ..., - level: Optional[Any] = ..., - timeout: Optional[Any] = ..., - types: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
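# ---------------------------------------------------------------------------
# [Editor's note - illustrative sketch, not part of the diff.] The NodesClient
# methods whose stubs are deleted above are read-only diagnostics; a quick
# tour of info, stats, and hot_threads, assuming a local dev cluster:
# ---------------------------------------------------------------------------
import asyncio

from opensearchpy import AsyncOpenSearch


async def main() -> None:
    client = AsyncOpenSearch(hosts=[{"host": "localhost", "port": 9200}])
    try:
        # Comma-separated metric lists, as described in the docstrings above.
        info = await client.nodes.info(metric="ingest,plugins")
        print(list(info["nodes"]))
        # JVM stats only, for the node handling this request.
        stats = await client.nodes.stats(node_id="_local", metric="jvm")
        print(stats["nodes"])
        # Plain-text hot-threads dump (3 threads by default).
        print(await client.nodes.hot_threads(threads=3))
    finally:
        await client.close()


asyncio.run(main())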
- async def hot_threads( - self, - *, - node_id: Optional[Any] = ..., - doc_type: Optional[Any] = ..., - ignore_idle_threads: Optional[Any] = ..., - interval: Optional[Any] = ..., - snapshots: Optional[Any] = ..., - threads: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def usage( - self, - *, - node_id: Optional[Any] = ..., - metric: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/_async/client/plugins.py b/opensearchpy/_async/client/plugins.py index 2b762ba3..b12214d7 100644 --- a/opensearchpy/_async/client/plugins.py +++ b/opensearchpy/_async/client/plugins.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -8,14 +9,19 @@ # GitHub history for details. import warnings +from typing import Any from ..plugins.alerting import AlertingClient from ..plugins.index_management import IndexManagementClient +from .client import Client from .utils import NamespacedClient class PluginsClient(NamespacedClient): - def __init__(self, client): + alerting: Any + index_management: Any + + def __init__(self, client: Client) -> None: super(PluginsClient, self).__init__(client) # self.query_workbench = QueryWorkbenchClient(client) # self.reporting = ReportingClient(client) @@ -27,7 +33,7 @@ def __init__(self, client): self._dynamic_lookup(client) - def _dynamic_lookup(self, client): + def _dynamic_lookup(self, client: Any) -> None: # Issue : https://github.com/opensearch-project/opensearch-py/issues/90#issuecomment-1003396742 plugins = [ @@ -44,7 +50,7 @@ def _dynamic_lookup(self, client): setattr(client, plugin, getattr(self, plugin)) else: warnings.warn( - f"Cannot load `{plugin}` directly to AsyncOpenSearch. `{plugin}` already exists in AsyncOpenSearch. Please use `AsyncOpenSearch.plugin.{plugin}` instead.", + f"Cannot load `{plugin}` directly to {self.client.__class__.__name__} as it already exists. 
Use `{self.client.__class__.__name__}.plugin.{plugin}` instead.", category=RuntimeWarning, stacklevel=2, ) diff --git a/opensearchpy/_async/client/plugins.pyi b/opensearchpy/_async/client/plugins.pyi deleted file mode 100644 index 88383d01..00000000 --- a/opensearchpy/_async/client/plugins.pyi +++ /dev/null @@ -1,18 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -from typing import Any - -from ..client import AsyncOpenSearch -from ..plugins.alerting import AlertingClient as AlertingClient -from .utils import NamespacedClient as NamespacedClient - -class PluginsClient(NamespacedClient): - alerting: Any - index_management: Any - def __init__(self, client: AsyncOpenSearch) -> None: ... diff --git a/opensearchpy/_async/client/remote.py b/opensearchpy/_async/client/remote.py index 02aa931d..433c9fa5 100644 --- a/opensearchpy/_async/client/remote.py +++ b/opensearchpy/_async/client/remote.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,12 +26,14 @@ # under the License. +from typing import Any + from .utils import NamespacedClient, query_params class RemoteClient(NamespacedClient): @query_params() - async def info(self, params=None, headers=None): + async def info(self, params: Any = None, headers: Any = None) -> Any: return await self.transport.perform_request( "GET", "/_remote/info", params=params, headers=headers ) diff --git a/opensearchpy/_async/client/remote.pyi b/opensearchpy/_async/client/remote.pyi deleted file mode 100644 index 068c690b..00000000 --- a/opensearchpy/_async/client/remote.pyi +++ /dev/null @@ -1,45 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class RemoteClient(NamespacedClient): - async def info( - self, - *, - timeout: Optional[Any] = None, - pretty: Optional[bool] = None, - human: Optional[bool] = None, - error_trace: Optional[bool] = None, - format: Optional[str] = None, - filter_path: Optional[Union[str, Collection[str]]] = None, - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, - ) -> Any: ... diff --git a/opensearchpy/_async/client/remote_store.py b/opensearchpy/_async/client/remote_store.py new file mode 100644 index 00000000..8a72f41c --- /dev/null +++ b/opensearchpy/_async/client/remote_store.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + +from typing import Any + +from .utils import SKIP_IN_PATH, NamespacedClient, query_params + + +class RemoteStoreClient(NamespacedClient): + @query_params("cluster_manager_timeout", "wait_for_completion") + async def restore( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Restores from remote store. + + + :arg body: Comma-separated list of index IDs + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. Default is false. + """ + if body in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'body'.") + + return await self.transport.perform_request( + "POST", "/_remotestore/_restore", params=params, headers=headers, body=body + ) diff --git a/opensearchpy/_async/client/security.py b/opensearchpy/_async/client/security.py index 65021765..dc893f86 100644 --- a/opensearchpy/_async/client/security.py +++ b/opensearchpy/_async/client/security.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -7,46 +8,77 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -from ..client.utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
+# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + +from typing import Any + +from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class SecurityClient(NamespacedClient): + from ._patch import health_check, update_audit_config # type: ignore + @query_params() - async def get_account_details(self, params=None, headers=None): + async def get_account_details( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns account details for the current user. + """ return await self.transport.perform_request( - "GET", - _make_path("_plugins", "_security", "api", "account"), - params=params, - headers=headers, + "GET", "/_plugins/_security/api/account", params=params, headers=headers ) @query_params() - async def change_password(self, body, params=None, headers=None): + async def change_password( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Changes the password for the current user. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "PUT", - _make_path("_plugins", "_security", "api", "account"), + "/_plugins/_security/api/account", params=params, headers=headers, body=body, ) @query_params() - async def get_action_group(self, action_group, params=None, headers=None): + async def get_action_group( + self, + action_group: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves one action group. + + + :arg action_group: Action group to retrieve. """ if action_group in SKIP_IN_PATH: raise ValueError( - "Empty value passed for a required argument 'action-group'." + "Empty value passed for a required argument 'action_group'." ) return await self.transport.perform_request( @@ -57,25 +89,38 @@ async def get_action_group(self, action_group, params=None, headers=None): ) @query_params() - async def get_action_groups(self, params=None, headers=None): + async def get_action_groups( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves all action groups. + """ return await self.transport.perform_request( "GET", - _make_path("_plugins", "_security", "api", "actiongroups"), + "/_plugins/_security/api/actiongroups/", params=params, headers=headers, ) @query_params() - async def delete_action_group(self, action_group, params=None, headers=None): + async def delete_action_group( + self, + action_group: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Deletes the specified action group. + Delete a specified action group. + + + :arg action_group: Action group to delete. """ if action_group in SKIP_IN_PATH: raise ValueError( - "Empty value passed for a required argument 'action-group'." + "Empty value passed for a required argument 'action_group'." 
) return await self.transport.perform_request( @@ -86,9 +131,19 @@ async def delete_action_group(self, action_group, params=None, headers=None): ) @query_params() - async def create_action_group(self, action_group, body, params=None, headers=None): + async def create_action_group( + self, + action_group: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or replaces the specified action group. + + + :arg action_group: The name of the action group to create or + replace """ for param in (action_group, body): if param in SKIP_IN_PATH: @@ -103,9 +158,17 @@ async def create_action_group(self, action_group, body, params=None, headers=Non ) @query_params() - async def patch_action_group(self, action_group, body, params=None, headers=None): + async def patch_action_group( + self, + action_group: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates individual attributes of an action group. + + """ for param in (action_group, body): if param in SKIP_IN_PATH: @@ -120,25 +183,39 @@ async def patch_action_group(self, action_group, body, params=None, headers=None ) @query_params() - async def patch_action_groups(self, body, params=None, headers=None): + async def patch_action_groups( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates, updates, or deletes multiple action groups in a single call. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "PATCH", - _make_path("_plugins", "_security", "api", "actiongroups"), + "/_plugins/_security/api/actiongroups", params=params, headers=headers, body=body, ) @query_params() - async def get_user(self, username, params=None, headers=None): + async def get_user( + self, + username: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Retrieves one user. + Retrieve one internal user. + + """ if username in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'username'.") @@ -151,21 +228,33 @@ async def get_user(self, username, params=None, headers=None): ) @query_params() - async def get_users(self, params=None, headers=None): + async def get_users( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Retrieves all users. + Retrieve all internal users. + """ return await self.transport.perform_request( "GET", - _make_path("_plugins", "_security", "api", "internalusers"), + "/_plugins/_security/api/internalusers", params=params, headers=headers, ) @query_params() - async def delete_user(self, username, params=None, headers=None): + async def delete_user( + self, + username: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Deletes the specified user. + Delete the specified user. + + """ if username in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'username'.") @@ -178,9 +267,17 @@ async def delete_user(self, username, params=None, headers=None): ) @query_params() - async def create_user(self, username, body, params=None, headers=None): + async def create_user( + self, + username: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or replaces the specified user. 
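# Usage sketch for the action-group write APIs above. The body shapes
# (an "allowed_actions" list, and RFC 6902-style operations for the
# bulk PATCH) are assumptions based on the Security plugin REST API,
# not something this diff defines.
async def manage_action_groups(client) -> None:
    # PUT /_plugins/_security/api/actiongroups/<name>
    await client.security.create_action_group(
        "my-read-group",
        body={"allowed_actions": ["indices:data/read*"]},
    )

    # PATCH /_plugins/_security/api/actiongroups -- bulk, JSON Patch style.
    await client.security.patch_action_groups(
        body=[
            {
                "op": "replace",
                "path": "/my-read-group/allowed_actions",
                "value": ["indices:data/read*", "indices:admin/mappings/get"],
            }
        ]
    )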
+ + """ for param in (username, body): if param in SKIP_IN_PATH: @@ -195,9 +292,17 @@ async def create_user(self, username, body, params=None, headers=None): ) @query_params() - async def patch_user(self, username, body, params=None, headers=None): + async def patch_user( + self, + username: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates individual attributes of an internal user. + + """ for param in (username, body): if param in SKIP_IN_PATH: @@ -212,25 +317,39 @@ async def patch_user(self, username, body, params=None, headers=None): ) @query_params() - async def patch_users(self, body, params=None, headers=None): + async def patch_users( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates, updates, or deletes multiple internal users in a single call. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "PATCH", - _make_path("_plugins", "_security", "api", "internalusers"), + "/_plugins/_security/api/internalusers", params=params, headers=headers, body=body, ) @query_params() - async def get_role(self, role, params=None, headers=None): + async def get_role( + self, + role: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves one role. + + """ if role in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'role'.") @@ -243,21 +362,30 @@ async def get_role(self, role, params=None, headers=None): ) @query_params() - async def get_roles(self, params=None, headers=None): + async def get_roles( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves all roles. + """ return await self.transport.perform_request( - "GET", - _make_path("_plugins", "_security", "api", "roles"), - params=params, - headers=headers, + "GET", "/_plugins/_security/api/roles/", params=params, headers=headers ) @query_params() - async def delete_role(self, role, params=None, headers=None): + async def delete_role( + self, + role: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Deletes the specified role. + Delete the specified role. + + """ if role in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'role'.") @@ -270,9 +398,17 @@ async def delete_role(self, role, params=None, headers=None): ) @query_params() - async def create_role(self, role, body, params=None, headers=None): + async def create_role( + self, + role: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or replaces the specified role. + + """ for param in (role, body): if param in SKIP_IN_PATH: @@ -287,9 +423,17 @@ async def create_role(self, role, body, params=None, headers=None): ) @query_params() - async def patch_role(self, role, body, params=None, headers=None): + async def patch_role( + self, + role: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates individual attributes of a role. + + """ for param in (role, body): if param in SKIP_IN_PATH: @@ -304,25 +448,39 @@ async def patch_role(self, role, body, params=None, headers=None): ) @query_params() - async def patch_roles(self, body, params=None, headers=None): + async def patch_roles( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates, updates, or deletes multiple roles in a single call. 
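# Usage sketch for the internal-user and role CRUD methods above. The
# body field names (password, backend_roles, index_permissions) follow
# the Security plugin API and are assumptions for illustration only.
async def provision_user_and_role(client) -> None:
    # PUT /_plugins/_security/api/internalusers/<username>
    await client.security.create_user(
        "analyst",
        body={"password": "correct-horse-battery-staple", "backend_roles": []},
    )

    # PUT /_plugins/_security/api/roles/<role>
    await client.security.create_role(
        "logs-reader",
        body={
            "cluster_permissions": [],
            "index_permissions": [
                {"index_patterns": ["logs-*"], "allowed_actions": ["read"]}
            ],
        },
    )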
+ + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "PATCH", - _make_path("_plugins", "_security", "api", "roles"), + "/_plugins/_security/api/roles", params=params, headers=headers, body=body, ) @query_params() - async def get_role_mapping(self, role, params=None, headers=None): + async def get_role_mapping( + self, + role: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves one role mapping. + + """ if role in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'role'.") @@ -335,21 +493,33 @@ async def get_role_mapping(self, role, params=None, headers=None): ) @query_params() - async def get_role_mappings(self, params=None, headers=None): + async def get_role_mappings( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves all role mappings. + """ return await self.transport.perform_request( "GET", - _make_path("_plugins", "_security", "api", "rolesmapping"), + "/_plugins/_security/api/rolesmapping", params=params, headers=headers, ) @query_params() - async def delete_role_mapping(self, role, params=None, headers=None): + async def delete_role_mapping( + self, + role: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes the specified role mapping. + + """ if role in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'role'.") @@ -362,9 +532,17 @@ async def delete_role_mapping(self, role, params=None, headers=None): ) @query_params() - async def create_role_mapping(self, role, body, params=None, headers=None): + async def create_role_mapping( + self, + role: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or replaces the specified role mapping. + + """ for param in (role, body): if param in SKIP_IN_PATH: @@ -379,9 +557,17 @@ async def create_role_mapping(self, role, body, params=None, headers=None): ) @query_params() - async def patch_role_mapping(self, role, body, params=None, headers=None): + async def patch_role_mapping( + self, + role: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates individual attributes of a role mapping. + + """ for param in (role, body): if param in SKIP_IN_PATH: @@ -396,25 +582,39 @@ async def patch_role_mapping(self, role, body, params=None, headers=None): ) @query_params() - async def patch_role_mappings(self, body, params=None, headers=None): + async def patch_role_mappings( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates multiple role mappings in a single call. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "PATCH", - _make_path("_plugins", "_security", "api", "rolesmapping"), + "/_plugins/_security/api/rolesmapping", params=params, headers=headers, body=body, ) @query_params() - async def get_tenant(self, tenant, params=None, headers=None): + async def get_tenant( + self, + tenant: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves one tenant. 
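# Usage sketch for the role-mapping APIs above, tying a role to users.
# The mapping body keys (users, backend_roles) are assumptions based on
# the Security plugin API.
async def map_role_to_users(client) -> None:
    # PUT /_plugins/_security/api/rolesmapping/<role>
    await client.security.create_role_mapping(
        "logs-reader",
        body={"users": ["analyst"], "backend_roles": []},
    )

    # GET /_plugins/_security/api/rolesmapping
    mappings = await client.security.get_role_mappings()
    print(mappings.get("logs-reader"))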
+ + """ if tenant in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'tenant'.") @@ -427,21 +627,30 @@ async def get_tenant(self, tenant, params=None, headers=None): ) @query_params() - async def get_tenants(self, params=None, headers=None): + async def get_tenants( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves all tenants. + """ return await self.transport.perform_request( - "GET", - _make_path("_plugins", "_security", "api", "tenants"), - params=params, - headers=headers, + "GET", "/_plugins/_security/api/tenants/", params=params, headers=headers ) @query_params() - async def delete_tenant(self, tenant, params=None, headers=None): + async def delete_tenant( + self, + tenant: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Deletes the specified tenant. + Delete the specified tenant. + + """ if tenant in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'tenant'.") @@ -454,9 +663,17 @@ async def delete_tenant(self, tenant, params=None, headers=None): ) @query_params() - async def create_tenant(self, tenant, body, params=None, headers=None): + async def create_tenant( + self, + tenant: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or replaces the specified tenant. + + """ for param in (tenant, body): if param in SKIP_IN_PATH: @@ -471,9 +688,17 @@ async def create_tenant(self, tenant, body, params=None, headers=None): ) @query_params() - async def patch_tenant(self, tenant, body, params=None, headers=None): + async def patch_tenant( + self, + tenant: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Add, delete, or modify a single tenant. + + """ for param in (tenant, body): if param in SKIP_IN_PATH: @@ -488,60 +713,86 @@ async def patch_tenant(self, tenant, body, params=None, headers=None): ) @query_params() - async def patch_tenants(self, body, params=None, headers=None): + async def patch_tenants( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Add, delete, or modify multiple tenants in a single call. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "PATCH", - _make_path("_plugins", "_security", "api", "tenants"), + "/_plugins/_security/api/tenants/", params=params, headers=headers, body=body, ) @query_params() - async def get_configuration(self, params=None, headers=None): + async def get_configuration( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Retrieves the current Security plugin configuration in JSON format. + Returns the current Security plugin configuration in JSON format. + """ return await self.transport.perform_request( "GET", - _make_path("_plugins", "_security", "api", "securityconfig"), + "/_plugins/_security/api/securityconfig", params=params, headers=headers, ) @query_params() - async def update_configuration(self, body, params=None, headers=None): + async def update_configuration( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Retrieves the current Security plugin configuration in JSON format. + Adds or updates the existing configuration using the REST API. 
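# Usage sketch for the tenant APIs and the security-config read above.
# The tenant "description" body field is an assumption based on the
# Security plugin API.
async def manage_tenants(client) -> None:
    # PUT /_plugins/_security/api/tenants/<tenant>
    await client.security.create_tenant(
        "analytics", body={"description": "Dashboards tenant for analysts"}
    )

    # GET /_plugins/_security/api/tenants/
    print(sorted(await client.security.get_tenants()))

    # GET /_plugins/_security/api/securityconfig
    config = await client.security.get_configuration()
    print(list(config))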
+ + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "PUT", - _make_path("_plugins", "_security", "api", "securityconfig", "config"), + "/_plugins/_security/api/securityconfig/config", params=params, headers=headers, body=body, ) @query_params() - async def patch_configuration(self, body, params=None, headers=None): + async def patch_configuration( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Updates the existing configuration using the REST API. + A PATCH call is used to update the existing configuration using the REST API. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "PATCH", - _make_path("_plugins", "_security", "api", "securityconfig"), + "/_plugins/_security/api/securityconfig", params=params, headers=headers, body=body, @@ -549,10 +800,15 @@ async def patch_configuration(self, body, params=None, headers=None): @query_params() async def get_distinguished_names( - self, cluster_name=None, params=None, headers=None - ): + self, + cluster_name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves all distinguished names in the allow list. + + """ return await self.transport.perform_request( "GET", @@ -563,14 +819,22 @@ async def get_distinguished_names( @query_params() async def update_distinguished_names( - self, cluster_name, body, params=None, headers=None - ): + self, + cluster_name: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Adds or updates the specified distinguished names in the cluster's or node's allow list. + Adds or updates the specified distinguished names in the cluster’s or node’s + allow list. + + """ - for param in (cluster_name, body): - if param in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument.") + if cluster_name in SKIP_IN_PATH: + raise ValueError( + "Empty value passed for a required argument 'cluster_name'." + ) return await self.transport.perform_request( "PUT", @@ -581,13 +845,21 @@ async def update_distinguished_names( ) @query_params() - async def delete_distinguished_names(self, cluster_name, params=None, headers=None): + async def delete_distinguished_names( + self, + cluster_name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Deletes all distinguished names in the specified cluster's or node's allow list. + Deletes all distinguished names in the specified cluster’s or node’s allow + list. + + """ if cluster_name in SKIP_IN_PATH: raise ValueError( - "Empty value passed for a required argument 'cluster-name'." + "Empty value passed for a required argument 'cluster_name'." ) return await self.transport.perform_request( @@ -598,106 +870,159 @@ async def delete_distinguished_names(self, cluster_name, params=None, headers=No ) @query_params() - async def get_certificates(self, params=None, headers=None): + async def get_certificates( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Retrieves the cluster's security certificates. + Retrieves the cluster’s security certificates. 
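# Usage sketch for the distinguished-names allow-list APIs above. Note
# that update_distinguished_names now validates only cluster_name, since
# body became optional. The "nodes_dn" body key is an assumption based
# on the Security plugin API.
async def allow_cluster_dn(client) -> None:
    # PUT /_plugins/_security/api/nodesdn/<cluster_name>
    await client.security.update_distinguished_names(
        "cluster-a",
        body={"nodes_dn": ["CN=cluster-a.example.com"]},
    )

    # GET /_plugins/_security/api/nodesdn
    print(await client.security.get_distinguished_names())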
+ """ return await self.transport.perform_request( - "GET", - _make_path("_plugins", "_security", "api", "ssl", "certs"), - params=params, - headers=headers, + "GET", "/_plugins/_security/api/ssl/certs", params=params, headers=headers ) @query_params() - async def reload_transport_certificates(self, params=None, headers=None): + async def reload_transport_certificates( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Reloads SSL certificates that are about to expire without restarting the OpenSearch node. + Reload transport layer communication certificates. + """ return await self.transport.perform_request( "PUT", - _make_path( - "_opendistro", "_security", "api", "ssl", "transport", "reloadcerts" - ), + "/_plugins/_security/api/ssl/transport/reloadcerts", params=params, headers=headers, ) @query_params() - async def reload_http_certificates(self, params=None, headers=None): + async def reload_http_certificates( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Reloads SSL certificates that are about to expire without restarting the OpenSearch node. + Reload HTTP layer communication certificates. + """ return await self.transport.perform_request( "PUT", - _make_path("_opendistro", "_security", "api", "ssl", "http", "reloadcerts"), + "/_plugins/_security/api/ssl/http/reloadcerts", params=params, headers=headers, ) @query_params() - async def flush_cache(self, params=None, headers=None): + async def flush_cache( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Flushes the Security plugin user, authentication, and authorization cache. + """ return await self.transport.perform_request( - "DELETE", - _make_path("_plugins", "_security", "api", "cache"), - params=params, - headers=headers, + "DELETE", "/_plugins/_security/api/cache", params=params, headers=headers ) @query_params() - async def health_check(self, params=None, headers=None): + async def health( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Checks to see if the Security plugin is up and running. + """ return await self.transport.perform_request( - "GET", - _make_path("_plugins", "_security", "health"), - params=params, - headers=headers, + "GET", "/_plugins/_security/health", params=params, headers=headers ) @query_params() - async def get_audit_configuration(self, params=None, headers=None): + async def get_audit_configuration( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - A GET call retrieves the audit configuration. + Retrieves the audit configuration. + """ return await self.transport.perform_request( - "GET", - _make_path("_opendistro", "_security", "api", "audit"), - params=params, - headers=headers, + "GET", "/_plugins/_security/api/audit", params=params, headers=headers ) @query_params() - async def update_audit_config(self, body, params=None, headers=None): + async def update_audit_configuration( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - A PUT call updates the audit configuration. + Updates the audit configuration. 
+ + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "PUT", - _make_path("_opendistro", "_security", "api", "audit", "config"), + "/_plugins/_security/api/audit/config", params=params, headers=headers, body=body, ) @query_params() - async def patch_audit_configuration(self, body, params=None, headers=None): + async def patch_audit_configuration( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ A PATCH call is used to update specified fields in the audit configuration. + + + """ + if body in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'body'.") + + return await self.transport.perform_request( + "PATCH", + "/_plugins/_security/api/audit", + params=params, + headers=headers, + body=body, + ) + + @query_params() + async def patch_distinguished_names( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Bulk update of distinguished names. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return await self.transport.perform_request( "PATCH", - _make_path("_opendistro", "_security", "api", "audit"), + "/_plugins/_security/api/nodesdn", params=params, headers=headers, body=body, diff --git a/opensearchpy/_async/client/security.pyi b/opensearchpy/_async/client/security.pyi deleted file mode 100644 index 77239296..00000000 --- a/opensearchpy/_async/client/security.pyi +++ /dev/null @@ -1,206 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -from typing import Any, Union - -from ..client.utils import NamespacedClient as NamespacedClient - -class SecurityClient(NamespacedClient): - async def get_account_details( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def change_password( - self, - body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Any: ... - async def get_action_group( - self, - action_group: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Any: ... - async def get_action_groups( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def delete_action_group( - self, - action_group: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Any: ... - async def create_action_group( - self, - action_group: Any, - body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Any: ... - async def patch_action_group( - self, - action_group: Any, - body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Any: ... - async def patch_action_groups( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def get_user( - self, - username: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Any: ... - async def get_users( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def delete_user( - self, - username: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Any: ... 
- async def create_user( - self, - username: Any, - body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Any: ... - async def patch_user( - self, - username: Any, - body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Any: ... - async def patch_users( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def get_role( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def get_roles( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def delete_role( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def create_role( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def patch_role( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def patch_roles( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def get_role_mapping( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def get_role_mappings( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def delete_role_mapping( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def create_role_mapping( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def patch_role_mapping( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def patch_role_mappings( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def get_tenant( - self, - tenant: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Any: ... - async def get_tenants( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def delete_tenant( - self, - tenant: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Any: ... - async def create_tenant( - self, - tenant: Any, - body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Any: ... - async def patch_tenant( - self, - tenant: Any, - body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Any: ... - async def patch_tenants( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def get_configuration( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def update_configuration( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def patch_configuration( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def get_distinguished_names( - self, - cluster_name: Union[Any, None] = ..., - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Any: ... - async def update_distinguished_names( - self, - cluster_name: Any, - body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Any: ... 
- async def delete_distinguished_names( - self, - cluster_name: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Any: ... - async def get_certificates( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def reload_transport_certificates( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def reload_http_certificates( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def flush_cache( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def health_check( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def get_audit_configuration( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def update_audit_config( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... - async def patch_audit_configuration( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Any: ... diff --git a/opensearchpy/_async/client/snapshot.py b/opensearchpy/_async/client/snapshot.py index 4f2acd6a..97ffec72 100644 --- a/opensearchpy/_async/client/snapshot.py +++ b/opensearchpy/_async/client/snapshot.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,25 +26,45 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + +from typing import Any + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class SnapshotClient(NamespacedClient): - @query_params("master_timeout", "cluster_manager_timeout", "wait_for_completion") - async def create(self, repository, snapshot, body=None, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "wait_for_completion") + async def create( + self, + repository: Any, + snapshot: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates a snapshot in a repository. - :arg repository: A repository name - :arg snapshot: A snapshot name + :arg repository: Repository name. + :arg snapshot: Snapshot name. :arg body: The snapshot definition - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg wait_for_completion: Should this request wait until the - operation has completed before returning + operation has completed before returning. Default is false. 
""" for param in (repository, snapshot): if param in SKIP_IN_PATH: @@ -57,18 +78,25 @@ async def create(self, repository, snapshot, body=None, params=None, headers=Non body=body, ) - @query_params("master_timeout", "cluster_manager_timeout") - async def delete(self, repository, snapshot, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout") + async def delete( + self, + repository: Any, + snapshot: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes a snapshot. - :arg repository: A repository name - :arg snapshot: A snapshot name - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + :arg repository: Repository name. + :arg snapshot: Snapshot name. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ for param in (repository, snapshot): if param in SKIP_IN_PATH: @@ -82,33 +110,31 @@ async def delete(self, repository, snapshot, params=None, headers=None): ) @query_params( - "ignore_unavailable", - "include_repository", - "index_details", - "master_timeout", - "cluster_manager_timeout", - "verbose", + "cluster_manager_timeout", "ignore_unavailable", "master_timeout", "verbose" ) - async def get(self, repository, snapshot, params=None, headers=None): + async def get( + self, + repository: Any, + snapshot: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about a snapshot. - :arg repository: A repository name - :arg snapshot: A comma-separated list of snapshot names + :arg repository: Repository name. + :arg snapshot: Comma-separated list of snapshot names. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg ignore_unavailable: Whether to ignore unavailable snapshots, defaults to false which means a SnapshotMissingException is - thrown - :arg include_repository: Whether to include the repository name - in the snapshot info. Defaults to true. - :arg index_details: Whether to include details of each index in - the snapshot, if those details are available. Defaults to false. - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + thrown. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg verbose: Whether to show verbose snapshot info or only show - the basic info found in the repository index blob + the basic info found in the repository index blob. """ for param in (repository, snapshot): if param in SKIP_IN_PATH: @@ -121,19 +147,25 @@ async def get(self, repository, snapshot, params=None, headers=None): headers=headers, ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") - async def delete_repository(self, repository, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "timeout") + async def delete_repository( + self, + repository: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes a repository. 
:arg repository: Name of the snapshot repository to unregister. Wildcard (`*`) patterns are supported. - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if repository in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'repository'.") @@ -145,38 +177,51 @@ async def delete_repository(self, repository, params=None, headers=None): headers=headers, ) - @query_params("local", "master_timeout", "cluster_manager_timeout") - async def get_repository(self, repository=None, params=None, headers=None): + @query_params("cluster_manager_timeout", "local", "master_timeout") + async def get_repository( + self, + repository: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about a repository. - :arg repository: A comma-separated list of repository names + :arg repository: Comma-separated list of repository names. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return await self.transport.perform_request( "GET", _make_path("_snapshot", repository), params=params, headers=headers ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout", "verify") - async def create_repository(self, repository, body, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "timeout", "verify") + async def create_repository( + self, + repository: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates a repository. - :arg repository: A repository name + :arg repository: Repository name. :arg body: The repository definition - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout - :arg verify: Whether to verify the repository after creation + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. + :arg verify: Whether to verify the repository after creation. 
""" for param in (repository, body): if param in SKIP_IN_PATH: @@ -190,21 +235,29 @@ async def create_repository(self, repository, body, params=None, headers=None): body=body, ) - @query_params("master_timeout", "cluster_manager_timeout", "wait_for_completion") - async def restore(self, repository, snapshot, body=None, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "wait_for_completion") + async def restore( + self, + repository: Any, + snapshot: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Restores a snapshot. - :arg repository: A repository name - :arg snapshot: A snapshot name + :arg repository: Repository name. + :arg snapshot: Snapshot name. :arg body: Details of what to restore - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg wait_for_completion: Should this request wait until the - operation has completed before returning + operation has completed before returning. Default is false. """ for param in (repository, snapshot): if param in SKIP_IN_PATH: @@ -218,21 +271,28 @@ async def restore(self, repository, snapshot, body=None, params=None, headers=No body=body, ) - @query_params("ignore_unavailable", "master_timeout", "cluster_manager_timeout") - async def status(self, repository=None, snapshot=None, params=None, headers=None): + @query_params("cluster_manager_timeout", "ignore_unavailable", "master_timeout") + async def status( + self, + repository: Any = None, + snapshot: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about the status of a snapshot. - :arg repository: A repository name - :arg snapshot: A comma-separated list of snapshot names + :arg repository: Repository name. + :arg snapshot: Comma-separated list of snapshot names. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg ignore_unavailable: Whether to ignore unavailable snapshots, defaults to false which means a SnapshotMissingException is - thrown - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + thrown. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return await self.transport.perform_request( "GET", @@ -241,18 +301,24 @@ async def status(self, repository=None, snapshot=None, params=None, headers=None headers=headers, ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") - async def verify_repository(self, repository, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "timeout") + async def verify_repository( + self, + repository: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Verifies a repository. 
- :arg repository: A repository name - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + :arg repository: Repository name. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if repository in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'repository'.") @@ -264,18 +330,24 @@ async def verify_repository(self, repository, params=None, headers=None): headers=headers, ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") - async def cleanup_repository(self, repository, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "timeout") + async def cleanup_repository( + self, + repository: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Removes stale data from repository. - :arg repository: A repository name - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + :arg repository: Repository name. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if repository in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'repository'.") @@ -287,22 +359,29 @@ async def cleanup_repository(self, repository, params=None, headers=None): headers=headers, ) - @query_params("master_timeout", "cluster_manager_timeout") + @query_params("cluster_manager_timeout", "master_timeout") async def clone( - self, repository, snapshot, target_snapshot, body, params=None, headers=None - ): + self, + repository: Any, + snapshot: Any, + target_snapshot: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Clones indices from one snapshot into another snapshot in the same repository. - :arg repository: A repository name - :arg snapshot: The name of the snapshot to clone from - :arg target_snapshot: The name of the cloned snapshot to create + :arg repository: Repository name. + :arg snapshot: Snapshot name. + :arg target_snapshot: The name of the cloned snapshot to create. :arg body: The snapshot clone definition - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. 
""" for param in (repository, snapshot, target_snapshot, body): if param in SKIP_IN_PATH: @@ -315,56 +394,3 @@ async def clone( headers=headers, body=body, ) - - @query_params( - "blob_count", - "concurrency", - "detailed", - "early_read_node_count", - "max_blob_size", - "max_total_data_size", - "rare_action_probability", - "rarely_abort_writes", - "read_node_count", - "seed", - "timeout", - ) - async def repository_analyze(self, repository, params=None, headers=None): - """ - Analyzes a repository for correctness and performance - - - :arg repository: A repository name - :arg blob_count: Number of blobs to create during the test. - Defaults to 100. - :arg concurrency: Number of operations to run concurrently - during the test. Defaults to 10. - :arg detailed: Whether to return detailed results or a summary. - Defaults to 'false' so that only the summary is returned. - :arg early_read_node_count: Number of nodes on which to perform - an early read on a blob, i.e. before writing has completed. Early reads - are rare actions so the 'rare_action_probability' parameter is also - relevant. Defaults to 2. - :arg max_blob_size: Maximum size of a blob to create during the - test, e.g '1gb' or '100mb'. Defaults to '10mb'. - :arg max_total_data_size: Maximum total size of all blobs to - create during the test, e.g '1tb' or '100gb'. Defaults to '1gb'. - :arg rare_action_probability: Probability of taking a rare - action such as an early read or an overwrite. Defaults to 0.02. - :arg rarely_abort_writes: Whether to rarely abort writes before - they complete. Defaults to 'true'. - :arg read_node_count: Number of nodes on which to read a blob - after writing. Defaults to 10. - :arg seed: Seed for the random number generator used to create - the test workload. Defaults to a random value. - :arg timeout: Explicit operation timeout. Defaults to '30s'. - """ - if repository in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'repository'.") - - return await self.transport.perform_request( - "POST", - _make_path("_snapshot", repository, "_analyze"), - params=params, - headers=headers, - ) diff --git a/opensearchpy/_async/client/snapshot.pyi b/opensearchpy/_async/client/snapshot.pyi deleted file mode 100644 index 2167c97f..00000000 --- a/opensearchpy/_async/client/snapshot.pyi +++ /dev/null @@ -1,292 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class SnapshotClient(NamespacedClient): - async def create( - self, - repository: Any, - snapshot: Any, - *, - body: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete( - self, - repository: Any, - snapshot: Any, - *, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get( - self, - repository: Any, - snapshot: Any, - *, - ignore_unavailable: Optional[Any] = ..., - include_repository: Optional[Any] = ..., - index_details: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - verbose: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def delete_repository( - self, - repository: Any, - *, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def get_repository( - self, - *, - repository: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def create_repository( - self, - repository: Any, - *, - body: Any, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - verify: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def restore( - self, - repository: Any, - snapshot: Any, - *, - body: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def status( - self, - *, - repository: Optional[Any] = ..., - snapshot: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- async def verify_repository( - self, - repository: Any, - *, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def cleanup_repository( - self, - repository: Any, - *, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def clone( - self, - repository: Any, - snapshot: Any, - target_snapshot: Any, - *, - body: Any, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def repository_analyze( - self, - repository: Any, - *, - blob_count: Optional[Any] = ..., - concurrency: Optional[Any] = ..., - detailed: Optional[Any] = ..., - early_read_node_count: Optional[Any] = ..., - max_blob_size: Optional[Any] = ..., - max_total_data_size: Optional[Any] = ..., - rare_action_probability: Optional[Any] = ..., - rarely_abort_writes: Optional[Any] = ..., - read_node_count: Optional[Any] = ..., - seed: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
diff --git a/opensearchpy/_async/client/tasks.py b/opensearchpy/_async/client/tasks.py index 2b49ddc0..39aefe93 100644 --- a/opensearchpy/_async/client/tasks.py +++ b/opensearchpy/_async/client/tasks.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,7 +26,18 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + import warnings +from typing import Any from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -40,57 +52,58 @@ class TasksClient(NamespacedClient): "timeout", "wait_for_completion", ) - async def list(self, params=None, headers=None): + async def list( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a list of tasks. - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version - - :arg actions: A comma-separated list of actions that should be + :arg actions: Comma-separated list of actions that should be returned. Leave empty to return all. - :arg detailed: Return detailed task information (default: false) + :arg detailed: Return detailed task information. Default is + false. :arg group_by: Group tasks by nodes or parent/child - relationships Valid choices: nodes, parents, none Default: nodes - :arg nodes: A comma-separated list of node IDs or names to limit + relationships. Valid choices are nodes, parents, none. + :arg nodes: Comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the - node you're connecting to, leave empty to get information from all nodes + node you're connecting to, leave empty to get information from all + nodes. :arg parent_task_id: Return tasks with specified parent task id (node_id:task_number). Set to -1 to return all. - :arg timeout: Explicit operation timeout - :arg wait_for_completion: Wait for the matching tasks to - complete (default: false) + :arg timeout: Operation timeout. + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. Default is false. """ return await self.transport.perform_request( "GET", "/_tasks", params=params, headers=headers ) @query_params("actions", "nodes", "parent_task_id", "wait_for_completion") - async def cancel(self, task_id=None, params=None, headers=None): + async def cancel( + self, + task_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Cancels a task, if it can be cancelled through an API. - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version - :arg task_id: Cancel the task with specified task id - (node_id:task_number) - :arg actions: A comma-separated list of actions that should be + (node_id:task_number). + :arg actions: Comma-separated list of actions that should be cancelled. Leave empty to cancel all. 
- :arg nodes: A comma-separated list of node IDs or names to limit + :arg nodes: Comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the - node you're connecting to, leave empty to get information from all nodes + node you're connecting to, leave empty to get information from all + nodes. :arg parent_task_id: Cancel tasks with specified parent task id (node_id:task_number). Set to -1 to cancel all. - :arg wait_for_completion: Should the request block until the - cancellation of the task and its descendant tasks is completed. Defaults - to false + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. Default is false. """ return await self.transport.perform_request( "POST", @@ -100,21 +113,21 @@ async def cancel(self, task_id=None, params=None, headers=None): ) @query_params("timeout", "wait_for_completion") - async def get(self, task_id=None, params=None, headers=None): + async def get( + self, + task_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about a task. - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version - :arg task_id: Return the task with specified id - (node_id:task_number) - :arg timeout: Explicit operation timeout - :arg wait_for_completion: Wait for the matching tasks to - complete (default: false) + (node_id:task_number). + :arg timeout: Operation timeout. + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. Default is false. """ if task_id in SKIP_IN_PATH: warnings.warn( diff --git a/opensearchpy/_async/client/tasks.pyi b/opensearchpy/_async/client/tasks.pyi deleted file mode 100644 index ae777158..00000000 --- a/opensearchpy/_async/client/tasks.pyi +++ /dev/null @@ -1,94 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class TasksClient(NamespacedClient): - async def list( - self, - *, - actions: Optional[Any] = ..., - detailed: Optional[Any] = ..., - group_by: Optional[Any] = ..., - nodes: Optional[Any] = ..., - parent_task_id: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def cancel( - self, - *, - task_id: Optional[Any] = ..., - actions: Optional[Any] = ..., - nodes: Optional[Any] = ..., - parent_task_id: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - async def get( - self, - *, - task_id: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/_async/client/utils.py b/opensearchpy/_async/client/utils.py index b9ea1894..45ad552b 100644 --- a/opensearchpy/_async/client/utils.py +++ b/opensearchpy/_async/client/utils.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -34,3 +35,13 @@ _normalize_hosts, query_params, ) + +__all__ = [ + "SKIP_IN_PATH", + "NamespacedClient", + "_make_path", + "query_params", + "_bulk_body", + "_escape", + "_normalize_hosts", +] diff --git a/opensearchpy/_async/client/utils.pyi b/opensearchpy/_async/client/utils.pyi deleted file mode 100644 index bf88f587..00000000 --- a/opensearchpy/_async/client/utils.pyi +++ /dev/null @@ -1,40 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. 
-# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from ...client.utils import SKIP_IN_PATH as SKIP_IN_PATH -from ...client.utils import _bulk_body as _bulk_body -from ...client.utils import _escape as _escape -from ...client.utils import _make_path as _make_path # noqa -from ...client.utils import _normalize_hosts as _normalize_hosts -from ...client.utils import query_params as query_params -from ..client import AsyncOpenSearch -from ..transport import AsyncTransport - -class NamespacedClient: - client: AsyncOpenSearch - def __init__(self, client: AsyncOpenSearch) -> None: ... - @property - def transport(self) -> AsyncTransport: ... diff --git a/opensearchpy/_async/compat.py b/opensearchpy/_async/compat.py index d9c411d4..2ba1b980 100644 --- a/opensearchpy/_async/compat.py +++ b/opensearchpy/_async/compat.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -38,7 +39,7 @@ from asyncio import get_running_loop except ImportError: - def get_running_loop(): + def get_running_loop() -> asyncio.AbstractEventLoop: loop = asyncio.get_event_loop() if not loop.is_running(): raise RuntimeError("no running event loop") diff --git a/opensearchpy/_async/compat.pyi b/opensearchpy/_async/compat.pyi deleted file mode 100644 index 60b54b86..00000000 --- a/opensearchpy/_async/compat.pyi +++ /dev/null @@ -1,29 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import asyncio - -def get_running_loop() -> asyncio.AbstractEventLoop: ... 
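The `compat` change annotates the Python 3.6 fallback directly, which is what lets the `compat.pyi` stub go. A self-contained sketch of the same pattern; the trailing `return loop` is an assumption, since that line falls outside the hunk shown:

```python
import asyncio

try:
    from asyncio import get_running_loop  # present on Python 3.7+
except ImportError:  # Python 3.6 fallback, mirroring the hunk above

    def get_running_loop() -> asyncio.AbstractEventLoop:
        loop = asyncio.get_event_loop()
        if not loop.is_running():
            raise RuntimeError("no running event loop")
        return loop  # assumed continuation; sits outside the hunk shown


async def main() -> None:
    # Inside a coroutine a loop is always running, so this never raises.
    print(get_running_loop().is_running())  # True


asyncio.run(main())
```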
diff --git a/opensearchpy/_async/helpers/__init__.py b/opensearchpy/_async/helpers/__init__.py index 6c0097cd..22c54ac8 100644 --- a/opensearchpy/_async/helpers/__init__.py +++ b/opensearchpy/_async/helpers/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/helpers/actions.py b/opensearchpy/_async/helpers/actions.py index 323a6668..c85b2ac8 100644 --- a/opensearchpy/_async/helpers/actions.py +++ b/opensearchpy/_async/helpers/actions.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -31,6 +32,18 @@ import asyncio import logging +from typing import ( + Any, + AsyncGenerator, + AsyncIterable, + Collection, + Iterable, + List, + Optional, + Tuple, + TypeVar, + Union, +) from ...compat import map from ...exceptions import TransportError @@ -42,10 +55,12 @@ ) from ...helpers.errors import ScanError -logger = logging.getLogger("opensearchpy.helpers") +logger: logging.Logger = logging.getLogger("opensearchpy.helpers") -async def _chunk_actions(actions, chunk_size, max_chunk_bytes, serializer): +async def _chunk_actions( + actions: Any, chunk_size: int, max_chunk_bytes: int, serializer: Any +) -> AsyncGenerator[Any, None]: """ Split actions into chunks by number or size, serialize them into strings in the process. @@ -63,15 +78,15 @@ async def _chunk_actions(actions, chunk_size, max_chunk_bytes, serializer): async def _process_bulk_chunk( - client, - bulk_actions, - bulk_data, - raise_on_exception=True, - raise_on_error=True, - ignore_status=(), - *args, - **kwargs -): + client: Any, + bulk_actions: Any, + bulk_data: Any, + raise_on_exception: bool = True, + raise_on_error: bool = True, + ignore_status: Any = (), + *args: Any, + **kwargs: Any +) -> AsyncGenerator[Tuple[bool, Any], None]: """ Send a bulk request to opensearch and process the output. """ @@ -100,21 +115,26 @@ async def _process_bulk_chunk( yield item -def aiter(x): +T = TypeVar("T") + + +def aiter(x: Union[Iterable[T], AsyncIterable[T]]) -> Any: """Turns an async iterable or iterable into an async iterator""" if hasattr(x, "__anext__"): return x elif hasattr(x, "__aiter__"): return x.__aiter__() - async def f(): + async def f() -> Any: for item in x: yield item return f().__aiter__() -async def azip(*iterables): +async def azip( + *iterables: Union[Iterable[T], AsyncIterable[T]] +) -> AsyncGenerator[Tuple[T, ...], None]: """Zips async iterables and iterables into an async iterator with the same behavior as zip() """ @@ -127,21 +147,21 @@ async def azip(*iterables): async def async_streaming_bulk( - client, - actions, - chunk_size=500, - max_chunk_bytes=100 * 1024 * 1024, - raise_on_error=True, - expand_action_callback=expand_action, - raise_on_exception=True, - max_retries=0, - initial_backoff=2, - max_backoff=600, - yield_ok=True, - ignore_status=(), - *args, - **kwargs -): + client: Any, + actions: Any, + chunk_size: int = 500, + max_chunk_bytes: int = 100 * 1024 * 1024, + raise_on_error: bool = True, + expand_action_callback: Any = expand_action, + raise_on_exception: bool = True, + max_retries: int = 0, + initial_backoff: Union[float, int] = 2, + max_backoff: Union[float, int] = 600, + yield_ok: bool = True, + ignore_status: Any = (), + *args: Any, + **kwargs: Any +) -> AsyncGenerator[Tuple[bool, Any], None]: """ Streaming bulk consumes actions from the iterable passed in and yields results per action. 
For non-streaming usecases use @@ -176,7 +196,7 @@ async def async_streaming_bulk( :arg ignore_status: list of HTTP status code that you want to ignore """ - async def map_actions(): + async def map_actions() -> Any: async for item in aiter(actions): yield expand_action_callback(item) @@ -184,7 +204,8 @@ async def map_actions(): map_actions(), chunk_size, max_chunk_bytes, client.transport.serializer ): for attempt in range(max_retries + 1): - to_retry, to_retry_data = [], [] + to_retry: Any = [] + to_retry_data: Any = [] if attempt: await asyncio.sleep( min(max_backoff, initial_backoff * 2 ** (attempt - 1)) @@ -236,8 +257,13 @@ async def map_actions(): async def async_bulk( - client, actions, stats_only=False, ignore_status=(), *args, **kwargs -): + client: Any, + actions: Union[Iterable[Any], AsyncIterable[Any]], + stats_only: bool = False, + ignore_status: Optional[Union[int, Collection[int]]] = (), + *args: Any, + **kwargs: Any +) -> Tuple[int, Union[int, List[Any]]]: """ Helper for the :meth:`~opensearchpy.AsyncOpenSearch.bulk` api that provides a more human friendly interface - it consumes an iterator of actions and @@ -273,7 +299,7 @@ async def async_bulk( # make streaming_bulk yield successful results so we can count them kwargs["yield_ok"] = True - async for ok, item in async_streaming_bulk( + async for ok, item in async_streaming_bulk( # type: ignore client, actions, ignore_status=ignore_status, *args, **kwargs ): # go through request-response pairs and detect failures @@ -288,17 +314,17 @@ async def async_bulk( async def async_scan( - client, - query=None, - scroll="5m", - raise_on_error=True, - preserve_order=False, - size=1000, - request_timeout=None, - clear_scroll=True, - scroll_kwargs=None, - **kwargs -): + client: Any, + query: Any = None, + scroll: str = "5m", + raise_on_error: bool = True, + preserve_order: bool = False, + size: int = 1000, + request_timeout: Any = None, + clear_scroll: bool = True, + scroll_kwargs: Any = None, + **kwargs: Any +) -> Any: """ Simple abstraction on top of the :meth:`~opensearchpy.AsyncOpenSearch.scroll` api - a simple iterator that @@ -408,16 +434,16 @@ async def async_scan( async def async_reindex( - client, - source_index, - target_index, - query=None, - target_client=None, - chunk_size=500, - scroll="5m", - scan_kwargs={}, - bulk_kwargs={}, -): + client: Any, + source_index: Union[str, Collection[str]], + target_index: str, + query: Any = None, + target_client: Any = None, + chunk_size: int = 500, + scroll: str = "5m", + scan_kwargs: Any = {}, + bulk_kwargs: Any = {}, +) -> Tuple[int, Union[int, List[Any]]]: """ Reindex all documents from one index that satisfy a given query to another, potentially (if `target_client` is specified) on a different cluster. @@ -453,7 +479,7 @@ async def async_reindex( client, query=query, index=source_index, scroll=scroll, **scan_kwargs ) - async def _change_doc_index(hits, index): + async def _change_doc_index(hits: Any, index: Any) -> Any: async for h in hits: h["_index"] = index if "fields" in h: diff --git a/opensearchpy/_async/helpers/actions.pyi b/opensearchpy/_async/helpers/actions.pyi deleted file mode 100644 index be000ae8..00000000 --- a/opensearchpy/_async/helpers/actions.pyi +++ /dev/null @@ -1,114 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. 
-# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import logging -from typing import ( - Any, - AsyncGenerator, - AsyncIterable, - Callable, - Collection, - Dict, - Iterable, - List, - Mapping, - Optional, - Tuple, - TypeVar, - Union, -) - -from ...serializer import Serializer -from ..client import AsyncOpenSearch - -logger: logging.Logger - -T = TypeVar("T") - -def _chunk_actions( - actions: Any, chunk_size: int, max_chunk_bytes: int, serializer: Serializer -) -> AsyncGenerator[Any, None]: ... -def _process_bulk_chunk( - client: AsyncOpenSearch, - bulk_actions: Any, - bulk_data: Any, - raise_on_exception: bool = ..., - raise_on_error: bool = ..., - ignore_status: Optional[Union[int, Collection[int]]] = ..., - *args: Any, - **kwargs: Any -) -> AsyncGenerator[Tuple[bool, Any], None]: ... -def aiter(x: Union[Iterable[T], AsyncIterable[T]]) -> AsyncGenerator[T, None]: ... -def azip( - *iterables: Union[Iterable[T], AsyncIterable[T]] -) -> AsyncGenerator[Tuple[T, ...], None]: ... -def async_streaming_bulk( - client: AsyncOpenSearch, - actions: Union[Iterable[Any], AsyncIterable[Any]], - chunk_size: int = ..., - max_chunk_bytes: int = ..., - raise_on_error: bool = ..., - expand_action_callback: Callable[[Any], Tuple[Dict[str, Any], Optional[Any]]] = ..., - raise_on_exception: bool = ..., - max_retries: int = ..., - initial_backoff: Union[float, int] = ..., - max_backoff: Union[float, int] = ..., - yield_ok: bool = ..., - ignore_status: Optional[Union[int, Collection[int]]] = ..., - *args: Any, - **kwargs: Any -) -> AsyncGenerator[Tuple[bool, Any], None]: ... -async def async_bulk( - client: AsyncOpenSearch, - actions: Union[Iterable[Any], AsyncIterable[Any]], - stats_only: bool = ..., - ignore_status: Optional[Union[int, Collection[int]]] = ..., - *args: Any, - **kwargs: Any -) -> Tuple[int, Union[int, List[Any]]]: ... -def async_scan( - client: AsyncOpenSearch, - query: Optional[Any] = ..., - scroll: str = ..., - raise_on_error: bool = ..., - preserve_order: bool = ..., - size: int = ..., - request_timeout: Optional[Union[float, int]] = ..., - clear_scroll: bool = ..., - scroll_kwargs: Optional[Mapping[str, Any]] = ..., - **kwargs: Any -) -> AsyncGenerator[int, None]: ... -async def async_reindex( - client: AsyncOpenSearch, - source_index: Union[str, Collection[str]], - target_index: str, - query: Any = ..., - target_client: Optional[AsyncOpenSearch] = ..., - chunk_size: int = ..., - scroll: str = ..., - scan_kwargs: Optional[Mapping[str, Any]] = ..., - bulk_kwargs: Optional[Mapping[str, Any]] = ..., -) -> Tuple[int, Union[int, List[Any]]]: ... 
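With the stub merged inline, `async_bulk`'s `Tuple[int, Union[int, List[Any]]]` return type now lives in the source itself. A usage sketch, assuming a local cluster and a hypothetical `demo-index`; the import uses the module patched above, though a public re-export via `opensearchpy.helpers` may also be available:

```python
import asyncio

from opensearchpy import AsyncOpenSearch
from opensearchpy._async.helpers.actions import async_bulk  # module patched above


async def main() -> None:
    client = AsyncOpenSearch(hosts=["http://localhost:9200"])  # assumed endpoint
    docs = ({"_index": "demo-index", "_id": i, "value": i} for i in range(1000))
    # stats_only=True returns (successes, error count) instead of error items.
    ok, errors = await async_bulk(client, docs, stats_only=True, chunk_size=500)
    print(ok, errors)
    await client.close()


asyncio.run(main())
```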
diff --git a/opensearchpy/_async/helpers/document.py b/opensearchpy/_async/helpers/document.py index 7f796a86..83349f7e 100644 --- a/opensearchpy/_async/helpers/document.py +++ b/opensearchpy/_async/helpers/document.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -7,15 +8,13 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc - +import collections.abc as collections_abc from fnmatch import fnmatch +from typing import Any, Optional, Tuple, Type from six import add_metaclass +from opensearchpy._async.client import AsyncOpenSearch from opensearchpy._async.helpers.index import AsyncIndex from opensearchpy._async.helpers.search import AsyncSearch from opensearchpy.connection.async_connections import get_connection @@ -34,7 +33,12 @@ class AsyncIndexMeta(DocumentMeta): # class, only user defined subclasses should have an _index attr _document_initialized = False - def __new__(cls, name, bases, attrs): + def __new__( + cls, + name: str, + bases: Tuple[Type[ObjectBase]], + attrs: Any, + ) -> Any: new_cls = super(AsyncIndexMeta, cls).__new__(cls, name, bases, attrs) if cls._document_initialized: index_opts = attrs.pop("Index", None) @@ -45,7 +49,7 @@ def __new__(cls, name, bases, attrs): return new_cls @classmethod - def construct_index(cls, opts, bases): + def construct_index(cls, opts: Any, bases: Any) -> Any: if opts is None: for b in bases: if hasattr(b, "_index"): @@ -71,25 +75,27 @@ class AsyncDocument(ObjectBase): """ @classmethod - def _matches(cls, hit): + def _matches(cls: Any, hit: Any) -> bool: if cls._index._name is None: return True return fnmatch(hit.get("_index", ""), cls._index._name) @classmethod - def _get_using(cls, using=None): + def _get_using(cls: Any, using: Any = None) -> Any: return using or cls._index._using @classmethod - async def _get_connection(cls, using=None): + async def _get_connection(cls, using: Optional[AsyncOpenSearch] = None) -> Any: return await get_connection(cls._get_using(using)) @classmethod - def _default_index(cls, index=None): + def _default_index(cls: Any, index: Any = None) -> Any: return index or cls._index._name @classmethod - async def init(cls, index=None, using=None): + async def init( + cls: Any, index: Optional[str] = None, using: Optional[AsyncOpenSearch] = None + ) -> None: """ Create the index and populate the mappings in opensearch. """ @@ -98,7 +104,9 @@ async def init(cls, index=None, using=None): i = i.clone(name=index) await i.save(using=using) - def _get_index(self, index=None, required=True): + def _get_index( + self, index: Optional[str] = None, required: Optional[bool] = True + ) -> Any: if index is None: index = getattr(self.meta, "index", None) if index is None: @@ -109,7 +117,7 @@ def _get_index(self, index=None, required=True): raise ValidationException("You cannot write to a wildcard index.") return index - def __repr__(self): + def __repr__(self) -> str: return "{}({})".format( self.__class__.__name__, ", ".join( @@ -120,7 +128,7 @@ def __repr__(self): ) @classmethod - def search(cls, using=None, index=None): + def search(cls, using: Any = None, index: Any = None) -> AsyncSearch: """ Create an :class:`~opensearchpy.AsyncSearch` instance that will search over this ``Document``. 
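The annotated classmethods preserve the usual async document workflow. A sketch under the assumptions noted in the comments (hypothetical `Post` model; connection setup omitted):

```python
import asyncio

from opensearchpy import Keyword, Text
from opensearchpy._async.helpers.document import AsyncDocument  # module patched above


class Post(AsyncDocument):  # hypothetical model
    title = Text()
    status = Keyword()

    class Index:
        name = "posts"


async def main() -> None:
    # Assumes a default async connection was registered beforehand via
    # opensearchpy.connection.async_connections (omitted here).
    await Post.init()  # create the index and mappings, per init() above
    s = Post.search().filter("term", status="published")
    print((await s.execute()).hits.total)


asyncio.run(main())
```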
@@ -130,7 +138,13 @@ def search(cls, using=None, index=None): ) @classmethod - async def get(cls, id, using=None, index=None, **kwargs): + async def get( # type: ignore + cls, + id: Any, + using: Any = None, + index: Any = None, + **kwargs: Any, + ) -> Any: """ Retrieve a single document from opensearch using its ``id``. @@ -149,7 +163,13 @@ async def get(cls, id, using=None, index=None, **kwargs): return cls.from_opensearch(doc) @classmethod - async def exists(cls, id, using=None, index=None, **kwargs): + async def exists( + cls, + id: str, + using: Optional[AsyncOpenSearch] = None, + index: Optional[str] = None, + **kwargs: Any, + ) -> Any: """ Check if a single document exists in opensearch using its ``id``. @@ -166,13 +186,19 @@ async def exists(cls, id, using=None, index=None, **kwargs): @classmethod async def mget( - cls, docs, using=None, index=None, raise_on_error=True, missing="none", **kwargs - ): - r""" - Retrieve multiple document by their ``id``\s. Returns a list of instances + cls, + docs: Any, + using: Optional[AsyncOpenSearch] = None, + index: Optional[str] = None, + raise_on_error: Optional[bool] = True, + missing: Optional[str] = "none", + **kwargs: Any, + ) -> Any: + """ + Retrieve multiple documents by their ``id``'s. Returns a list of instances in the same order as requested. - :arg docs: list of ``id``\s of the documents to be retrieved or a list + :arg docs: list of ``id``'s of the documents to be retrieved or a list of document specifications as per https://opensearch.org/docs/latest/opensearch/rest-api/document-apis/multi-get/ :arg index: opensearch index to use, if the ``Document`` is @@ -196,7 +222,9 @@ async def mget( } results = await opensearch.mget(body, index=cls._default_index(index), **kwargs) - objs, error_docs, missing_docs = [], [], [] + objs: Any = [] + error_docs: Any = [] + missing_docs: Any = [] for doc in results["docs"]: if doc.get("found"): if error_docs or missing_docs: @@ -229,7 +257,12 @@ async def mget( raise NotFoundError(404, message, {"docs": missing_docs}) return objs - async def delete(self, using=None, index=None, **kwargs): + async def delete( + self, + using: Optional[AsyncOpenSearch] = None, + index: Optional[str] = None, + **kwargs: Any, + ) -> Any: """ Delete the instance in opensearch. @@ -252,7 +285,9 @@ async def delete(self, using=None, index=None, **kwargs): doc_meta.update(kwargs) await opensearch.delete(index=self._get_index(index), **doc_meta) - def to_dict(self, include_meta=False, skip_empty=True): + def to_dict( # type: ignore + self, include_meta: Optional[bool] = False, skip_empty: Optional[bool] = True + ) -> Any: """ Serialize the instance into a dictionary so that it can be saved in opensearch. @@ -263,7 +298,7 @@ def to_dict(self, include_meta=False, skip_empty=True): ``[]``, ``{}``) to be left on the document. Those values will be stripped out otherwise as they make no difference in opensearch.
""" - d = super(AsyncDocument, self).to_dict(skip_empty=skip_empty) + d = super(AsyncDocument, self).to_dict(skip_empty) if not include_meta: return d @@ -279,19 +314,19 @@ def to_dict(self, include_meta=False, skip_empty=True): async def update( self, - using=None, - index=None, - detect_noop=True, - doc_as_upsert=False, - refresh=False, - retry_on_conflict=None, - script=None, - script_id=None, - scripted_upsert=False, - upsert=None, - return_doc_meta=False, - **fields - ): + using: Optional[AsyncOpenSearch] = None, + index: Optional[str] = None, + detect_noop: Optional[bool] = True, + doc_as_upsert: Optional[bool] = False, + refresh: Optional[bool] = False, + retry_on_conflict: Optional[bool] = None, + script: Any = None, + script_id: Optional[str] = None, + scripted_upsert: Optional[bool] = False, + upsert: Optional[bool] = None, + return_doc_meta: Optional[bool] = False, + **fields: Any, + ) -> Any: """ Partial update of the document, specify fields you wish to update and both the instance and the document in opensearch will be updated:: @@ -320,7 +355,7 @@ async def update( :return operation result noop/updated """ - body = { + body: Any = { "doc_as_upsert": doc_as_upsert, "detect_noop": detect_noop, } @@ -384,13 +419,13 @@ async def update( async def save( self, - using=None, - index=None, - validate=True, - skip_empty=True, - return_doc_meta=False, - **kwargs - ): + using: Optional[AsyncOpenSearch] = None, + index: Optional[str] = None, + validate: Optional[bool] = True, + skip_empty: Optional[bool] = True, + return_doc_meta: Optional[bool] = False, + **kwargs: Any, + ) -> Any: """ Save the document into opensearch. If the document doesn't exist it is created, it is overwritten otherwise. Returns ``True`` if this @@ -427,7 +462,7 @@ async def save( meta = await opensearch.index( index=self._get_index(index), body=self.to_dict(skip_empty=skip_empty), - **doc_meta + **doc_meta, ) # update meta information from OpenSearch for k in META_FIELDS: diff --git a/opensearchpy/_async/helpers/document.pyi b/opensearchpy/_async/helpers/document.pyi deleted file mode 100644 index 71eb4ef4..00000000 --- a/opensearchpy/_async/helpers/document.pyi +++ /dev/null @@ -1,14 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -from opensearchpy.helpers.document import DocumentMeta -from opensearchpy.helpers.utils import ObjectBase - -class AsyncIndexMeta(DocumentMeta): ... -class AsyncDocument(ObjectBase): ... diff --git a/opensearchpy/_async/helpers/faceted_search.py b/opensearchpy/_async/helpers/faceted_search.py index c6ca4385..1eb5a677 100644 --- a/opensearchpy/_async/helpers/faceted_search.py +++ b/opensearchpy/_async/helpers/faceted_search.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -8,6 +9,8 @@ # GitHub history for details. 
+from typing import Any + from six import iteritems, itervalues from opensearchpy._async.helpers.search import AsyncSearch @@ -57,38 +60,38 @@ def search(self): """ - index = None - doc_types = None - fields = None - facets = {} - using = "default" + index: Any = None + doc_types: Any = None + fields: Any = None + facets: Any = {} + using: str = "default" - def __init__(self, query=None, filters={}, sort=()): + def __init__(self, query: Any = None, filters: Any = {}, sort: Any = ()) -> None: """ :arg query: the text to search for :arg filters: facet values to filter :arg sort: sort information to be passed to :class:`~opensearchpy.AsyncSearch` """ self._query = query - self._filters = {} + self._filters: Any = {} self._sort = sort - self.filter_values = {} + self.filter_values: Any = {} for name, value in iteritems(filters): self.add_filter(name, value) self._s = self.build_search() - async def count(self): + async def count(self) -> Any: return await self._s.count() - def __getitem__(self, k): + def __getitem__(self, k: Any) -> Any: self._s = self._s[k] return self - def __iter__(self): + def __iter__(self) -> Any: return iter(self._s) - def add_filter(self, name, filter_values): + def add_filter(self, name: Any, filter_values: Any) -> None: """ Add a filter for a facet. """ @@ -110,7 +113,7 @@ def add_filter(self, name, filter_values): self._filters[name] = f - def search(self): + def search(self) -> Any: """ Returns the base Search object to which the facets are added. @@ -120,7 +123,7 @@ def search(self): s = AsyncSearch(doc_type=self.doc_types, index=self.index, using=self.using) return s.response_class(FacetedResponse) - def query(self, search, query): + def query(self, search: Any, query: Any) -> Any: """ Add query part to ``search``. @@ -133,7 +136,7 @@ def query(self, search, query): return search.query("multi_match", query=query) return search - def aggregate(self, search): + def aggregate(self, search: Any) -> Any: """ Add aggregations representing the facets selected, including potential filters. @@ -149,7 +152,7 @@ def aggregate(self, search): f, agg ) - def filter(self, search): + def filter(self, search: Any) -> Any: """ Add a ``post_filter`` to the search request narrowing the results based on the facet filters. @@ -162,7 +165,7 @@ def filter(self, search): post_filter &= f return search.post_filter(post_filter) - def highlight(self, search): + def highlight(self, search: Any) -> Any: """ Add highlighting for all the fields """ @@ -170,7 +173,7 @@ def highlight(self, search): *(f if "^" not in f else f.split("^", 1)[0] for f in self.fields) ) - def sort(self, search): + def sort(self, search: Any) -> Any: """ Add sorting information to the request. """ @@ -178,7 +181,7 @@ def sort(self, search): search = search.sort(*self._sort) return search - def build_search(self): + def build_search(self) -> Any: """ Construct the ``AsyncSearch`` object. """ @@ -191,7 +194,7 @@ def build_search(self): self.aggregate(s) return s - async def execute(self): + async def execute(self) -> Any: """ Execute the search and return the response. """ diff --git a/opensearchpy/_async/helpers/faceted_search.pyi b/opensearchpy/_async/helpers/faceted_search.pyi deleted file mode 100644 index 443e87c5..00000000 --- a/opensearchpy/_async/helpers/faceted_search.pyi +++ /dev/null @@ -1,10 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. 
-# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -class AsyncFacetedSearch(object): ... diff --git a/opensearchpy/_async/helpers/index.py b/opensearchpy/_async/helpers/index.py index c3e5a371..4f2a9918 100644 --- a/opensearchpy/_async/helpers/index.py +++ b/opensearchpy/_async/helpers/index.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -7,6 +8,8 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. +from typing import Any + from opensearchpy._async.helpers.mapping import AsyncMapping from opensearchpy._async.helpers.search import AsyncSearch from opensearchpy._async.helpers.update_by_query import AsyncUpdateByQuery @@ -17,7 +20,14 @@ class AsyncIndexTemplate(object): - def __init__(self, name, template, index=None, order=None, **kwargs): + def __init__( + self, + name: Any, + template: Any, + index: Any = None, + order: Any = None, + **kwargs: Any + ) -> None: if index is None: self._index = AsyncIndex(template, **kwargs) else: @@ -31,17 +41,17 @@ def __init__(self, name, template, index=None, order=None, **kwargs): self._template_name = name self.order = order - def __getattr__(self, attr_name): + def __getattr__(self, attr_name: Any) -> Any: return getattr(self._index, attr_name) - def to_dict(self): + def to_dict(self) -> Any: d = self._index.to_dict() d["index_patterns"] = [self._index._name] if self.order is not None: d["order"] = self.order return d - async def save(self, using=None): + async def save(self, using: Any = None) -> Any: opensearch = await get_connection(using or self._index._using) return await opensearch.indices.put_template( name=self._template_name, body=self.to_dict() @@ -49,25 +59,27 @@ async def save(self, using=None): class AsyncIndex(object): - def __init__(self, name, using="default"): + def __init__(self, name: Any, using: Any = "default") -> None: """ :arg name: name of the index :arg using: connection alias to use, defaults to ``'default'`` """ self._name = name - self._doc_types = [] + self._doc_types: Any = [] self._using = using - self._settings = {} - self._aliases = {} - self._analysis = {} - self._mapping = None + self._settings: Any = {} + self._aliases: Any = {} + self._analysis: Any = {} + self._mapping: Any = None - def get_or_create_mapping(self): + def get_or_create_mapping(self) -> Any: if self._mapping is None: self._mapping = AsyncMapping() return self._mapping - def as_template(self, template_name, pattern=None, order=None): + def as_template( + self, template_name: Any, pattern: Any = None, order: Any = None + ) -> Any: # TODO: should we allow pattern to be a top-level arg? # or maybe have an IndexPattern that allows for it and have # AsyncDocument._index be that? 
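`as_template()` plus `AsyncIndexTemplate.to_dict()` yield a template whose `index_patterns` is derived from the index name. A short sketch, using the import path shown in the document.py diff above and assuming a default async connection is registered:

```python
from opensearchpy._async.helpers.index import AsyncIndex


async def create_logs_template() -> None:
    idx = AsyncIndex("logs-*")
    idx.settings(number_of_shards=1)
    # AsyncIndexTemplate.to_dict() fills in index_patterns=["logs-*"].
    await idx.as_template("logs", order=0).save()
```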
@@ -75,7 +87,7 @@ def as_template(self, template_name, pattern=None, order=None): template_name, pattern or self._name, index=self, order=order ) - def resolve_nested(self, field_path): + def resolve_nested(self, field_path: Any) -> Any: for doc in self._doc_types: nested, field = doc._doc_type.mapping.resolve_nested(field_path) if field is not None: @@ -84,7 +96,7 @@ def resolve_nested(self, field_path): return self._mapping.resolve_nested(field_path) return (), None - def resolve_field(self, field_path): + def resolve_field(self, field_path: Any) -> Any: for doc in self._doc_types: field = doc._doc_type.mapping.resolve_field(field_path) if field is not None: @@ -93,12 +105,12 @@ def resolve_field(self, field_path): return self._mapping.resolve_field(field_path) return None - async def load_mappings(self, using=None): + async def load_mappings(self, using: Any = None) -> None: await self.get_or_create_mapping().update_from_opensearch( self._name, using=using or self._using ) - def clone(self, name=None, using=None): + def clone(self, name: Any = None, using: Any = None) -> Any: """ Create a copy of the instance with another name or connection alias. Useful for creating multiple indices with shared configuration:: @@ -122,14 +134,14 @@ def clone(self, name=None, using=None): i._mapping = self._mapping._clone() return i - async def _get_connection(self, using=None): + async def _get_connection(self, using: Any = None) -> Any: if self._name is None: raise ValueError("You cannot perform API calls on the default index.") return await get_connection(using or self._using) connection = property(_get_connection) - def mapping(self, mapping): + def mapping(self, mapping: Any) -> None: """ Associate a mapping (an instance of :class:`~opensearchpy.AsyncMapping`) with this index. @@ -138,7 +150,7 @@ def mapping(self, mapping): """ self.get_or_create_mapping().update(mapping) - def document(self, document): + def document(self, document: Any) -> Any: """ Associate a :class:`~opensearchpy.AsyncDocument` subclass with an index. This means that, when this index is created, it will contain the @@ -169,7 +181,7 @@ class Post(AsyncDocument): return document - def settings(self, **kwargs): + def settings(self, **kwargs: Any) -> "AsyncIndex": """ Add settings to the index:: @@ -182,7 +194,7 @@ def settings(self, **kwargs): self._settings.update(kwargs) return self - def aliases(self, **kwargs): + def aliases(self, **kwargs: Any) -> "AsyncIndex": """ Add aliases to the index definition:: @@ -192,7 +204,7 @@ def aliases(self, **kwargs): self._aliases.update(kwargs) return self - def analyzer(self, *args, **kwargs): + def analyzer(self, *args: Any, **kwargs: Any) -> Any: """ Explicitly add an analyzer to an index. Note that all custom analyzers defined in mappings will also be created. This is useful for search analyzers. 
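Because `settings()` and `aliases()` are now annotated `-> "AsyncIndex"`, type checkers can follow the fluent chaining their docstrings describe; `analyzer()` merges into the analysis config and returns nothing, so it stays out of the chain. A sketch (hypothetical index; the `analysis` import path is an assumption):

```python
from opensearchpy._async.helpers.index import AsyncIndex
from opensearchpy.helpers.analysis import analyzer  # assumed import path

blogs = (
    AsyncIndex("blogs-v1")
    .settings(number_of_shards=1, number_of_replicas=0)
    .aliases(blogs={})
)
# Called standalone: analyzer() returns None, per the annotation above.
blogs.analyzer(analyzer("lowercase_std", tokenizer="standard", filter=["lowercase"]))


async def ensure() -> None:
    if not await blogs.exists():
        await blogs.create()
```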
@@ -219,14 +231,14 @@ def analyzer(self, *args, **kwargs): # merge the definition merge(self._analysis, d, True) - def to_dict(self): + def to_dict(self) -> Any: out = {} if self._settings: out["settings"] = self._settings if self._aliases: out["aliases"] = self._aliases - mappings = self._mapping.to_dict() if self._mapping else {} - analysis = self._mapping._collect_analysis() if self._mapping else {} + mappings: Any = self._mapping.to_dict() if self._mapping else {} + analysis: Any = self._mapping._collect_analysis() if self._mapping else {} for d in self._doc_types: mapping = d._doc_type.mapping merge(mappings, mapping.to_dict(), True) @@ -238,7 +250,7 @@ def to_dict(self): out.setdefault("settings", {})["analysis"] = analysis return out - def search(self, using=None): + def search(self, using: Any = None) -> Any: """ Return a :class:`~opensearchpy.AsyncSearch` object searching over the index (or all the indices belonging to this template) and its @@ -248,7 +260,7 @@ def search(self, using=None): using=using or self._using, index=self._name, doc_type=self._doc_types ) - def updateByQuery(self, using=None): + def updateByQuery(self, using: Any = None) -> Any: """ Return a :class:`~opensearchpy.AsyncUpdateByQuery` object searching over the index (or all the indices belonging to this template) and updating Documents that match @@ -262,7 +274,7 @@ def updateByQuery(self, using=None): index=self._name, ) - async def create(self, using=None, **kwargs): + async def create(self, using: Any = None, **kwargs: Any) -> Any: """ Creates the index in opensearch. @@ -273,13 +285,13 @@ async def create(self, using=None, **kwargs): index=self._name, body=self.to_dict(), **kwargs ) - async def is_closed(self, using=None): + async def is_closed(self, using: Any = None) -> Any: state = await (await self._get_connection(using)).cluster.state( index=self._name, metric="metadata" ) return state["metadata"]["indices"][self._name]["state"] == "close" - async def save(self, using=None): + async def save(self, using: Any = None) -> Any: """ Sync the index definition with opensearch, creating the index if it doesn't exist and updating its settings and mappings if it does. @@ -333,7 +345,7 @@ async def save(self, using=None): if mappings: await self.put_mapping(using=using, body=mappings) - async def analyze(self, using=None, **kwargs): + async def analyze(self, using: Any = None, **kwargs: Any) -> Any: """ Perform the analysis process on a text and return the tokens breakdown of the text. @@ -345,7 +357,7 @@ async def analyze(self, using=None, **kwargs): index=self._name, **kwargs ) - async def refresh(self, using=None, **kwargs): + async def refresh(self, using: Any = None, **kwargs: Any) -> Any: """ Performs a refresh operation on the index. @@ -356,7 +368,7 @@ async def refresh(self, using=None, **kwargs): index=self._name, **kwargs ) - async def flush(self, using=None, **kwargs): + async def flush(self, using: Any = None, **kwargs: Any) -> Any: """ Performs a flush operation on the index. @@ -367,7 +379,7 @@ async def flush(self, using=None, **kwargs): index=self._name, **kwargs ) - async def get(self, using=None, **kwargs): + async def get(self, using: Any = None, **kwargs: Any) -> Any: """ The get index API allows to retrieve information about the index. @@ -378,7 +390,7 @@ async def get(self, using=None, **kwargs): index=self._name, **kwargs ) - async def open(self, using=None, **kwargs): + async def open(self, using: Any = None, **kwargs: Any) -> Any: """ Opens the index in opensearch. 
@@ -389,7 +401,7 @@ async def open(self, using=None, **kwargs): index=self._name, **kwargs ) - async def close(self, using=None, **kwargs): + async def close(self, using: Any = None, **kwargs: Any) -> Any: """ Closes the index in opensearch. @@ -400,7 +412,7 @@ async def close(self, using=None, **kwargs): index=self._name, **kwargs ) - async def delete(self, using=None, **kwargs): + async def delete(self, using: Any = None, **kwargs: Any) -> Any: """ Deletes the index in opensearch. @@ -411,7 +423,7 @@ async def delete(self, using=None, **kwargs): index=self._name, **kwargs ) - async def exists(self, using=None, **kwargs): + async def exists(self, using: Any = None, **kwargs: Any) -> Any: """ Returns ``True`` if the index already exists in opensearch. @@ -422,7 +434,7 @@ async def exists(self, using=None, **kwargs): index=self._name, **kwargs ) - async def put_mapping(self, using=None, **kwargs): + async def put_mapping(self, using: Any = None, **kwargs: Any) -> Any: """ Register specific mapping definition for a specific type. @@ -433,7 +445,7 @@ async def put_mapping(self, using=None, **kwargs): index=self._name, **kwargs ) - async def get_mapping(self, using=None, **kwargs): + async def get_mapping(self, using: Any = None, **kwargs: Any) -> Any: """ Retrieve specific mapping definition for a specific type. @@ -444,7 +456,7 @@ async def get_mapping(self, using=None, **kwargs): index=self._name, **kwargs ) - async def get_field_mapping(self, using=None, **kwargs): + async def get_field_mapping(self, using: Any = None, **kwargs: Any) -> Any: """ Retrieve mapping definition of a specific field. @@ -455,7 +467,7 @@ async def get_field_mapping(self, using=None, **kwargs): index=self._name, **kwargs ) - async def put_alias(self, using=None, **kwargs): + async def put_alias(self, using: Any = None, **kwargs: Any) -> Any: """ Create an alias for the index. @@ -466,7 +478,7 @@ async def put_alias(self, using=None, **kwargs): index=self._name, **kwargs ) - async def exists_alias(self, using=None, **kwargs): + async def exists_alias(self, using: Any = None, **kwargs: Any) -> Any: """ Return a boolean indicating whether given alias exists for this index. @@ -477,7 +489,7 @@ async def exists_alias(self, using=None, **kwargs): index=self._name, **kwargs ) - async def get_alias(self, using=None, **kwargs): + async def get_alias(self, using: Any = None, **kwargs: Any) -> Any: """ Retrieve a specified alias. @@ -488,7 +500,7 @@ async def get_alias(self, using=None, **kwargs): index=self._name, **kwargs ) - async def delete_alias(self, using=None, **kwargs): + async def delete_alias(self, using: Any = None, **kwargs: Any) -> Any: """ Delete specific alias. @@ -499,7 +511,7 @@ async def delete_alias(self, using=None, **kwargs): index=self._name, **kwargs ) - async def get_settings(self, using=None, **kwargs): + async def get_settings(self, using: Any = None, **kwargs: Any) -> Any: """ Retrieve settings for the index. @@ -510,7 +522,7 @@ async def get_settings(self, using=None, **kwargs): index=self._name, **kwargs ) - async def put_settings(self, using=None, **kwargs): + async def put_settings(self, using: Any = None, **kwargs: Any) -> Any: """ Change specific index level settings in real time. @@ -521,7 +533,7 @@ async def put_settings(self, using=None, **kwargs): index=self._name, **kwargs ) - async def stats(self, using=None, **kwargs): + async def stats(self, using: Any = None, **kwargs: Any) -> Any: """ Retrieve statistics on different operations happening on the index. 
@@ -532,7 +544,7 @@ async def stats(self, using=None, **kwargs): index=self._name, **kwargs ) - async def segments(self, using=None, **kwargs): + async def segments(self, using: Any = None, **kwargs: Any) -> Any: """ Provide low level segments information that a Lucene index (shard level) is built with. @@ -544,7 +556,7 @@ async def segments(self, using=None, **kwargs): index=self._name, **kwargs ) - async def validate_query(self, using=None, **kwargs): + async def validate_query(self, using: Any = None, **kwargs: Any) -> Any: """ Validate a potentially expensive query without executing it. @@ -555,7 +567,7 @@ async def validate_query(self, using=None, **kwargs): index=self._name, **kwargs ) - async def clear_cache(self, using=None, **kwargs): + async def clear_cache(self, using: Any = None, **kwargs: Any) -> Any: """ Clear all caches or specific cached associated with the index. @@ -566,7 +578,7 @@ async def clear_cache(self, using=None, **kwargs): index=self._name, **kwargs ) - async def recovery(self, using=None, **kwargs): + async def recovery(self, using: Any = None, **kwargs: Any) -> Any: """ The indices recovery API provides insight into on-going shard recoveries for the index. @@ -578,7 +590,7 @@ async def recovery(self, using=None, **kwargs): index=self._name, **kwargs ) - async def upgrade(self, using=None, **kwargs): + async def upgrade(self, using: Any = None, **kwargs: Any) -> Any: """ Upgrade the index to the latest format. @@ -589,7 +601,7 @@ async def upgrade(self, using=None, **kwargs): index=self._name, **kwargs ) - async def get_upgrade(self, using=None, **kwargs): + async def get_upgrade(self, using: Any = None, **kwargs: Any) -> Any: """ Monitor how much of the index is upgraded. @@ -600,7 +612,7 @@ async def get_upgrade(self, using=None, **kwargs): index=self._name, **kwargs ) - async def shard_stores(self, using=None, **kwargs): + async def shard_stores(self, using: Any = None, **kwargs: Any) -> Any: """ Provides store information for shard copies of the index. Store information reports on which nodes shard copies exist, the shard copy @@ -614,7 +626,7 @@ async def shard_stores(self, using=None, **kwargs): index=self._name, **kwargs ) - async def forcemerge(self, using=None, **kwargs): + async def forcemerge(self, using: Any = None, **kwargs: Any) -> Any: """ The force merge API allows to force merging of the index through an API. The merge relates to the number of segments a Lucene index holds @@ -632,7 +644,7 @@ async def forcemerge(self, using=None, **kwargs): index=self._name, **kwargs ) - async def shrink(self, using=None, **kwargs): + async def shrink(self, using: Any = None, **kwargs: Any) -> Any: """ The shrink index API allows you to shrink an existing index into a new index with fewer primary shards. The number of primary shards in the diff --git a/opensearchpy/_async/helpers/index.pyi b/opensearchpy/_async/helpers/index.pyi deleted file mode 100644 index 5b9d8720..00000000 --- a/opensearchpy/_async/helpers/index.pyi +++ /dev/null @@ -1,11 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -class AsyncIndexTemplate(object): ... -class AsyncIndex(object): ... 
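The remaining `AsyncIndex` methods are thin passthroughs that `await` the corresponding indices APIs with `index=self._name`, so routine maintenance reads linearly. A sketch against a hypothetical index:

```python
from opensearchpy._async.helpers.index import AsyncIndex


async def maintain(name: str = "blogs-v1") -> None:
    idx = AsyncIndex(name)
    if await idx.is_closed():  # reads cluster state metadata, per above
        await idx.open()
    await idx.refresh()
    await idx.forcemerge(max_num_segments=1)
    stats = await idx.stats()
    print(stats["_all"]["primaries"]["docs"]["count"])
```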
diff --git a/opensearchpy/_async/helpers/mapping.py b/opensearchpy/_async/helpers/mapping.py index 1ccec472..dd560564 100644 --- a/opensearchpy/_async/helpers/mapping.py +++ b/opensearchpy/_async/helpers/mapping.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -7,12 +8,9 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc - +import collections.abc as collections_abc from itertools import chain +from typing import Any from six import iteritems @@ -22,25 +20,28 @@ class AsyncMapping(object): - def __init__(self): + _meta: Any + properties: Properties + + def __init__(self) -> None: self.properties = Properties() self._meta = {} - def __repr__(self): + def __repr__(self) -> str: return "Mapping()" - def _clone(self): + def _clone(self) -> Any: m = AsyncMapping() m.properties._params = self.properties._params.copy() return m @classmethod - async def from_opensearch(cls, index, using="default"): + async def from_opensearch(cls, index: Any, using: str = "default") -> Any: m = cls() await m.update_from_opensearch(index, using) return m - def resolve_nested(self, field_path): + def resolve_nested(self, field_path: str) -> Any: field = self nested = [] parts = field_path.split(".") @@ -53,18 +54,18 @@ def resolve_nested(self, field_path): nested.append(".".join(parts[: i + 1])) return nested, field - def resolve_field(self, field_path): + def resolve_field(self, field_path: Any) -> Any: field = self for step in field_path.split("."): try: field = field[step] except KeyError: - return + return None return field - def _collect_analysis(self): - analysis = {} - fields = [] + def _collect_analysis(self) -> Any: + analysis: Any = {} + fields: Any = [] if "_all" in self._meta: fields.append(Text(**self._meta["_all"])) @@ -90,20 +91,20 @@ def _collect_analysis(self): return analysis - async def save(self, index, using="default"): + async def save(self, index: Any, using: str = "default") -> Any: from opensearchpy._async.helpers.index import AsyncIndex index = AsyncIndex(index, using=using) index.mapping(self) return await index.save() - async def update_from_opensearch(self, index, using="default"): + async def update_from_opensearch(self, index: Any, using: str = "default") -> None: opensearch = await get_connection(using) raw = await opensearch.indices.get_mapping(index=index) _, raw = raw.popitem() self._update_from_dict(raw["mappings"]) - def _update_from_dict(self, raw): + def _update_from_dict(self, raw: Any) -> None: for name, definition in iteritems(raw.get("properties", {})): self.field(name, definition) @@ -115,7 +116,7 @@ def _update_from_dict(self, raw): else: self.meta(name, value) - def update(self, mapping, update_only=False): + def update(self, mapping: Any, update_only: bool = False) -> None: for name in mapping: if update_only and name in self: # nested and inner objects, merge recursively @@ -132,20 +133,20 @@ def update(self, mapping, update_only=False): else: self._meta.update(mapping._meta) - def __contains__(self, name): + def __contains__(self, name: Any) -> bool: return name in self.properties.properties - def __getitem__(self, name): + def __getitem__(self, name: Any) -> Any: return self.properties.properties[name] - def __iter__(self): + def __iter__(self) -> Any: return iter(self.properties.properties) - def 
field(self, *args, **kwargs): + def field(self, *args: Any, **kwargs: Any) -> "AsyncMapping": self.properties.field(*args, **kwargs) return self - def meta(self, name, params=None, **kwargs): + def meta(self, name: Any, params: Any = None, **kwargs: Any) -> "AsyncMapping": if not name.startswith("_") and name not in META_FIELDS: name = "_" + name @@ -155,7 +156,7 @@ def meta(self, name, params=None, **kwargs): self._meta[name] = kwargs if params is None else params return self - def to_dict(self): + def to_dict(self) -> Any: meta = self._meta # hard coded serialization of analyzers in _all diff --git a/opensearchpy/_async/helpers/mapping.pyi b/opensearchpy/_async/helpers/mapping.pyi deleted file mode 100644 index 61505f42..00000000 --- a/opensearchpy/_async/helpers/mapping.pyi +++ /dev/null @@ -1,10 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -class AsyncMapping(object): ... diff --git a/opensearchpy/_async/helpers/search.py b/opensearchpy/_async/helpers/search.py index bd6884cf..d844ba29 100644 --- a/opensearchpy/_async/helpers/search.py +++ b/opensearchpy/_async/helpers/search.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -8,6 +9,7 @@ # GitHub history for details. import copy +from typing import Any, Sequence from six import iteritems, string_types @@ -25,7 +27,7 @@ class AsyncSearch(Request): query = ProxyDescriptor("query") post_filter = ProxyDescriptor("post_filter") - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ Search request to opensearch. @@ -39,24 +41,24 @@ def __init__(self, **kwargs): super(AsyncSearch, self).__init__(**kwargs) self.aggs = AggsProxy(self) - self._sort = [] - self._source = None - self._highlight = {} - self._highlight_opts = {} - self._suggest = {} - self._script_fields = {} - self._response_class = Response + self._sort: Sequence[Any] = [] + self._source: Any = None + self._highlight: Any = {} + self._highlight_opts: Any = {} + self._suggest: Any = {} + self._script_fields: Any = {} + self._response_class: Any = Response self._query_proxy = QueryProxy(self, "query") self._post_filter_proxy = QueryProxy(self, "post_filter") - def filter(self, *args, **kwargs): + def filter(self, *args: Any, **kwargs: Any) -> Any: return self.query(Bool(filter=[Q(*args, **kwargs)])) - def exclude(self, *args, **kwargs): + def exclude(self, *args: Any, **kwargs: Any) -> Any: return self.query(Bool(filter=[~Q(*args, **kwargs)])) - def __getitem__(self, n): + def __getitem__(self, n: Any) -> Any: """ Support slicing the `AsyncSearch` instance for pagination. @@ -91,7 +93,7 @@ def __getitem__(self, n): return s @classmethod - def from_dict(cls, d): + def from_dict(cls, d: Any) -> Any: """ Construct a new `AsyncSearch` instance from a raw dict containing the search body. Useful when migrating from raw dictionaries. @@ -112,7 +114,7 @@ def from_dict(cls, d): s.update_from_dict(d) return s - def _clone(self): + def _clone(self) -> Any: """ Return a clone of the current search request. Performs a shallow copy of all the underlying objects. 
Used internally by most state modifying @@ -135,7 +137,7 @@ def _clone(self): s.aggs._params = {"aggs": self.aggs._params["aggs"].copy()} return s - def response_class(self, cls): + def response_class(self, cls: Any) -> Any: """ Override the default wrapper used for the response. """ @@ -143,7 +145,7 @@ def response_class(self, cls): s._response_class = cls return s - def update_from_dict(self, d): + def update_from_dict(self, d: Any) -> "AsyncSearch": """ Apply options from a serialized body to the current instance. Modifies the object in-place. Used mostly by ``from_dict``. @@ -178,7 +180,7 @@ def update_from_dict(self, d): self._extra.update(d) return self - def script_fields(self, **kwargs): + def script_fields(self, **kwargs: Any) -> Any: """ Define script fields to be calculated on hits. @@ -204,7 +206,7 @@ def script_fields(self, **kwargs): s._script_fields.update(kwargs) return s - def source(self, fields=None, **kwargs): + def source(self, fields: Any = None, **kwargs: Any) -> Any: """ Selectively control how the _source field is returned. @@ -249,7 +251,7 @@ def source(self, fields=None, **kwargs): return s - def sort(self, *keys): + def sort(self, *keys: Any) -> Any: """ Add sorting information to the search request. If called without arguments it will remove all sort requirements. Otherwise it will @@ -282,7 +284,7 @@ def sort(self, *keys): s._sort.append(k) return s - def highlight_options(self, **kwargs): + def highlight_options(self, **kwargs: Any) -> Any: """ Update the global highlighting options used for this request. For example:: @@ -294,7 +296,7 @@ def highlight_options(self, **kwargs): s._highlight_opts.update(kwargs) return s - def highlight(self, *fields, **kwargs): + def highlight(self, *fields: Any, **kwargs: Any) -> Any: """ Request highlighting of some fields. All keyword arguments passed in will be used as parameters for all the fields in the ``fields`` parameter. Example:: @@ -334,7 +336,7 @@ def highlight(self, *fields, **kwargs): s._highlight[f] = kwargs return s - def suggest(self, name, text, **kwargs): + def suggest(self, name: str, text: str, **kwargs: Any) -> Any: """ Add a suggestions request to the search. @@ -351,7 +353,7 @@ def suggest(self, name, text, **kwargs): s._suggest[name].update(kwargs) return s - def to_dict(self, count=False, **kwargs): + def to_dict(self, count: bool = False, **kwargs: Any) -> Any: """ Serialize the search into the dictionary that will be sent over as the request's body. @@ -395,7 +397,7 @@ def to_dict(self, count=False, **kwargs): d.update(recursive_to_dict(kwargs)) return d - async def count(self): + async def count(self) -> Any: """ Return the number of hits matching the query and filters. Note that only the actual number is returned. @@ -411,7 +413,7 @@ async def count(self): "count" ] - async def execute(self, ignore_cache=False): + async def execute(self, ignore_cache: bool = False) -> Any: """ Execute the search and return an instance of ``Response`` wrapping all the data. @@ -430,7 +432,7 @@ async def execute(self, ignore_cache=False): ) return self._response - async def scan(self): + async def scan(self) -> Any: """ Turn the search into a scan search and return a generator that will iterate over all the documents matching the query. @@ -448,7 +450,7 @@ async def scan(self): ): yield self._get_result(hit) - async def delete(self): + async def delete(self) -> Any: """ delete() executes the query by delegating to delete_by_query() """ @@ -468,22 +470,22 @@ class AsyncMultiSearch(Request): request. 
""" - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: super(AsyncMultiSearch, self).__init__(**kwargs) - self._searches = [] + self._searches: Any = [] - def __getitem__(self, key): + def __getitem__(self, key: Any) -> Any: return self._searches[key] - def __iter__(self): + def __iter__(self) -> Any: return iter(self._searches) - def _clone(self): + def _clone(self) -> Any: ms = super(AsyncMultiSearch, self)._clone() ms._searches = self._searches[:] return ms - def add(self, search): + def add(self, search: Any) -> Any: """ Adds a new :class:`~opensearchpy.AsyncSearch` object to the request:: @@ -495,7 +497,7 @@ def add(self, search): ms._searches.append(search) return ms - def to_dict(self): + def to_dict(self) -> Any: out = [] for s in self._searches: meta = {} @@ -508,7 +510,9 @@ def to_dict(self): return out - async def execute(self, ignore_cache=False, raise_on_error=True): + async def execute( + self, ignore_cache: bool = False, raise_on_error: bool = True + ) -> Any: """ Execute the multi search request and return a list of search results. """ diff --git a/opensearchpy/_async/helpers/search.pyi b/opensearchpy/_async/helpers/search.pyi deleted file mode 100644 index 4fb1cd3c..00000000 --- a/opensearchpy/_async/helpers/search.pyi +++ /dev/null @@ -1,13 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -from opensearchpy.helpers.search import Request - -class AsyncSearch(Request): ... -class AsyncMultiSearch(Request): ... diff --git a/opensearchpy/_async/helpers/test.py b/opensearchpy/_async/helpers/test.py index c8e43273..9516857c 100644 --- a/opensearchpy/_async/helpers/test.py +++ b/opensearchpy/_async/helpers/test.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -9,18 +10,16 @@ import os import time +from typing import Any from unittest import SkipTest from opensearchpy import AsyncOpenSearch from opensearchpy.exceptions import ConnectionError -if "OPENSEARCH_URL" in os.environ: - OPENSEARCH_URL = os.environ["OPENSEARCH_URL"] -else: - OPENSEARCH_URL = "https://admin:admin@localhost:9200" +OPENSEARCH_URL = os.environ.get("OPENSEARCH_URL", "https://admin:admin@localhost:9200") -async def get_test_client(nowait=False, **kwargs): +async def get_test_client(nowait: bool = False, **kwargs: Any) -> Any: # construct kwargs from the environment kw = {"timeout": 30} @@ -31,7 +30,7 @@ async def get_test_client(nowait=False, **kwargs): kw["connection_class"] = getattr(async_connection, "AIOHttpConnection") kw.update(kwargs) - client = AsyncOpenSearch(OPENSEARCH_URL, **kw) + client = AsyncOpenSearch(OPENSEARCH_URL, **kw) # type: ignore # wait for yellow status for _ in range(1 if nowait else 100): diff --git a/opensearchpy/_async/helpers/test.pyi b/opensearchpy/_async/helpers/test.pyi deleted file mode 100644 index 451bfc14..00000000 --- a/opensearchpy/_async/helpers/test.pyi +++ /dev/null @@ -1,19 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. 
- -from typing import Any - -from _typeshed import Incomplete - -from opensearchpy import AsyncOpenSearch as AsyncOpenSearch -from opensearchpy.exceptions import ConnectionError as ConnectionError - -OPENSEARCH_URL: Incomplete - -async def get_test_client(nowait: bool = ..., **kwargs: Any) -> Any: ... diff --git a/opensearchpy/_async/helpers/update_by_query.py b/opensearchpy/_async/helpers/update_by_query.py index 322b1488..aeb8e3d2 100644 --- a/opensearchpy/_async/helpers/update_by_query.py +++ b/opensearchpy/_async/helpers/update_by_query.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -7,6 +8,8 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. +from typing import Any + from opensearchpy.connection.async_connections import get_connection from opensearchpy.helpers.query import Bool, Q from opensearchpy.helpers.response import UpdateByQueryResponse @@ -17,7 +20,7 @@ class AsyncUpdateByQuery(Request): query = ProxyDescriptor("query") - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ Update by query request to opensearch. @@ -31,17 +34,17 @@ def __init__(self, **kwargs): """ super(AsyncUpdateByQuery, self).__init__(**kwargs) self._response_class = UpdateByQueryResponse - self._script = {} + self._script: Any = {} self._query_proxy = QueryProxy(self, "query") - def filter(self, *args, **kwargs): + def filter(self, *args: Any, **kwargs: Any) -> Any: return self.query(Bool(filter=[Q(*args, **kwargs)])) - def exclude(self, *args, **kwargs): + def exclude(self, *args: Any, **kwargs: Any) -> Any: return self.query(Bool(filter=[~Q(*args, **kwargs)])) @classmethod - def from_dict(cls, d): + def from_dict(cls, d: Any) -> Any: """ Construct a new `AsyncUpdateByQuery` instance from a raw dict containing the search body. Useful when migrating from raw dictionaries. @@ -62,7 +65,7 @@ def from_dict(cls, d): u.update_from_dict(d) return u - def _clone(self): + def _clone(self) -> Any: """ Return a clone of the current search request. Performs a shallow copy of all the underlying objects. Used internally by most state modifying @@ -75,7 +78,7 @@ def _clone(self): ubq.query._proxied = self.query._proxied return ubq - def response_class(self, cls): + def response_class(self, cls: Any) -> Any: """ Override the default wrapper used for the response. """ @@ -83,7 +86,7 @@ def response_class(self, cls): ubq._response_class = cls return ubq - def update_from_dict(self, d): + def update_from_dict(self, d: Any) -> "AsyncUpdateByQuery": """ Apply options from a serialized body to the current instance. Modifies the object in-place. Used mostly by ``from_dict``. @@ -96,7 +99,7 @@ def update_from_dict(self, d): self._extra.update(d) return self - def script(self, **kwargs): + def script(self, **kwargs: Any) -> Any: """ Define update action to take: @@ -117,7 +120,7 @@ def script(self, **kwargs): ubq._script.update(kwargs) return ubq - def to_dict(self, **kwargs): + def to_dict(self, **kwargs: Any) -> Any: """ Serialize the search into the dictionary that will be sent over as the request'ubq body. @@ -135,7 +138,7 @@ def to_dict(self, **kwargs): d.update(recursive_to_dict(kwargs)) return d - async def execute(self): + async def execute(self) -> Any: """ Execute the search and return an instance of ``Response`` wrapping all the data. 
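The `AsyncUpdateByQuery` helper typed above keeps its fluent, clone-on-write interface. A minimal usage sketch, assuming a reachable local cluster and passing the client through the `using` argument accepted by the `Request` base class (index name, filter, and script are illustrative):

```python
import asyncio

from opensearchpy import AsyncOpenSearch
from opensearchpy._async.helpers.update_by_query import AsyncUpdateByQuery


async def main() -> None:
    client = AsyncOpenSearch("https://admin:admin@localhost:9200", verify_certs=False)

    # filter() and script() each return a clone, so the request is built fluently.
    ubq = (
        AsyncUpdateByQuery(using=client, index="my-index")
        .filter("term", category="books")
        .script(source="ctx._source.views += 1", lang="painless")
    )
    response = await ubq.execute()
    print(response.updated)  # updated-document count from the raw response

    await client.close()


asyncio.run(main())
```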
diff --git a/opensearchpy/_async/http_aiohttp.py b/opensearchpy/_async/http_aiohttp.py index e60e3f80..f14d5384 100644 --- a/opensearchpy/_async/http_aiohttp.py +++ b/opensearchpy/_async/http_aiohttp.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -29,8 +30,9 @@ import os import ssl import warnings +from typing import Any, Collection, Mapping, Optional, Union -import urllib3 # type: ignore +import urllib3 from ..compat import reraise_exceptions, urlencode from ..connection.base import Connection @@ -40,12 +42,9 @@ ImproperlyConfigured, SSLError, ) -from ._extra_imports import aiohttp, aiohttp_exceptions, yarl +from ._extra_imports import aiohttp, aiohttp_exceptions, yarl # type: ignore from .compat import get_running_loop -# sentinel value for `verify_certs`. -# This is used to detect if a user is passing in a value -# for SSL kwargs if also using an SSLContext. VERIFY_CERTS_DEFAULT = object() SSL_SHOW_WARN_DEFAULT = object() @@ -55,45 +54,48 @@ class AsyncConnection(Connection): async def perform_request( self, - method, - url, - params=None, - body=None, - timeout=None, - ignore=(), - headers=None, - ): + method: str, + url: str, + params: Optional[Mapping[str, Any]] = None, + body: Optional[bytes] = None, + timeout: Optional[Union[int, float]] = None, + ignore: Collection[int] = (), + headers: Optional[Mapping[str, str]] = None, + ) -> Any: raise NotImplementedError() - async def close(self): + async def close(self) -> None: raise NotImplementedError() class AIOHttpConnection(AsyncConnection): + session: aiohttp.ClientSession + ssl_assert_fingerprint: Optional[str] + def __init__( self, - host="localhost", - port=None, - url_prefix="", - timeout=10, - http_auth=None, - use_ssl=False, - verify_certs=VERIFY_CERTS_DEFAULT, - ssl_show_warn=SSL_SHOW_WARN_DEFAULT, - ca_certs=None, - client_cert=None, - client_key=None, - ssl_version=None, - ssl_assert_fingerprint=None, - maxsize=10, - headers=None, - ssl_context=None, - http_compress=None, - opaque_id=None, - loop=None, - trust_env=False, - **kwargs - ): + host: str = "localhost", + port: Optional[int] = None, + url_prefix: str = "", + timeout: int = 10, + http_auth: Any = None, + use_ssl: bool = False, + verify_certs: Any = VERIFY_CERTS_DEFAULT, + ssl_show_warn: Any = SSL_SHOW_WARN_DEFAULT, + ca_certs: Any = None, + client_cert: Any = None, + client_key: Any = None, + ssl_version: Any = None, + ssl_assert_fingerprint: Any = None, + maxsize: Optional[int] = 10, + headers: Any = None, + ssl_context: Any = None, + http_compress: Optional[bool] = None, + opaque_id: Optional[str] = None, + loop: Any = None, + trust_env: Optional[bool] = False, + **kwargs: Any + ) -> None: """ Default connection class for ``AsyncOpenSearch`` using the `aiohttp` library and the http protocol. 
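With the constructor typed as above, the SSL and pooling knobs of `AIOHttpConnection` are easier to discover. A hedged sketch of how they are usually supplied; `AsyncOpenSearch` forwards these keyword arguments to each connection it creates (host and credentials are placeholders):

```python
from opensearchpy import AsyncOpenSearch

# Each keyword below maps to a parameter in the AIOHttpConnection.__init__
# signature above; the VERIFY_CERTS_DEFAULT / SSL_SHOW_WARN_DEFAULT sentinels
# are replaced here by explicit choices.
client = AsyncOpenSearch(
    hosts=[{"host": "localhost", "port": 9200}],
    http_auth=("admin", "admin"),
    use_ssl=True,
    verify_certs=False,
    ssl_show_warn=False,
    maxsize=20,          # size of the per-node aiohttp connection pool
    http_compress=True,  # gzip request bodies
    trust_env=True,      # let aiohttp honor proxy settings from the environment
)
```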
@@ -223,8 +225,15 @@ def __init__( self._trust_env = trust_env async def perform_request( - self, method, url, params=None, body=None, timeout=None, ignore=(), headers=None - ): + self, + method: str, + url: str, + params: Optional[Mapping[str, Any]] = None, + body: Optional[bytes] = None, + timeout: Optional[Union[int, float]] = None, + ignore: Collection[int] = (), + headers: Optional[Mapping[str, str]] = None, + ) -> Any: if self.session is None: await self._create_aiohttp_session() assert self.session is not None @@ -345,14 +354,14 @@ async def perform_request( return response.status, response.headers, raw_data - async def close(self): + async def close(self) -> Any: """ Explicitly closes connection """ if self.session: await self.session.close() - async def _create_aiohttp_session(self): + async def _create_aiohttp_session(self) -> Any: """Creates an aiohttp.ClientSession(). This is delayed until the first call to perform_request() so that AsyncTransport has a chance to set AIOHttpConnection.loop @@ -376,9 +385,9 @@ async def _create_aiohttp_session(self): ) -class OpenSearchClientResponse(aiohttp.ClientResponse): - async def text(self, encoding=None, errors="strict"): +class OpenSearchClientResponse(aiohttp.ClientResponse): # type: ignore + async def text(self, encoding: Any = None, errors: str = "strict") -> Any: if self._body is None: await self.read() - return self._body.decode("utf-8", "surrogatepass") + return self._body.decode("utf-8", "surrogatepass") # type: ignore diff --git a/opensearchpy/_async/http_aiohttp.pyi b/opensearchpy/_async/http_aiohttp.pyi deleted file mode 100644 index 4dea4317..00000000 --- a/opensearchpy/_async/http_aiohttp.pyi +++ /dev/null @@ -1,71 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Collection, Mapping, Optional, Tuple, Union - -from ..connection import Connection -from ._extra_imports import aiohttp # type: ignore - -class AsyncConnection(Connection): - async def perform_request( # type: ignore - self, - method: str, - url: str, - params: Optional[Mapping[str, Any]] = ..., - body: Optional[bytes] = ..., - timeout: Optional[Union[int, float]] = ..., - ignore: Collection[int] = ..., - headers: Optional[Mapping[str, str]] = ..., - ) -> Tuple[int, Mapping[str, str], str]: ... - async def close(self) -> None: ... 
- -class AIOHttpConnection(AsyncConnection): - session: Optional[aiohttp.ClientSession] - ssl_assert_fingerprint: Optional[str] - def __init__( - self, - host: str = ..., - port: Optional[int] = ..., - url_prefix: str = ..., - timeout: int = ..., - http_auth: Optional[Any] = ..., - use_ssl: bool = ..., - verify_certs: bool = ..., - ssl_show_warn: bool = ..., - ca_certs: Optional[Any] = ..., - client_cert: Optional[Any] = ..., - client_key: Optional[Any] = ..., - ssl_version: Optional[Any] = ..., - ssl_assert_fingerprint: Optional[Any] = ..., - maxsize: int = ..., - headers: Optional[Mapping[str, str]] = ..., - ssl_context: Optional[Any] = ..., - http_compress: Optional[bool] = ..., - opaque_id: Optional[str] = ..., - loop: Any = ..., - trust_env: bool = ..., - **kwargs: Any - ) -> None: ... diff --git a/opensearchpy/_async/plugins/__init__.py b/opensearchpy/_async/plugins/__init__.py index 6c0097cd..22c54ac8 100644 --- a/opensearchpy/_async/plugins/__init__.py +++ b/opensearchpy/_async/plugins/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/_async/plugins/__init__.pyi b/opensearchpy/_async/plugins/__init__.pyi deleted file mode 100644 index 6c0097cd..00000000 --- a/opensearchpy/_async/plugins/__init__.pyi +++ /dev/null @@ -1,8 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. diff --git a/opensearchpy/_async/plugins/alerting.py b/opensearchpy/_async/plugins/alerting.py index d8b27937..f1cf3ac9 100644 --- a/opensearchpy/_async/plugins/alerting.py +++ b/opensearchpy/_async/plugins/alerting.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -7,12 +8,19 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. +from typing import Any, Union + from ..client.utils import NamespacedClient, _make_path, query_params class AlertingClient(NamespacedClient): @query_params() - async def search_monitor(self, body, params=None, headers=None): + async def search_monitor( + self, + body: Any, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Returns the search result for a monitor. @@ -27,7 +35,12 @@ async def search_monitor(self, body, params=None, headers=None): ) @query_params() - async def get_monitor(self, monitor_id, params=None, headers=None): + async def get_monitor( + self, + monitor_id: Any, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Returns the details of a specific monitor. @@ -41,7 +54,12 @@ async def get_monitor(self, monitor_id, params=None, headers=None): ) @query_params("dryrun") - async def run_monitor(self, monitor_id, params=None, headers=None): + async def run_monitor( + self, + monitor_id: Any, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Runs/Executes a specific monitor. 
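Because `run_monitor` is decorated with `@query_params("dryrun")`, the dry-run flag can be passed as a plain keyword and is serialized into the query string. A short sketch, assuming the alerting client is reached through the `plugins` namespace attached by `PluginsClient` (the monitor id is a placeholder):

```python
import asyncio

from opensearchpy import AsyncOpenSearch


async def dry_run(monitor_id: str) -> None:
    client = AsyncOpenSearch("https://admin:admin@localhost:9200", verify_certs=False)
    # dryrun=True is lifted into the query string by the decorator above.
    result = await client.plugins.alerting.run_monitor(monitor_id, dryrun=True)
    print(result)
    await client.close()


asyncio.run(dry_run("my-monitor-id"))  # placeholder id
```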
@@ -56,7 +74,12 @@ async def run_monitor(self, monitor_id, params=None, headers=None): ) @query_params() - async def create_monitor(self, body=None, params=None, headers=None): + async def create_monitor( + self, + body: Union[Any, None] = None, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Creates a monitor with inputs, triggers, and actions. @@ -71,7 +94,13 @@ async def create_monitor(self, body=None, params=None, headers=None): ) @query_params() - async def update_monitor(self, monitor_id, body=None, params=None, headers=None): + async def update_monitor( + self, + monitor_id: Any, + body: Union[Any, None] = None, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Updates a monitor's inputs, triggers, and actions. @@ -87,7 +116,12 @@ async def update_monitor(self, monitor_id, body=None, params=None, headers=None) ) @query_params() - async def delete_monitor(self, monitor_id, params=None, headers=None): + async def delete_monitor( + self, + monitor_id: Any, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Deletes a specific monitor. @@ -101,7 +135,12 @@ async def delete_monitor(self, monitor_id, params=None, headers=None): ) @query_params() - async def get_destination(self, destination_id=None, params=None, headers=None): + async def get_destination( + self, + destination_id: Union[Any, None] = None, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Returns the details of a specific destination. @@ -117,7 +156,12 @@ async def get_destination(self, destination_id=None, params=None, headers=None): ) @query_params() - async def create_destination(self, body=None, params=None, headers=None): + async def create_destination( + self, + body: Union[Any, None] = None, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Creates a destination for slack, mail, or custom-webhook. @@ -133,8 +177,12 @@ async def create_destination(self, body=None, params=None, headers=None): @query_params() async def update_destination( - self, destination_id, body=None, params=None, headers=None - ): + self, + destination_id: Any, + body: Union[Any, None] = None, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Updates a destination's inputs, triggers, and actions. @@ -150,7 +198,12 @@ async def update_destination( ) @query_params() - async def delete_destination(self, destination_id, params=None, headers=None): + async def delete_destination( + self, + destination_id: Any, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Deletes a specific destination. @@ -164,7 +217,9 @@ async def delete_destination(self, destination_id, params=None, headers=None): ) @query_params() - async def get_alerts(self, params=None, headers=None): + async def get_alerts( + self, params: Union[Any, None] = None, headers: Union[Any, None] = None + ) -> Union[bool, Any]: """ Returns all alerts. @@ -177,7 +232,13 @@ async def get_alerts(self, params=None, headers=None): ) @query_params() - async def acknowledge_alert(self, monitor_id, body=None, params=None, headers=None): + async def acknowledge_alert( + self, + monitor_id: Any, + body: Union[Any, None] = None, + params: Union[Any, None] = None, + headers: Union[Any, None] = None, + ) -> Union[bool, Any]: """ Acknowledges an alert. 
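Taken together, the typed alerting methods cover the whole monitor/alert lifecycle. A hedged sketch of the common flow; the monitor body is trimmed to a bare illustration rather than a complete schema, and the response shapes (`_id`, `alerts`, `monitor_id`) follow the alerting plugin's JSON:

```python
from typing import Any


async def monitor_lifecycle(client: Any) -> None:
    # Create a monitor; this body is a trimmed illustration, not a full schema.
    created = await client.plugins.alerting.create_monitor(body={
        "type": "monitor",
        "name": "test-monitor",
        "enabled": True,
        "schedule": {"period": {"interval": 1, "unit": "MINUTES"}},
        "inputs": [],
        "triggers": [],
    })
    monitor_id = created["_id"]

    # Fetch alerts, then acknowledge the ones raised by this monitor.
    alerts = await client.plugins.alerting.get_alerts()
    to_ack = [a["id"] for a in alerts.get("alerts", []) if a.get("monitor_id") == monitor_id]
    if to_ack:
        await client.plugins.alerting.acknowledge_alert(monitor_id, body={"alerts": to_ack})
```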
diff --git a/opensearchpy/_async/plugins/alerting.pyi b/opensearchpy/_async/plugins/alerting.pyi deleted file mode 100644 index 50392224..00000000 --- a/opensearchpy/_async/plugins/alerting.pyi +++ /dev/null @@ -1,82 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -from typing import Any, Union - -from ..client.utils import NamespacedClient as NamespacedClient - -class AlertingClient(NamespacedClient): - def search_monitor( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def get_monitor( - self, - monitor_id: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def run_monitor( - self, - monitor_id: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def create_monitor( - self, - body: Union[Any, None] = ..., - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def update_monitor( - self, - monitor_id: Any, - body: Union[Any, None] = ..., - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def delete_monitor( - self, - monitor_id: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def get_destination( - self, - destination_id: Union[Any, None] = ..., - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def create_destination( - self, - body: Union[Any, None] = ..., - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def update_destination( - self, - destination_id: Any, - body: Union[Any, None] = ..., - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def delete_destination( - self, - destination_id: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def get_alerts( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def acknowledge_alert( - self, - monitor_id: Any, - body: Union[Any, None] = ..., - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... diff --git a/opensearchpy/_async/plugins/index_management.py b/opensearchpy/_async/plugins/index_management.py index 3be06e6a..bbca4e2f 100644 --- a/opensearchpy/_async/plugins/index_management.py +++ b/opensearchpy/_async/plugins/index_management.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -8,12 +9,16 @@ # GitHub history for details. +from typing import Any + from ..client.utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class IndexManagementClient(NamespacedClient): @query_params() - async def put_policy(self, policy, body=None, params=None, headers=None): + async def put_policy( + self, policy: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Creates, or updates, a policy. 
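`put_policy` pairs naturally with `add_policy` (typed in the following hunks) to register an ISM policy and attach it to an index. A sketch under the same `plugins` namespace assumption as above; the policy body is heavily trimmed, see the ISM schema for the required fields:

```python
from typing import Any


async def apply_ism_policy(client: Any) -> None:
    # Register (or update) a policy under a known id; body trimmed for brevity.
    await client.plugins.index_management.put_policy(
        "hot-delete",
        body={
            "policy": {
                "description": "example policy",
                "default_state": "hot",
                "states": [{"name": "hot", "actions": [{"read_only": {}}], "transitions": []}],
            }
        },
    )
    # Attach the policy to an existing index.
    await client.plugins.index_management.add_policy("my-index", body={"policy_id": "hot-delete"})
```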
@@ -31,7 +36,9 @@ async def put_policy(self, policy, body=None, params=None, headers=None): ) @query_params() - async def add_policy(self, index, body=None, params=None, headers=None): + async def add_policy( + self, index: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Adds a policy to an index. This operation does not change the policy if the index already has one. @@ -49,7 +56,9 @@ async def add_policy(self, index, body=None, params=None, headers=None): ) @query_params() - async def get_policy(self, policy, params=None, headers=None): + async def get_policy( + self, policy: Any, params: Any = None, headers: Any = None + ) -> Any: """ Gets the policy by `policy_id`. @@ -66,7 +75,9 @@ async def get_policy(self, policy, params=None, headers=None): ) @query_params() - async def remove_policy_from_index(self, index, params=None, headers=None): + async def remove_policy_from_index( + self, index: Any, params: Any = None, headers: Any = None + ) -> Any: """ Removes any ISM policy from the index. @@ -83,7 +94,9 @@ async def remove_policy_from_index(self, index, params=None, headers=None): ) @query_params() - async def change_policy(self, index, body=None, params=None, headers=None): + async def change_policy( + self, index: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Updates the managed index policy to a new policy (or to a new version of the policy). @@ -101,7 +114,9 @@ async def change_policy(self, index, body=None, params=None, headers=None): ) @query_params() - async def retry(self, index, body=None, params=None, headers=None): + async def retry( + self, index: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Retries the failed action for an index. @@ -119,7 +134,9 @@ async def retry(self, index, body=None, params=None, headers=None): ) @query_params("show_policy") - async def explain_index(self, index, params=None, headers=None): + async def explain_index( + self, index: Any, params: Any = None, headers: Any = None + ) -> Any: """ Gets the current state of the index. @@ -136,7 +153,9 @@ async def explain_index(self, index, params=None, headers=None): ) @query_params() - async def delete_policy(self, policy, params=None, headers=None): + async def delete_policy( + self, policy: Any, params: Any = None, headers: Any = None + ) -> Any: """ Deletes the policy by `policy_id`. diff --git a/opensearchpy/_async/plugins/index_management.pyi b/opensearchpy/_async/plugins/index_management.pyi deleted file mode 100644 index cd08954d..00000000 --- a/opensearchpy/_async/plugins/index_management.pyi +++ /dev/null @@ -1,71 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -from typing import Any, Union - -from ..client.utils import NamespacedClient as NamespacedClient -from ..client.utils import query_params as query_params - -class IndexManagementClient(NamespacedClient): - async def put_policy( - self, - policy: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - async def add_policy( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... 
- async def get_policy( - self, - policy: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - async def remove_policy_from_index( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - async def change_policy( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - async def retry( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - async def explain_index( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - async def delete_policy( - self, - policy: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... diff --git a/opensearchpy/_async/transport.py b/opensearchpy/_async/transport.py index e93344bc..854f0a06 100644 --- a/opensearchpy/_async/transport.py +++ b/opensearchpy/_async/transport.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -29,6 +30,10 @@ import logging import sys from itertools import chain +from typing import Any, Collection, Mapping, Optional, Type, Union + +from opensearchpy.connection.base import Connection +from opensearchpy.serializer import Serializer from ..connection_pool import ConnectionPool from ..exceptions import ( @@ -55,25 +60,27 @@ class AsyncTransport(Transport): DEFAULT_CONNECTION_CLASS = AIOHttpConnection + sniffing_task: Any = None + def __init__( self, - hosts, - connection_class=None, - connection_pool_class=ConnectionPool, - host_info_callback=get_host_info, - sniff_on_start=False, - sniffer_timeout=None, - sniff_timeout=0.1, - sniff_on_connection_fail=False, - serializer=JSONSerializer(), - serializers=None, - default_mimetype="application/json", - max_retries=3, - retry_on_status=(502, 503, 504), - retry_on_timeout=False, - send_get_body_as="GET", - **kwargs - ): + hosts: Any, + connection_class: Any = None, + connection_pool_class: Type[ConnectionPool] = ConnectionPool, + host_info_callback: Any = get_host_info, + sniff_on_start: bool = False, + sniffer_timeout: Any = None, + sniff_timeout: float = 0.1, + sniff_on_connection_fail: bool = False, + serializer: Serializer = JSONSerializer(), + serializers: Any = None, + default_mimetype: str = "application/json", + max_retries: int = 3, + retry_on_status: Any = (502, 503, 504), + retry_on_timeout: bool = False, + send_get_body_as: str = "GET", + **kwargs: Any + ) -> None: """ :arg hosts: list of dictionaries, each containing keyword arguments to create a `connection_class` instance @@ -112,9 +119,9 @@ def __init__( options provided as part of the hosts parameter. """ self.sniffing_task = None - self.loop = None + self.loop: Any = None self._async_init_called = False - self._sniff_on_start_event = None # type: asyncio.Event + self._sniff_on_start_event: Optional[asyncio.Event] = None super(AsyncTransport, self).__init__( hosts=[], @@ -141,7 +148,7 @@ def __init__( self.hosts = hosts self.sniff_on_start = sniff_on_start - async def _async_init(self): + async def _async_init(self) -> None: """This is our stand-in for an async constructor. Everything that was deferred within __init__() should be done here now. 
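`_async_init()` acts as the transport's deferred async constructor, so the client can be instantiated before any event loop is running; loop-bound setup happens on the first awaited call. A small sketch:

```python
import asyncio

from opensearchpy import AsyncOpenSearch

# Safe outside a running loop: AsyncTransport defers loop-bound setup
# to _async_init(), which runs on the first awaited request.
client = AsyncOpenSearch("https://admin:admin@localhost:9200", verify_certs=False)


async def main() -> None:
    print(await client.info())  # first call triggers _async_init()
    await client.close()


asyncio.run(main())
```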
@@ -170,7 +177,7 @@ async def _async_init(self): # Since this is the first one we wait for it to complete # in case there's an error it'll get raised here. - await self.sniffing_task + await self.sniffing_task # type: ignore # If the task gets cancelled here it likely means the # transport got closed. @@ -183,7 +190,7 @@ async def _async_init(self): finally: self._sniff_on_start_event.set() - async def _async_call(self): + async def _async_call(self) -> None: """This method is called within any async method of AsyncTransport where the transport is not closing. This will check to see if we should call our _async_init() or create a new sniffing task @@ -204,7 +211,7 @@ async def _async_call(self): if self.loop.time() >= self.last_sniff + self.sniffer_timeout: self.create_sniff_task() - async def _get_node_info(self, conn, initial): + async def _get_node_info(self, conn: Any, initial: Any) -> Any: try: # use small timeout for the sniffing request, should be a fast api call _, headers, node_info = await conn.perform_request( @@ -217,7 +224,7 @@ async def _get_node_info(self, conn, initial): pass return None - async def _get_sniff_data(self, initial=False): + async def _get_sniff_data(self, initial: Any = False) -> Any: previous_sniff = self.last_sniff # reset last_sniff timestamp @@ -226,7 +233,7 @@ async def _get_sniff_data(self, initial=False): # use small timeout for the sniffing request, should be a fast api call timeout = self.sniff_timeout if not initial else None - def _sniff_request(conn): + def _sniff_request(conn: Any) -> Any: return self.loop.create_task( conn.perform_request("GET", "/_nodes/_all/http", timeout=timeout) ) @@ -242,7 +249,7 @@ def _sniff_request(conn): continue tasks.append(_sniff_request(conn)) - done = () + done: Any = () try: while tasks: # The 'loop' keyword is deprecated in 3.8+ so don't @@ -282,7 +289,7 @@ def _sniff_request(conn): for task in chain(done, tasks): task.cancel() - async def sniff_hosts(self, initial=False): + async def sniff_hosts(self, initial: bool = False) -> Any: """Either spawns a sniffing_task which does regular sniffing over time or does a single sniffing session and awaits the results. """ @@ -293,7 +300,7 @@ async def sniff_hosts(self, initial=False): return node_info = await self._get_sniff_data(initial) - hosts = list(filter(None, (self._get_host_info(n) for n in node_info))) + hosts: Any = list(filter(None, (self._get_host_info(n) for n in node_info))) # we weren't able to get any nodes, maybe using an incompatible # transport_schema or host_info_callback blocked all - raise error. @@ -310,7 +317,7 @@ async def sniff_hosts(self, initial=False): if c not in self.connection_pool.connections: await c.close() - def create_sniff_task(self, initial=False): + def create_sniff_task(self, initial: bool = False) -> None: """ Initiate a sniffing task. Make sure we only have one sniff request running at any given time. If a finished sniffing request is around, @@ -326,7 +333,7 @@ def create_sniff_task(self, initial=False): if self.sniffing_task is None: self.sniffing_task = self.loop.create_task(self.sniff_hosts(initial)) - def mark_dead(self, connection): + def mark_dead(self, connection: Connection) -> None: """ Mark a connection as dead (failed) in the connection pool. If sniffing on failure is enabled this will initiate the sniffing process. 
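The sniffing parameters typed on `AsyncTransport.__init__` are normally passed straight through the client constructor. A configuration sketch; the values are illustrative:

```python
from opensearchpy import AsyncOpenSearch

# Each keyword maps to an AsyncTransport.__init__ parameter typed above.
client = AsyncOpenSearch(
    hosts=["https://admin:admin@localhost:9200"],
    verify_certs=False,
    sniff_on_start=True,            # schedule a sniff during _async_init()
    sniff_on_connection_fail=True,  # mark_dead() will also create a sniff task
    sniffer_timeout=60,             # re-sniff at most once per minute
    sniff_timeout=0.5,              # per-node timeout for GET /_nodes/_all/http
)
```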
@@ -337,20 +344,29 @@ def mark_dead(self, connection): if self.sniff_on_connection_fail: self.create_sniff_task() - def get_connection(self): + def get_connection(self) -> Any: return self.connection_pool.get_connection() - async def perform_request(self, method, url, headers=None, params=None, body=None): + async def perform_request( + self, + method: str, + url: str, + params: Optional[Mapping[str, Any]] = None, + body: Optional[bytes] = None, + timeout: Optional[Union[int, float]] = None, + ignore: Collection[int] = (), + headers: Optional[Mapping[str, str]] = None, + ) -> Any: """ Perform the actual request. Retrieve a connection from the connection - pool, pass all the information to it's perform_request method and + pool, pass all the information to its perform_request method and return the data. If an exception was raised, mark the connection as failed and retry (up to `max_retries` times). If the operation was successful and the connection used was previously - marked as dead, mark it as live, resetting it's failure count. + marked as dead, mark it as live, resetting its failure count. :arg method: HTTP method to use :arg url: absolute url (without host) to target @@ -412,7 +428,7 @@ async def perform_request(self, method, url, headers=None, params=None, body=Non raise e else: - # connection didn't fail, confirm it's live status + # connection didn't fail, confirm its live status self.connection_pool.mark_live(connection) if method == "HEAD": @@ -424,7 +440,7 @@ async def perform_request(self, method, url, headers=None, params=None, body=Non ) return data - async def close(self): + async def close(self) -> None: """ Explicitly closes connections """ @@ -438,3 +454,6 @@ async def close(self): for connection in self.connection_pool.connections: await connection.close() + + +__all__ = ["TransportError"] diff --git a/opensearchpy/_async/transport.pyi b/opensearchpy/_async/transport.pyi deleted file mode 100644 index cc9406bf..00000000 --- a/opensearchpy/_async/transport.pyi +++ /dev/null @@ -1,90 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -from typing import Any, Callable, Collection, Dict, List, Mapping, Optional, Type, Union - -from ..connection import Connection -from ..connection_pool import ConnectionPool -from ..serializer import Deserializer, Serializer - -class AsyncTransport(object): - DEFAULT_CONNECTION_CLASS: Type[Connection] - connection_pool: ConnectionPool - deserializer: Deserializer - - max_retries: int - retry_on_timeout: bool - retry_on_status: Collection[int] - send_get_body_as: str - serializer: Serializer - connection_pool_class: Type[ConnectionPool] - connection_class: Type[Connection] - kwargs: Any - hosts: Optional[List[Dict[str, Any]]] - seed_connections: List[Connection] - sniffer_timeout: Optional[float] - sniff_on_start: bool - sniff_on_connection_fail: bool - last_sniff: float - sniff_timeout: Optional[float] - host_info_callback: Callable[ - [Dict[str, Any], Optional[Dict[str, Any]]], Dict[str, Any] - ] - def __init__( - self, - hosts: Any, - connection_class: Optional[Type[Any]] = ..., - connection_pool_class: Type[ConnectionPool] = ..., - host_info_callback: Callable[ - [Dict[str, Any], Dict[str, Any]], Optional[Dict[str, Any]] - ] = ..., - sniff_on_start: bool = ..., - sniffer_timeout: Optional[float] = ..., - sniff_timeout: float = ..., - sniff_on_connection_fail: bool = ..., - serializer: Serializer = ..., - serializers: Optional[Mapping[str, Serializer]] = ..., - default_mimetype: str = ..., - max_retries: int = ..., - retry_on_status: Collection[int] = ..., - retry_on_timeout: bool = ..., - send_get_body_as: str = ..., - **kwargs: Any - ) -> None: ... - def add_connection(self, host: Any) -> None: ... - def set_connections(self, hosts: Collection[Any]) -> None: ... - def get_connection(self) -> Connection: ... - def sniff_hosts(self, initial: bool = ...) -> None: ... - def mark_dead(self, connection: Connection) -> None: ... - async def perform_request( - self, - method: str, - url: str, - headers: Optional[Mapping[str, str]] = ..., - params: Optional[Mapping[str, Any]] = ..., - body: Optional[Any] = ..., - ) -> Union[bool, Any]: ... - async def close(self) -> None: ... diff --git a/opensearchpy/_version.py b/opensearchpy/_version.py index a21ba4b7..13c8d5c9 100644 --- a/opensearchpy/_version.py +++ b/opensearchpy/_version.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -24,4 +25,4 @@ # specific language governing permissions and limitations # under the License. -__versionstr__ = "2.3.0" +__versionstr__: str = "2.3.2" diff --git a/opensearchpy/client/__init__.py b/opensearchpy/client/__init__.py index 1fe0c959..05af6764 100644 --- a/opensearchpy/client/__init__.py +++ b/opensearchpy/client/__init__.py @@ -26,12 +26,24 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
+# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + from __future__ import unicode_literals import logging +from typing import Any, Type from ..transport import Transport, TransportError from .cat import CatClient +from .client import Client from .cluster import ClusterClient from .dangling_indices import DanglingIndicesClient from .features import FeaturesClient @@ -40,15 +52,16 @@ from .nodes import NodesClient from .plugins import PluginsClient from .remote import RemoteClient +from .remote_store import RemoteStoreClient from .security import SecurityClient from .snapshot import SnapshotClient from .tasks import TasksClient -from .utils import SKIP_IN_PATH, _bulk_body, _make_path, _normalize_hosts, query_params +from .utils import SKIP_IN_PATH, _bulk_body, _make_path, query_params logger = logging.getLogger("opensearch") -class OpenSearch(object): +class OpenSearch(Client): """ OpenSearch client. Provides a straightforward mapping from Python to OpenSearch REST endpoints. @@ -173,7 +186,19 @@ def default(self, obj): """ - def __init__(self, hosts=None, transport_class=Transport, **kwargs): + # include PIT functions inside _patch.py + from ._patch import ( # type: ignore + create_point_in_time, + delete_point_in_time, + list_all_point_in_time, + ) + + def __init__( + self, + hosts: Any = None, + transport_class: Type[Transport] = Transport, + **kwargs: Any + ) -> None: """ :arg hosts: list of nodes, or a single node, we should connect to. Node should be a dictionary ({"host": "localhost", "port": 9200}), @@ -188,7 +213,7 @@ class as kwargs, or a string in the format of ``host[:port]`` which will be :class:`~opensearchpy.Transport` class and, subsequently, to the :class:`~opensearchpy.Connection` instances. 
""" - self.transport = transport_class(_normalize_hosts(hosts), **kwargs) + super().__init__(hosts, transport_class, **kwargs) # namespaced clients for compatibility with API names self.cat = CatClient(self) @@ -201,15 +226,16 @@ class as kwargs, or a string in the format of ``host[:port]`` which will be self.security = SecurityClient(self) self.snapshot = SnapshotClient(self) self.tasks = TasksClient(self) + self.remote_store = RemoteStoreClient(self) self.features = FeaturesClient(self) self.plugins = PluginsClient(self) - def __repr__(self): + def __repr__(self) -> Any: try: # get a list of all connections - cons = self.transport.hosts + cons: Any = self.transport.hosts # truncate to 5 if there are too many if len(cons) > 5: cons = cons[:5] + ["..."] @@ -218,21 +244,25 @@ def __repr__(self): # probably operating on custom transport and connection_pool, ignore return super(OpenSearch, self).__repr__() - def __enter__(self): + def __enter__(self) -> Any: if hasattr(self.transport, "_async_call"): self.transport._async_call() return self - def __exit__(self, *_): + def __exit__(self, *_: Any) -> None: self.close() - def close(self): + def close(self) -> None: """Closes the Transport and all internal connections""" self.transport.close() # AUTO-GENERATED-API-DEFINITIONS # @query_params() - def ping(self, params=None, headers=None): + def ping( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns whether the cluster is running. @@ -245,7 +275,11 @@ def ping(self, params=None, headers=None): return False @query_params() - def info(self, params=None, headers=None): + def info( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns basic information about the cluster. @@ -263,31 +297,38 @@ def info(self, params=None, headers=None): "version_type", "wait_for_active_shards", ) - def create(self, index, id, body, params=None, headers=None): + def create( + self, + index: Any, + id: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates a new document in the index. Returns a 409 response when a document with a same ID already exists in the index. - :arg index: The name of the index - :arg id: Document ID + :arg index: Index name. + :arg id: Document ID. :arg body: The document :arg pipeline: The pipeline id to preprocess incoming documents - with + with. :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then - do nothing with refreshes. Valid choices: true, false, wait_for - :arg routing: Specific routing value - :arg timeout: Explicit operation timeout - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte + do nothing with refreshes. Valid choices are true, false, wait_for. + :arg routing: Routing value. + :arg timeout: Operation timeout. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the index operation. Defaults - to 1, meaning the primary shard only. Set to `all` for all shard copies, + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. 
Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total - number of copies for the shard (number of replicas + 1) + number of copies for the shard (number of replicas + 1). Default is 1. """ for param in (index, id, body): if param in SKIP_IN_PATH: @@ -312,51 +353,54 @@ def create(self, index, id, body, params=None, headers=None): "version_type", "wait_for_active_shards", ) - def index(self, index, body, id=None, params=None, headers=None): + def index( + self, + index: Any, + body: Any, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Creates or overwrites a document in an index. + Creates or updates a document in an index. - :arg index: The name of the index + :arg index: Index name. :arg body: The document - :arg id: Document ID - :arg if_primary_term: only perform the index operation if the - last operation that has changed the document has the specified primary - term - :arg if_seq_no: only perform the index operation if the last - operation that has changed the document has the specified sequence - number + :arg id: Document ID. + :arg if_primary_term: only perform the operation if the last + operation that has changed the document has the specified primary term. + :arg if_seq_no: only perform the operation if the last operation + that has changed the document has the specified sequence number. :arg op_type: Explicit operation type. Defaults to `index` for requests with an explicit document ID, and to `create`for requests - without an explicit document ID Valid choices: index, create + without an explicit document ID. Valid choices are index, create. :arg pipeline: The pipeline id to preprocess incoming documents - with + with. :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then - do nothing with refreshes. Valid choices: true, false, wait_for + do nothing with refreshes. Valid choices are true, false, wait_for. :arg require_alias: When true, requires destination to be an - alias. Default is false - :arg routing: Specific routing value - :arg timeout: Explicit operation timeout - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte + alias. Default is false. + :arg routing: Routing value. + :arg timeout: Operation timeout. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the index operation. Defaults - to 1, meaning the primary shard only. Set to `all` for all shard copies, + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total - number of copies for the shard (number of replicas + 1) + number of copies for the shard (number of replicas + 1). Default is 1. 
""" for param in (index, body): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") - doc_type = "_doc" - return self.transport.perform_request( "POST" if id in SKIP_IN_PATH else "PUT", - _make_path(index, doc_type, id), + _make_path(index, "_doc", id), params=params, headers=headers, body=body, @@ -373,36 +417,42 @@ def index(self, index, body, id=None, params=None, headers=None): "timeout", "wait_for_active_shards", ) - def bulk(self, body, index=None, params=None, headers=None): + def bulk( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to perform multiple index/update/delete operations in a single request. :arg body: The operation definition and data (action-data pairs), separated by newlines - :arg index: Default index for items which don't provide one + :arg index: Default index for items which don't provide one. :arg _source: True or false to return the _source field or not, or default list of fields to return, can be overridden on each sub- - request + request. :arg _source_excludes: Default list of fields to exclude from - the returned _source field, can be overridden on each sub-request + the returned _source field, can be overridden on each sub-request. :arg _source_includes: Default list of fields to extract and - return from the _source field, can be overridden on each sub-request + return from the _source field, can be overridden on each sub-request. :arg pipeline: The pipeline id to preprocess incoming documents - with + with. :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then - do nothing with refreshes. Valid choices: true, false, wait_for + do nothing with refreshes. Valid choices are true, false, wait_for. :arg require_alias: Sets require_alias for all incoming - documents. Defaults to unset (false) - :arg routing: Specific routing value - :arg timeout: Explicit operation timeout + documents. Default is false. + :arg routing: Routing value. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the bulk operation. Defaults - to 1, meaning the primary shard only. Set to `all` for all shard copies, + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total - number of copies for the shard (number of replicas + 1) + number of copies for the shard (number of replicas + 1). Default is 1. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -417,14 +467,20 @@ def bulk(self, body, index=None, params=None, headers=None): ) @query_params() - def clear_scroll(self, body=None, scroll_id=None, params=None, headers=None): + def clear_scroll( + self, + body: Any = None, + scroll_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Explicitly clears the search context for a scroll. - :arg body: A comma-separated list of scroll IDs to clear if none + :arg body: Comma-separated list of scroll IDs to clear if none was specified via the scroll_id parameter - :arg scroll_id: A comma-separated list of scroll IDs to clear + :arg scroll_id: Comma-separated list of scroll IDs to clear. 
""" if scroll_id in SKIP_IN_PATH and body in SKIP_IN_PATH: raise ValueError("You need to supply scroll_id or body.") @@ -453,42 +509,49 @@ def clear_scroll(self, body=None, scroll_id=None, params=None, headers=None): "routing", "terminate_after", ) - def count(self, body=None, index=None, params=None, headers=None): + def count( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns number of documents matching a query. - :arg body: A query to restrict the results specified with the + :arg body: Query to restrict the results specified with the Query DSL (optional) - :arg index: A comma-separated list of indices to restrict the - results + :arg index: Comma-separated list of indices to restrict the + results. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg analyze_wildcard: Specify whether wildcard and prefix - queries should be analyzed (default: false) - :arg analyzer: The analyzer to use for the query string + queries should be analyzed. Default is false. + :arg analyzer: The analyzer to use for the query string. :arg default_operator: The default operator for query string - query (AND or OR) Valid choices: AND, OR Default: OR + query (AND or OR). Valid choices are AND, OR. :arg df: The field to use as default where no field prefix is - given in the query string + given in the query string. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_throttled: Whether specified concrete, expanded or - aliased indices should be ignored when throttled + aliased indices should be ignored when throttled. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg lenient: Specify whether format-based query failures (such - as providing text to a numeric field) should be ignored + as providing text to a numeric field) should be ignored. :arg min_score: Include only documents with a specific `_score` - value in the result + value in the result. :arg preference: Specify the node or shard the operation should - be performed on (default: random) - :arg q: Query in the Lucene query string syntax - :arg routing: A comma-separated list of specific routing values - :arg terminate_after: The maximum count for each shard, upon - reaching which the query execution will terminate early + be performed on. Default is random. + :arg q: Query in the Lucene query string syntax. + :arg routing: Comma-separated list of specific routing values. + :arg terminate_after: The maximum number of documents to collect + for each shard, upon reaching which the query execution will terminate + early. """ return self.transport.perform_request( "POST", @@ -508,42 +571,44 @@ def count(self, body=None, index=None, params=None, headers=None): "version_type", "wait_for_active_shards", ) - def delete(self, index, id, params=None, headers=None): + def delete( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Removes a document from the index. 
- :arg index: The name of the index - :arg id: The document ID - :arg if_primary_term: only perform the delete operation if the - last operation that has changed the document has the specified primary - term - :arg if_seq_no: only perform the delete operation if the last - operation that has changed the document has the specified sequence - number + :arg index: Index name. + :arg id: Document ID. + :arg if_primary_term: only perform the operation if the last + operation that has changed the document has the specified primary term. + :arg if_seq_no: only perform the operation if the last operation + that has changed the document has the specified sequence number. :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then - do nothing with refreshes. Valid choices: true, false, wait_for - :arg routing: Specific routing value - :arg timeout: Explicit operation timeout - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte, force + do nothing with refreshes. Valid choices are true, false, wait_for. + :arg routing: Routing value. + :arg timeout: Operation timeout. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the delete operation. - Defaults to 1, meaning the primary shard only. Set to `all` for all - shard copies, otherwise set to any non-negative value less than or equal - to the total number of copies for the shard (number of replicas + 1) + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, + otherwise set to any non-negative value less than or equal to the total + number of copies for the shard (number of replicas + 1). Default is 1. """ for param in (index, id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") - doc_type = "_doc" - return self.transport.perform_request( - "DELETE", _make_path(index, doc_type, id), params=params, headers=headers + "DELETE", _make_path(index, "_doc", id), params=params, headers=headers ) @query_params( @@ -581,81 +646,87 @@ def delete(self, index, id, params=None, headers=None): "wait_for_active_shards", "wait_for_completion", ) - def delete_by_query(self, index, body, params=None, headers=None): + def delete_by_query( + self, + index: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes documents matching the provided query. - :arg index: A comma-separated list of index names to search; use - `_all` or empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg body: The search definition using the Query DSL :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. 
+        :arg _source_includes: List of fields to extract and return from
+            the _source field.
         :arg allow_no_indices: Whether to ignore if a wildcard indices
             expression resolves into no concrete indices. (This includes `_all`
-            string or when no indices have been specified)
+            string or when no indices have been specified).
         :arg analyze_wildcard: Specify whether wildcard and prefix
-            queries should be analyzed (default: false)
-        :arg analyzer: The analyzer to use for the query string
-        :arg conflicts: What to do when the delete by query hits version
-            conflicts? Valid choices: abort, proceed Default: abort
+            queries should be analyzed. Default is false.
+        :arg analyzer: The analyzer to use for the query string.
+        :arg conflicts: What to do when the operation encounters version
+            conflicts. Valid choices are abort, proceed.
         :arg default_operator: The default operator for query string
-            query (AND or OR) Valid choices: AND, OR Default: OR
+            query (AND or OR). Valid choices are AND, OR.
         :arg df: The field to use as default where no field prefix is
-            given in the query string
+            given in the query string.
         :arg expand_wildcards: Whether to expand wildcard expression to
-            concrete indices that are open, closed or both. Valid choices: open,
-            closed, hidden, none, all Default: open
-        :arg from_: Starting offset (default: 0)
+            concrete indices that are open, closed or both. Valid choices are all,
+            open, closed, hidden, none.
+        :arg from_: Starting offset. Default is 0.
         :arg ignore_unavailable: Whether specified concrete indices
-            should be ignored when unavailable (missing or closed)
+            should be ignored when unavailable (missing or closed).
         :arg lenient: Specify whether format-based query failures (such
-            as providing text to a numeric field) should be ignored
+            as providing text to a numeric field) should be ignored.
         :arg max_docs: Maximum number of documents to process (default:
-            all documents)
+            all documents).
         :arg preference: Specify the node or shard the operation should
-            be performed on (default: random)
-        :arg q: Query in the Lucene query string syntax
-        :arg refresh: Should the effected indexes be refreshed?
+            be performed on. Default is random.
+        :arg q: Query in the Lucene query string syntax.
+        :arg refresh: Refresh the shard containing the document before
+            performing the operation.
         :arg request_cache: Specify if request cache should be used for
-            this request or not, defaults to index level setting
+            this request or not, defaults to index level setting.
         :arg requests_per_second: The throttle for this request in sub-
-            requests per second. -1 means no throttle.
-        :arg routing: A comma-separated list of specific routing values
+            requests per second. -1 means no throttle. Default is 0.
+        :arg routing: Comma-separated list of specific routing values.
         :arg scroll: Specify how long a consistent view of the index
-            should be maintained for scrolled search
-        :arg scroll_size: Size on the scroll request powering the delete
-            by query Default: 100
+            should be maintained for scrolled search.
+        :arg scroll_size: Size on the scroll request powering the
+            operation. Default is 100.
         :arg search_timeout: Explicit timeout for each search request.
             Defaults to no timeout.
-        :arg search_type: Search operation type Valid choices:
-            query_then_fetch, dfs_query_then_fetch
-        :arg size: Deprecated, please use `max_docs` instead
+        :arg search_type: Search operation type. Valid choices are
+            query_then_fetch, dfs_query_then_fetch.
+        :arg size: Deprecated, please use `max_docs` instead.
         :arg slices: The number of slices this task should be divided
             into. Defaults to 1, meaning the task isn't sliced into subtasks. Can be
-            set to `auto`. Default: 1
-        :arg sort: A comma-separated list of <field>:<direction> pairs
+            set to `auto`. Default is 1.
+        :arg sort: Comma-separated list of <field>:<direction> pairs.
         :arg stats: Specific 'tag' of the request for logging and
-            statistical purposes
+            statistical purposes.
         :arg terminate_after: The maximum number of documents to collect
             for each shard, upon reaching which the query execution will terminate
             early.
         :arg timeout: Time each individual bulk request should wait for
-            shards that are unavailable. Default: 1m
-        :arg version: Specify whether to return document version as part
-            of a hit
+            shards that are unavailable. Default is 1m.
+        :arg version: Whether to return document version as part of a
+            hit.
         :arg wait_for_active_shards: Sets the number of shard copies
-            that must be active before proceeding with the delete by query
-            operation. Defaults to 1, meaning the primary shard only. Set to `all`
-            for all shard copies, otherwise set to any non-negative value less than
-            or equal to the total number of copies for the shard (number of replicas
-            + 1)
-        :arg wait_for_completion: Should the request should block until
-            the delete by query is complete. Default: True
+            that must be active before proceeding with the operation. Defaults to 1,
+            meaning the primary shard only. Set to `all` for all shard copies,
+            otherwise set to any non-negative value less than or equal to the total
+            number of copies for the shard (number of replicas + 1). Default is 1.
+        :arg wait_for_completion: Should this request wait until the
+            operation has completed before returning. Default is True.
         """
         # from is a reserved word so it cannot be used, use from_ instead
         if "from_" in params:
@@ -674,15 +745,20 @@ def delete_by_query(self, index, body, params=None, headers=None):
         )

     @query_params("requests_per_second")
-    def delete_by_query_rethrottle(self, task_id, params=None, headers=None):
+    def delete_by_query_rethrottle(
+        self,
+        task_id: Any,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Changes the number of requests per second for a particular Delete By Query
         operation.


-        :arg task_id: The task id to rethrottle
-        :arg requests_per_second: The throttle to set on this request in
-            floating sub-requests per second. -1 means set no throttle.
+        :arg task_id: The task id to rethrottle.
+        :arg requests_per_second: The throttle for this request in sub-
+            requests per second. -1 means no throttle.
         """
         if task_id in SKIP_IN_PATH:
             raise ValueError("Empty value passed for a required argument 'task_id'.")
@@ -694,16 +770,24 @@ def delete_by_query_rethrottle(self, task_id, params=None, headers=None):
             headers=headers,
         )

-    @query_params("master_timeout", "cluster_manager_timeout", "timeout")
-    def delete_script(self, id, params=None, headers=None):
+    @query_params("cluster_manager_timeout", "master_timeout", "timeout")
+    def delete_script(
+        self,
+        id: Any,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Deletes a script.


-        :arg id: Script ID
-        :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master
-        :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager
-        :arg timeout: Explicit operation timeout
+        :arg id: Script ID.
+        :arg cluster_manager_timeout: Operation timeout for connection
+            to cluster-manager node.
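# --- Editorial usage sketch (not part of the patch) ---------------------
# `delete_by_query` run as a background task, then rethrottled with
# `delete_by_query_rethrottle`; the index pattern and throttle values are
# assumptions.
task = client.delete_by_query(
    index="logs-*",
    body={"query": {"range": {"@timestamp": {"lt": "now-30d"}}}},
    conflicts="proceed",
    wait_for_completion=False,
)
client.delete_by_query_rethrottle(task_id=task["task"], requests_per_second=10)
# -------------------------------------------------------------------------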
+ :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") @@ -724,40 +808,44 @@ def delete_script(self, id, params=None, headers=None): "version", "version_type", ) - def exists(self, index, id, params=None, headers=None): + def exists( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a document exists in an index. - :arg index: The name of the index - :arg id: The document ID + :arg index: Index name. + :arg id: Document ID. :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. :arg preference: Specify the node or shard the operation should - be performed on (default: random) + be performed on. Default is random. :arg realtime: Specify whether to perform the operation in - realtime or search mode + realtime or search mode. :arg refresh: Refresh the shard containing the document before - performing the operation - :arg routing: Specific routing value - :arg stored_fields: A comma-separated list of stored fields to - return in the response - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte, force + performing the operation. + :arg routing: Routing value. + :arg stored_fields: Comma-separated list of stored fields to + return. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. """ for param in (index, id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") - doc_type = "_doc" - return self.transport.perform_request( - "HEAD", _make_path(index, doc_type, id), params=params, headers=headers + "HEAD", _make_path(index, "_doc", id), params=params, headers=headers ) @query_params( @@ -771,29 +859,35 @@ def exists(self, index, id, params=None, headers=None): "version", "version_type", ) - def exists_source(self, index, id, params=None, headers=None): + def exists_source( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a document source exists in an index. - :arg index: The name of the index - :arg id: The document ID + :arg index: Index name. + :arg id: Document ID. :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. 
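# --- Editorial usage sketch (not part of the patch) ---------------------
# `exists` issues a HEAD request, which the transport resolves to a
# boolean; the index and id are assumptions.
if client.exists(index="movies", id="tt0816692"):
    print("document is present")
# -------------------------------------------------------------------------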
         :arg preference: Specify the node or shard the operation should
-            be performed on (default: random)
+            be performed on. Default is random.
         :arg realtime: Specify whether to perform the operation in
-            realtime or search mode
+            realtime or search mode.
         :arg refresh: Refresh the shard containing the document before
-            performing the operation
-        :arg routing: Specific routing value
-        :arg version: Explicit version number for concurrency control
-        :arg version_type: Specific version type Valid choices:
-            internal, external, external_gte, force
+            performing the operation.
+        :arg routing: Routing value.
+        :arg version: Explicit version number for concurrency control.
+        :arg version_type: Specific version type. Valid choices are
+            internal, external, external_gte, force.
         """
         for param in (index, id):
             if param in SKIP_IN_PATH:
@@ -819,35 +913,42 @@ def exists_source(self, index, id, params=None, headers=None):
         "routing",
         "stored_fields",
     )
-    def explain(self, index, id, body=None, params=None, headers=None):
+    def explain(
+        self,
+        index: Any,
+        id: Any,
+        body: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Returns information about why a specific document matches (or doesn't
         match) a query.


-        :arg index: The name of the index
-        :arg id: The document ID
+        :arg index: Index name.
+        :arg id: Document ID.
         :arg body: The query definition using the Query DSL
         :arg _source: True or false to return the _source field or not,
-            or a list of fields to return
-        :arg _source_excludes: A list of fields to exclude from the
-            returned _source field
-        :arg _source_includes: A list of fields to extract and return
-            from the _source field
+            or a list of fields to return.
+        :arg _source_excludes: List of fields to exclude from the
+            returned _source field.
+        :arg _source_includes: List of fields to extract and return from
+            the _source field.
         :arg analyze_wildcard: Specify whether wildcards and prefix
-            queries in the query string query should be analyzed (default: false)
-        :arg analyzer: The analyzer for the query string query
+            queries in the query string query should be analyzed. Default is false.
+        :arg analyzer: The analyzer to use for the query string.
         :arg default_operator: The default operator for query string
-            query (AND or OR) Valid choices: AND, OR Default: OR
-        :arg df: The default field for query string query (default:
-            _all)
+            query (AND or OR). Valid choices are AND, OR.
+        :arg df: The default field for query string query. Default is
+            _all.
         :arg lenient: Specify whether format-based query failures (such
-            as providing text to a numeric field) should be ignored
+            as providing text to a numeric field) should be ignored.
         :arg preference: Specify the node or shard the operation should
-            be performed on (default: random)
-        :arg q: Query in the Lucene query string syntax
-        :arg routing: Specific routing value
-        :arg stored_fields: A comma-separated list of stored fields to
-            return in the response
+            be performed on. Default is random.
+        :arg q: Query in the Lucene query string syntax.
+        :arg routing: Routing value.
+        :arg stored_fields: Comma-separated list of stored fields to
+            return.
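# --- Editorial usage sketch (not part of the patch) ---------------------
# `explain` as documented above: shows why one document does (or does
# not) match a query; all names are assumptions.
why = client.explain(
    index="movies",
    id="tt0816692",
    body={"query": {"match": {"title": "interstellar"}}},
)
print(why["matched"], why["explanation"]["value"])
# -------------------------------------------------------------------------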
""" for param in (index, id): if param in SKIP_IN_PATH: @@ -866,26 +967,32 @@ def explain(self, index, id, body=None, params=None, headers=None): "ignore_unavailable", "include_unmapped", ) - def field_caps(self, body=None, index=None, params=None, headers=None): + def field_caps( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns the information about the capabilities of fields among multiple indices. :arg body: An index filter specified with the Query DSL - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg fields: A comma-separated list of field names + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg fields: Comma-separated list of field names. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg include_unmapped: Indicates whether unmapped fields should - be included in the response. + be included in the response. Default is false. """ return self.transport.perform_request( "POST", @@ -907,51 +1014,63 @@ def field_caps(self, body=None, index=None, params=None, headers=None): "version", "version_type", ) - def get(self, index, id, params=None, headers=None): + def get( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a document. - :arg index: The name of the index - :arg id: The document ID + :arg index: Index name. + :arg id: Document ID. :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. :arg preference: Specify the node or shard the operation should - be performed on (default: random) + be performed on. Default is random. :arg realtime: Specify whether to perform the operation in - realtime or search mode + realtime or search mode. :arg refresh: Refresh the shard containing the document before - performing the operation - :arg routing: Specific routing value - :arg stored_fields: A comma-separated list of stored fields to - return in the response - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte, force + performing the operation. + :arg routing: Routing value. + :arg stored_fields: Comma-separated list of stored fields to + return. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. 
Valid choices are + internal, external, external_gte, force. """ for param in (index, id): if param in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument.") - doc_type = "_doc" - return self.transport.perform_request( - "GET", _make_path(index, doc_type, id), params=params, headers=headers + "GET", _make_path(index, "_doc", id), params=params, headers=headers ) - @query_params("master_timeout", "cluster_manager_timeout") - def get_script(self, id, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout") + def get_script( + self, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a script. - :arg id: Script ID - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + :arg id: Script ID. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") @@ -971,29 +1090,35 @@ def get_script(self, id, params=None, headers=None): "version", "version_type", ) - def get_source(self, index, id, params=None, headers=None): + def get_source( + self, + index: Any, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns the source of a document. - :arg index: The name of the index - :arg id: The document ID + :arg index: Index name. + :arg id: Document ID. :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. :arg preference: Specify the node or shard the operation should - be performed on (default: random) + be performed on. Default is random. :arg realtime: Specify whether to perform the operation in - realtime or search mode + realtime or search mode. :arg refresh: Refresh the shard containing the document before - performing the operation - :arg routing: Specific routing value - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte, force + performing the operation. + :arg routing: Routing value. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. """ for param in (index, id): if param in SKIP_IN_PATH: @@ -1015,30 +1140,36 @@ def get_source(self, index, id, params=None, headers=None): "routing", "stored_fields", ) - def mget(self, body, index=None, params=None, headers=None): + def mget( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to get multiple documents in one request. :arg body: Document identifiers; can be either `docs` - (containing full document information) or `ids` (when index and type is - provided in the URL. 
-        :arg index: The name of the index
+            (containing full document information) or `ids` (when index is provided
+            in the URL).
+        :arg index: Index name.
         :arg _source: True or false to return the _source field or not,
-            or a list of fields to return
-        :arg _source_excludes: A list of fields to exclude from the
-            returned _source field
-        :arg _source_includes: A list of fields to extract and return
-            from the _source field
+            or a list of fields to return.
+        :arg _source_excludes: List of fields to exclude from the
+            returned _source field.
+        :arg _source_includes: List of fields to extract and return from
+            the _source field.
         :arg preference: Specify the node or shard the operation should
-            be performed on (default: random)
+            be performed on. Default is random.
         :arg realtime: Specify whether to perform the operation in
-            realtime or search mode
+            realtime or search mode.
         :arg refresh: Refresh the shard containing the document before
-            performing the operation
-        :arg routing: Specific routing value
-        :arg stored_fields: A comma-separated list of stored fields to
-            return in the response
+            performing the operation.
+        :arg routing: Routing value.
+        :arg stored_fields: Comma-separated list of stored fields to
+            return.
         """
         if body in SKIP_IN_PATH:
             raise ValueError("Empty value passed for a required argument 'body'.")
@@ -1060,37 +1191,44 @@ def mget(self, body, index=None, params=None, headers=None):
         "search_type",
         "typed_keys",
     )
-    def msearch(self, body, index=None, params=None, headers=None):
+    def msearch(
+        self,
+        body: Any,
+        index: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Allows to execute several search operations in one request.


         :arg body: The request definitions (metadata-search request
             definition pairs), separated by newlines
-        :arg index: A comma-separated list of index names to use as
-            default
+        :arg index: Comma-separated list of indices to use as default.
         :arg ccs_minimize_roundtrips: Indicates whether network round-
            trips should be minimized as part of cross-cluster search requests
-            execution Default: true
+            execution. Default is True.
         :arg max_concurrent_searches: Controls the maximum number of
-            concurrent searches the multi search api will execute
+            concurrent searches the multi search api will execute.
         :arg max_concurrent_shard_requests: The number of concurrent
             shard requests each sub search executes concurrently per node. This
             value should be used to limit the impact of the search on the cluster in
-            order to limit the number of concurrent shard requests Default: 5
-        :arg pre_filter_shard_size: A threshold that enforces a pre-
-            filter roundtrip to prefilter search shards based on query rewriting if
-            the number of shards the search request expands to exceeds the
-            threshold. This filter roundtrip can limit the number of shards
-            significantly if for instance a shard can not match any documents based
-            on its rewrite method ie. if date filters are mandatory to match but the
-            shard bounds and the query are disjoint.
+            order to limit the number of concurrent shard requests. Default is 5.
+        :arg pre_filter_shard_size: Threshold that enforces a pre-filter
+            round-trip to prefilter search shards based on query rewriting if the
+            number of shards the search request expands to exceeds the threshold.
+            This filter round-trip can limit the number of shards significantly if
+            for instance a shard can not match any documents based on its rewrite
+            method, i.e. if date filters are mandatory to match but the shard bounds
+            and the query are disjoint.
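# --- Editorial usage sketch (not part of the patch) ---------------------
# `mget` with the `ids` form of the body described above; the ids are
# assumptions.
resp = client.mget(index="movies", body={"ids": ["tt0816692", "tt1375666"]})
found = [doc for doc in resp["docs"] if doc.get("found")]
# -------------------------------------------------------------------------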
:arg rest_total_hits_as_int: Indicates whether hits.total should - be rendered as an integer or an object in the rest search response - :arg search_type: Search operation type Valid choices: - query_then_fetch, dfs_query_then_fetch + be rendered as an integer or an object in the rest search response. + Default is false. + :arg search_type: Search operation type. Valid choices are + query_then_fetch, query_and_fetch, dfs_query_then_fetch, + dfs_query_and_fetch. :arg typed_keys: Specify whether aggregation and suggester names - should be prefixed by their respective types in the response + should be prefixed by their respective types in the response. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -1111,26 +1249,33 @@ def msearch(self, body, index=None, params=None, headers=None): "search_type", "typed_keys", ) - def msearch_template(self, body, index=None, params=None, headers=None): + def msearch_template( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to execute several search template operations in one request. :arg body: The request definitions (metadata-search request definition pairs), separated by newlines - :arg index: A comma-separated list of index names to use as - default + :arg index: Comma-separated list of indices to use as default. :arg ccs_minimize_roundtrips: Indicates whether network round- trips should be minimized as part of cross-cluster search requests - execution Default: true + execution. Default is True. :arg max_concurrent_searches: Controls the maximum number of - concurrent searches the multi search api will execute + concurrent searches the multi search api will execute. :arg rest_total_hits_as_int: Indicates whether hits.total should - be rendered as an integer or an object in the rest search response - :arg search_type: Search operation type Valid choices: - query_then_fetch, dfs_query_then_fetch + be rendered as an integer or an object in the rest search response. + Default is false. + :arg search_type: Search operation type. Valid choices are + query_then_fetch, query_and_fetch, dfs_query_then_fetch, + dfs_query_and_fetch. :arg typed_keys: Specify whether aggregation and suggester names - should be prefixed by their respective types in the response + should be prefixed by their respective types in the response. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -1158,7 +1303,13 @@ def msearch_template(self, body, index=None, params=None, headers=None): "version", "version_type", ) - def mtermvectors(self, body=None, index=None, params=None, headers=None): + def mtermvectors( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns multiple termvectors in one request. @@ -1170,34 +1321,34 @@ def mtermvectors(self, body=None, index=None, params=None, headers=None): :arg field_statistics: Specifies if document count, sum of document frequencies and sum of total term frequencies should be returned. Applies to all returned documents unless otherwise specified - in body "params" or "docs". Default: True - :arg fields: A comma-separated list of fields to return. Applies - to all returned documents unless otherwise specified in body "params" or - "docs". - :arg ids: A comma-separated list of documents ids. You must - define ids as parameter or set "ids" or "docs" in the request body + in body 'params' or 'docs'. Default is True. 
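# --- Editorial usage sketch (not part of the patch) ---------------------
# `msearch` takes newline-delimited header/body pairs; passing them as a
# list of dicts (serialized by the client's bulk-body helper) is assumed
# here. Index names and queries are illustrative.
resp = client.msearch(
    body=[
        {"index": "movies"},
        {"query": {"match_all": {}}, "size": 1},
        {"index": "books"},
        {"query": {"match": {"title": "dune"}}, "size": 1},
    ]
)
for result in resp["responses"]:
    print(result["hits"]["total"])
# -------------------------------------------------------------------------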
+ :arg fields: Comma-separated list of fields to return. Applies + to all returned documents unless otherwise specified in body 'params' or + 'docs'. + :arg ids: Comma-separated list of documents ids. You must define + ids as parameter or set 'ids' or 'docs' in the request body. :arg offsets: Specifies if term offsets should be returned. Applies to all returned documents unless otherwise specified in body - "params" or "docs". Default: True + 'params' or 'docs'. Default is True. :arg payloads: Specifies if term payloads should be returned. Applies to all returned documents unless otherwise specified in body - "params" or "docs". Default: True + 'params' or 'docs'. Default is True. :arg positions: Specifies if term positions should be returned. Applies to all returned documents unless otherwise specified in body - "params" or "docs". Default: True + 'params' or 'docs'. Default is True. :arg preference: Specify the node or shard the operation should - be performed on (default: random) .Applies to all returned documents - unless otherwise specified in body "params" or "docs". + be performed on. Applies to all returned documents unless otherwise + specified in body 'params' or 'docs'. Default is random. :arg realtime: Specifies if requests are real-time as opposed to - near-real-time (default: true). - :arg routing: Specific routing value. Applies to all returned - documents unless otherwise specified in body "params" or "docs". + near-real-time. Default is True. + :arg routing: Routing value. Applies to all returned documents + unless otherwise specified in body 'params' or 'docs'. :arg term_statistics: Specifies if total term frequency and document frequency should be returned. Applies to all returned documents - unless otherwise specified in body "params" or "docs". - :arg version: Explicit version number for concurrency control - :arg version_type: Specific version type Valid choices: - internal, external, external_gte, force + unless otherwise specified in body 'params' or 'docs'. Default is false. + :arg version: Explicit version number for concurrency control. + :arg version_type: Specific version type. Valid choices are + internal, external, external_gte, force. """ path = _make_path(index, "_mtermvectors") @@ -1205,18 +1356,28 @@ def mtermvectors(self, body=None, index=None, params=None, headers=None): "POST", path, params=params, headers=headers, body=body ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") - def put_script(self, id, body, context=None, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "timeout") + def put_script( + self, + id: Any, + body: Any, + context: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates a script. - :arg id: Script ID + :arg id: Script ID. :arg body: The document - :arg context: Context name to compile script against - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + :arg context: Script context. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. 
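# --- Editorial usage sketch (not part of the patch) ---------------------
# Storing a Painless script with `put_script` and reading it back with
# `get_script`; the id and source are assumptions.
client.put_script(
    id="rating-boost",
    body={"script": {"lang": "painless", "source": "doc['rating'].value * params.factor"}},
)
stored = client.get_script(id="rating-boost")
# -------------------------------------------------------------------------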
""" for param in (id, body): if param in SKIP_IN_PATH: @@ -1233,31 +1394,32 @@ def put_script(self, id, body, context=None, params=None, headers=None): @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "search_type" ) - def rank_eval(self, body, index=None, params=None, headers=None): + def rank_eval( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to evaluate the quality of ranked search results over a set of typical - search queries - + search queries. - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version :arg body: The ranking evaluation search definition, including search requests, document ratings and ranking metric definition. - :arg index: A comma-separated list of index names to search; use - `_all` or empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg search_type: Search operation type Valid choices: - query_then_fetch, dfs_query_then_fetch + should be ignored when unavailable (missing or closed). + :arg search_type: Search operation type. Valid choices are + query_then_fetch, dfs_query_then_fetch. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -1280,7 +1442,12 @@ def rank_eval(self, body, index=None, params=None, headers=None): "wait_for_active_shards", "wait_for_completion", ) - def reindex(self, body, params=None, headers=None): + def reindex( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to copy documents from one index to another, optionally filtering the source documents by a query, changing the destination index settings, or @@ -1290,24 +1457,24 @@ def reindex(self, body, params=None, headers=None): :arg body: The search definition using the Query DSL and the prototype for the index request. :arg max_docs: Maximum number of documents to process (default: - all documents) - :arg refresh: Should the affected indexes be refreshed? - :arg requests_per_second: The throttle to set on this request in - sub-requests per second. -1 means no throttle. - :arg scroll: Control how long to keep the search context alive - Default: 5m + all documents). + :arg refresh: Should the affected indexes be refreshed?. + :arg requests_per_second: The throttle for this request in sub- + requests per second. -1 means no throttle. Default is 0. + :arg scroll: Specify how long a consistent view of the index + should be maintained for scrolled search. :arg slices: The number of slices this task should be divided into. Defaults to 1, meaning the task isn't sliced into subtasks. Can be - set to `auto`. Default: 1 + set to `auto`. Default is 1. 
:arg timeout: Time each individual bulk request should wait for - shards that are unavailable. Default: 1m + shards that are unavailable. Default is 1m. :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the reindex operation. - Defaults to 1, meaning the primary shard only. Set to `all` for all - shard copies, otherwise set to any non-negative value less than or equal - to the total number of copies for the shard (number of replicas + 1) - :arg wait_for_completion: Should the request should block until - the reindex is complete. Default: True + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, + otherwise set to any non-negative value less than or equal to the total + number of copies for the shard (number of replicas + 1). Default is 1. + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. Default is True. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -1317,14 +1484,19 @@ def reindex(self, body, params=None, headers=None): ) @query_params("requests_per_second") - def reindex_rethrottle(self, task_id, params=None, headers=None): + def reindex_rethrottle( + self, + task_id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Changes the number of requests per second for a particular Reindex operation. - :arg task_id: The task id to rethrottle - :arg requests_per_second: The throttle to set on this request in - floating sub-requests per second. -1 means set no throttle. + :arg task_id: The task id to rethrottle. + :arg requests_per_second: The throttle for this request in sub- + requests per second. -1 means no throttle. """ if task_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'task_id'.") @@ -1337,13 +1509,19 @@ def reindex_rethrottle(self, task_id, params=None, headers=None): ) @query_params() - def render_search_template(self, body=None, id=None, params=None, headers=None): + def render_search_template( + self, + body: Any = None, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to use the Mustache language to pre-render a search definition. :arg body: The search definition template and its params - :arg id: The id of the stored search template + :arg id: The id of the stored search template. """ return self.transport.perform_request( "POST", @@ -1354,15 +1532,15 @@ def render_search_template(self, body=None, id=None, params=None, headers=None): ) @query_params() - def scripts_painless_execute(self, body=None, params=None, headers=None): + def scripts_painless_execute( + self, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Allows an arbitrary script to be executed and a result to be returned - - - .. warning:: + Allows an arbitrary script to be executed and a result to be returned. 
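# --- Editorial usage sketch (not part of the patch) ---------------------
# `reindex` as documented above, copying one index into another with
# automatic slicing; the index names are assumptions.
client.reindex(
    body={"source": {"index": "movies"}, "dest": {"index": "movies-v2"}},
    slices="auto",
    wait_for_completion=True,
)
# -------------------------------------------------------------------------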
-        This API is **experimental** so may include breaking changes
-        or be removed in a future version

         :arg body: The script to execute
         """
         return self.transport.perform_request(
             "POST",
             "/_scripts/painless/_execute",
             params=params,
             headers=headers,
             body=body,
         )

     @query_params("rest_total_hits_as_int", "scroll")
-    def scroll(self, body=None, scroll_id=None, params=None, headers=None):
+    def scroll(
+        self,
+        body: Any = None,
+        scroll_id: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Allows to retrieve a large number of results from a single search request.


         :arg body: The scroll ID if not passed by URL or query
             parameter.
-        :arg scroll_id: The scroll ID for scrolled search
+        :arg scroll_id: Scroll ID.
         :arg rest_total_hits_as_int: Indicates whether hits.total should
-            be rendered as an integer or an object in the rest search response
+            be rendered as an integer or an object in the rest search response.
+            Default is false.
         :arg scroll: Specify how long a consistent view of the index
-            should be maintained for scrolled search
+            should be maintained for scrolled search.
         """
         if scroll_id in SKIP_IN_PATH and body in SKIP_IN_PATH:
             raise ValueError("You need to supply scroll_id or body.")
@@ -1419,7 +1604,6 @@ def scroll(self, body=None, scroll_id=None, params=None, headers=None):
         "ignore_unavailable",
         "lenient",
         "max_concurrent_shard_requests",
-        "min_compatible_shard_node",
         "pre_filter_shard_size",
         "preference",
         "q",
@@ -1444,107 +1628,111 @@ def scroll(self, body=None, scroll_id=None, params=None, headers=None):
         "typed_keys",
         "version",
     )
-    def search(self, body=None, index=None, params=None, headers=None):
+    def search(
+        self,
+        body: Any = None,
+        index: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Returns results matching a query.


         :arg body: The search definition using the Query DSL
-        :arg index: A comma-separated list of index names to search; use
-            `_all` or empty string to perform the operation on all indices
+        :arg index: Comma-separated list of indices; use `_all` or empty
+            string to perform the operation on all indices.
         :arg _source: True or false to return the _source field or not,
-            or a list of fields to return
-        :arg _source_excludes: A list of fields to exclude from the
-            returned _source field
-        :arg _source_includes: A list of fields to extract and return
-            from the _source field
+            or a list of fields to return.
+        :arg _source_excludes: List of fields to exclude from the
+            returned _source field.
+        :arg _source_includes: List of fields to extract and return from
+            the _source field.
         :arg allow_no_indices: Whether to ignore if a wildcard indices
             expression resolves into no concrete indices. (This includes `_all`
-            string or when no indices have been specified)
+            string or when no indices have been specified).
         :arg allow_partial_search_results: Indicate if an error should
-            be returned if there is a partial search failure or timeout Default:
-            True
+            be returned if there is a partial search failure or timeout. Default is
+            True.
         :arg analyze_wildcard: Specify whether wildcard and prefix
-            queries should be analyzed (default: false)
-        :arg analyzer: The analyzer to use for the query string
+            queries should be analyzed. Default is false.
+        :arg analyzer: The analyzer to use for the query string.
         :arg batched_reduce_size: The number of shard results that
             should be reduced at once on the coordinating node. This value should be
             used as a protection mechanism to reduce the memory overhead per search
             request if the potential number of shards in the request can be large.
This value should be used as a protection mechanism to reduce the memory overhead per search request if the potential number of shards in the request can be large. - Default: 512 + Default is 512. :arg ccs_minimize_roundtrips: Indicates whether network round- trips should be minimized as part of cross-cluster search requests - execution Default: true + execution. Default is True. :arg default_operator: The default operator for query string - query (AND or OR) Valid choices: AND, OR Default: OR + query (AND or OR). Valid choices are AND, OR. :arg df: The field to use as default where no field prefix is - given in the query string - :arg docvalue_fields: A comma-separated list of fields to return - as the docvalue representation of a field for each hit + given in the query string. + :arg docvalue_fields: Comma-separated list of fields to return + as the docvalue representation of a field for each hit. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg explain: Specify whether to return detailed information - about score computation as part of a hit - :arg from_: Starting offset (default: 0) + about score computation as part of a hit. + :arg from_: Starting offset. Default is 0. :arg ignore_throttled: Whether specified concrete, expanded or - aliased indices should be ignored when throttled + aliased indices should be ignored when throttled. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg lenient: Specify whether format-based query failures (such - as providing text to a numeric field) should be ignored + as providing text to a numeric field) should be ignored. :arg max_concurrent_shard_requests: The number of concurrent shard requests per node this search executes concurrently. This value should be used to limit the impact of the search on the cluster in order - to limit the number of concurrent shard requests Default: 5 - :arg min_compatible_shard_node: The minimum compatible version - that all shards involved in search should have for this request to be - successful - :arg pre_filter_shard_size: A threshold that enforces a pre- - filter roundtrip to prefilter search shards based on query rewriting if - the number of shards the search request expands to exceeds the - threshold. This filter roundtrip can limit the number of shards - significantly if for instance a shard can not match any documents based - on its rewrite method ie. if date filters are mandatory to match but the - shard bounds and the query are disjoint. + to limit the number of concurrent shard requests. Default is 5. + :arg pre_filter_shard_size: Threshold that enforces a pre-filter + round-trip to prefilter search shards based on query rewriting if the + number of shards the search request expands to exceeds the threshold. + This filter round-trip can limit the number of shards significantly if + for instance a shard can not match any documents based on its rewrite + method ie. if date filters are mandatory to match but the shard bounds + and the query are disjoint. :arg preference: Specify the node or shard the operation should - be performed on (default: random) - :arg q: Query in the Lucene query string syntax + be performed on. Default is random. 
+        :arg q: Query in the Lucene query string syntax.
         :arg request_cache: Specify if request cache should be used for
-            this request or not, defaults to index level setting
+            this request or not, defaults to index level setting.
         :arg rest_total_hits_as_int: Indicates whether hits.total should
-            be rendered as an integer or an object in the rest search response
-        :arg routing: A comma-separated list of specific routing values
+            be rendered as an integer or an object in the rest search response.
+            Default is false.
+        :arg routing: Comma-separated list of specific routing values.
         :arg scroll: Specify how long a consistent view of the index
-            should be maintained for scrolled search
-        :arg search_type: Search operation type Valid choices:
-            query_then_fetch, dfs_query_then_fetch
+            should be maintained for scrolled search.
+        :arg search_type: Search operation type. Valid choices are
+            query_then_fetch, dfs_query_then_fetch.
         :arg seq_no_primary_term: Specify whether to return sequence
-            number and primary term of the last modification of each hit
-        :arg size: Number of hits to return (default: 10)
-        :arg sort: A comma-separated list of <field>:<direction> pairs
+            number and primary term of the last modification of each hit.
+        :arg size: Number of hits to return. Default is 10.
+        :arg sort: Comma-separated list of <field>:<direction> pairs.
         :arg stats: Specific 'tag' of the request for logging and
-            statistical purposes
-        :arg stored_fields: A comma-separated list of stored fields to
-            return as part of a hit
-        :arg suggest_field: Specify which field to use for suggestions
-        :arg suggest_mode: Specify suggest mode Valid choices: missing,
-            popular, always Default: missing
-        :arg suggest_size: How many suggestions to return in response
+            statistical purposes.
+        :arg stored_fields: Comma-separated list of stored fields to
+            return.
+        :arg suggest_field: Specify which field to use for suggestions.
+        :arg suggest_mode: Specify suggest mode. Valid choices are
+            missing, popular, always.
+        :arg suggest_size: How many suggestions to return in response.
         :arg suggest_text: The source text for which the suggestions
-            should be returned
+            should be returned.
         :arg terminate_after: The maximum number of documents to collect
             for each shard, upon reaching which the query execution will terminate
             early.
-        :arg timeout: Explicit operation timeout
+        :arg timeout: Operation timeout.
         :arg track_scores: Whether to calculate and return scores even
-            if they are not used for sorting
+            if they are not used for sorting.
         :arg track_total_hits: Indicate if the number of documents that
-            match the query should be tracked
+            match the query should be tracked.
         :arg typed_keys: Specify whether aggregation and suggester names
-            should be prefixed by their respective types in the response
-        :arg version: Specify whether to return document version as part
-            of a hit
+            should be prefixed by their respective types in the response.
+        :arg version: Whether to return document version as part of a
+            hit.
         """
         # from is a reserved word so it cannot be used, use from_ instead
         if "from_" in params:
@@ -1566,27 +1754,32 @@ def search(self, body=None, index=None, params=None, headers=None):
         "preference",
         "routing",
     )
-    def search_shards(self, index=None, params=None, headers=None):
+    def search_shards(
+        self,
+        index: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Returns information about the indices and shards that a search request would
         be executed against.
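# --- Editorial usage sketch (not part of the patch) ---------------------
# A paginated `search` plus a scrolled variant using the `scroll` API
# shown earlier; index name, page sizes, and keep-alive are assumptions.
page = client.search(
    index="movies",
    body={"query": {"match_all": {}}},
    size=10,
    from_=0,
)
first = client.search(index="movies", scroll="2m", size=100,
                      body={"query": {"match_all": {}}})
scroll_id = first["_scroll_id"]
nxt = client.scroll(body={"scroll_id": scroll_id, "scroll": "2m"})
client.clear_scroll(scroll_id=scroll_id)
# -------------------------------------------------------------------------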
- :arg index: A comma-separated list of index names to search; use - `_all` or empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) + from cluster-manager node. Default is false. :arg preference: Specify the node or shard the operation should - be performed on (default: random) - :arg routing: Specific routing value + be performed on. Default is random. + :arg routing: Routing value. """ return self.transport.perform_request( "GET", _make_path(index, "_search_shards"), params=params, headers=headers @@ -1607,41 +1800,49 @@ def search_shards(self, index=None, params=None, headers=None): "search_type", "typed_keys", ) - def search_template(self, body, index=None, params=None, headers=None): + def search_template( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to use the Mustache language to pre-render a search definition. :arg body: The search definition template and its params - :arg index: A comma-separated list of index names to search; use - `_all` or empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg ccs_minimize_roundtrips: Indicates whether network round- trips should be minimized as part of cross-cluster search requests - execution Default: true + execution. Default is True. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg explain: Specify whether to return detailed information - about score computation as part of a hit + about score computation as part of a hit. :arg ignore_throttled: Whether specified concrete, expanded or - aliased indices should be ignored when throttled + aliased indices should be ignored when throttled. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg preference: Specify the node or shard the operation should - be performed on (default: random) - :arg profile: Specify whether to profile the query execution + be performed on. Default is random. 
+        :arg profile: Specify whether to profile the query execution.
         :arg rest_total_hits_as_int: Indicates whether hits.total should
-            be rendered as an integer or an object in the rest search response
-        :arg routing: A comma-separated list of specific routing values
+            be rendered as an integer or an object in the rest search response.
+            Default is false.
+        :arg routing: Comma-separated list of specific routing values.
         :arg scroll: Specify how long a consistent view of the index
-            should be maintained for scrolled search
-        :arg search_type: Search operation type Valid choices:
-            query_then_fetch, dfs_query_then_fetch
+            should be maintained for scrolled search.
+        :arg search_type: Search operation type. Valid choices are
+            query_then_fetch, query_and_fetch, dfs_query_then_fetch,
+            dfs_query_and_fetch.
         :arg typed_keys: Specify whether aggregation and suggester names
-            should be prefixed by their respective types in the response
+            should be prefixed by their respective types in the response.
         """
         if body in SKIP_IN_PATH:
             raise ValueError("Empty value passed for a required argument 'body'.")
@@ -1667,7 +1868,14 @@ def search_template(self, body, index=None, params=None, headers=None):
         "version",
         "version_type",
     )
-    def termvectors(self, index, body=None, id=None, params=None, headers=None):
+    def termvectors(
+        self,
+        index: Any,
+        body: Any = None,
+        id: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Returns information and statistics about terms in the fields of a particular
         document.
@@ -1676,28 +1884,28 @@ def termvectors(self, index, body=None, id=None, params=None, headers=None):
         :arg index: The index in which the document resides.
         :arg body: Define parameters and/or supply a document to get
             termvectors for. See documentation.
-        :arg id: The id of the document, when not specified a doc param
-            should be supplied.
+        :arg id: Document ID. When not specified a doc param should be
+            supplied.
         :arg field_statistics: Specifies if document count, sum of
             document frequencies and sum of total term frequencies should be
-            returned. Default: True
-        :arg fields: A comma-separated list of fields to return.
+            returned. Default is True.
+        :arg fields: Comma-separated list of fields to return.
         :arg offsets: Specifies if term offsets should be returned.
-            Default: True
+            Default is True.
         :arg payloads: Specifies if term payloads should be returned.
-            Default: True
+            Default is True.
         :arg positions: Specifies if term positions should be returned.
-            Default: True
+            Default is True.
         :arg preference: Specify the node or shard the operation should
-            be performed on (default: random).
+            be performed on. Default is random.
         :arg realtime: Specifies if request is real-time as opposed to
-            near-real-time (default: true).
-        :arg routing: Specific routing value.
+            near-real-time. Default is True.
+        :arg routing: Routing value.
         :arg term_statistics: Specifies if total term frequency and
-            document frequency should be returned.
-        :arg version: Explicit version number for concurrency control
-        :arg version_type: Specific version type Valid choices:
-            internal, external, external_gte, force
+            document frequency should be returned. Default is false.
+        :arg version: Explicit version number for concurrency control.
+        :arg version_type: Specific version type. Valid choices are
+            internal, external, external_gte, force.
""" if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") @@ -1722,43 +1930,48 @@ def termvectors(self, index, body=None, id=None, params=None, headers=None): "timeout", "wait_for_active_shards", ) - def update(self, index, id, body, params=None, headers=None): + def update( + self, + index: Any, + id: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates a document with a script or partial document. - :arg index: The name of the index - :arg id: Document ID + :arg index: Index name. + :arg id: Document ID. :arg body: The request definition requires either `script` or partial `doc` :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field - :arg if_primary_term: only perform the update operation if the - last operation that has changed the document has the specified primary - term - :arg if_seq_no: only perform the update operation if the last - operation that has changed the document has the specified sequence - number - :arg lang: The script language (default: painless) + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. + :arg if_primary_term: only perform the operation if the last + operation that has changed the document has the specified primary term. + :arg if_seq_no: only perform the operation if the last operation + that has changed the document has the specified sequence number. + :arg lang: The script language. Default is painless. :arg refresh: If `true` then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` (the default) then - do nothing with refreshes. Valid choices: true, false, wait_for - :arg require_alias: When true, requires destination is an alias. - Default is false + do nothing with refreshes. Valid choices are true, false, wait_for. + :arg require_alias: When true, requires destination to be an + alias. Default is false. :arg retry_on_conflict: Specify how many times should the - operation be retried when a conflict occurs (default: 0) - :arg routing: Specific routing value - :arg timeout: Explicit operation timeout + operation be retried when a conflict occurs. Default is 0. + :arg routing: Routing value. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Sets the number of shard copies - that must be active before proceeding with the update operation. - Defaults to 1, meaning the primary shard only. Set to `all` for all - shard copies, otherwise set to any non-negative value less than or equal - to the total number of copies for the shard (number of replicas + 1) + that must be active before proceeding with the operation. Defaults to 1, + meaning the primary shard only. Set to `all` for all shard copies, + otherwise set to any non-negative value less than or equal to the total + number of copies for the shard (number of replicas + 1). Default is 1. 
""" for param in (index, id, body): if param in SKIP_IN_PATH: @@ -1803,91 +2016,92 @@ def update(self, index, id, body, params=None, headers=None): "terminate_after", "timeout", "version", - "version_type", "wait_for_active_shards", "wait_for_completion", ) - def update_by_query(self, index, body=None, params=None, headers=None): + def update_by_query( + self, + index: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Performs an update on every document in the index without changing the source, for example to pick up a mapping change. - :arg index: A comma-separated list of index names to search; use - `_all` or empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg body: The search definition using the Query DSL - search; leave empty to perform the operation on all types :arg _source: True or false to return the _source field or not, - or a list of fields to return - :arg _source_excludes: A list of fields to exclude from the - returned _source field - :arg _source_includes: A list of fields to extract and return - from the _source field + or a list of fields to return. + :arg _source_excludes: List of fields to exclude from the + returned _source field. + :arg _source_includes: List of fields to extract and return from + the _source field. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg analyze_wildcard: Specify whether wildcard and prefix - queries should be analyzed (default: false) - :arg analyzer: The analyzer to use for the query string - :arg conflicts: What to do when the update by query hits version - conflicts? Valid choices: abort, proceed Default: abort + queries should be analyzed. Default is false. + :arg analyzer: The analyzer to use for the query string. + :arg conflicts: What to do when the operation encounters version + conflicts?. Valid choices are abort, proceed. :arg default_operator: The default operator for query string - query (AND or OR) Valid choices: AND, OR Default: OR + query (AND or OR). Valid choices are AND, OR. :arg df: The field to use as default where no field prefix is - given in the query string + given in the query string. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg from_: Starting offset (default: 0) + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg from_: Starting offset. Default is 0. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg lenient: Specify whether format-based query failures (such - as providing text to a numeric field) should be ignored + as providing text to a numeric field) should be ignored. :arg max_docs: Maximum number of documents to process (default: - all documents) - :arg pipeline: Ingest pipeline to set on index requests made by - this action. (default: none) + all documents). + :arg pipeline: The pipeline id to preprocess incoming documents + with. 
        :arg preference: Specify the node or shard the operation should
-            be performed on (default: random)
-        :arg q: Query in the Lucene query string syntax
-        :arg refresh: Should the affected indexes be refreshed?
+            be performed on. Default is random.
+        :arg q: Query in the Lucene query string syntax.
+        :arg refresh: Should the affected indexes be refreshed?
         :arg request_cache: Specify if request cache should be used for
-            this request or not, defaults to index level setting
-        :arg requests_per_second: The throttle to set on this request in
-            sub-requests per second. -1 means no throttle.
-        :arg routing: A comma-separated list of specific routing values
+            this request or not, defaults to index level setting.
+        :arg requests_per_second: The throttle for this request in sub-
+            requests per second. -1 means no throttle. Default is 0.
+        :arg routing: Comma-separated list of specific routing values.
         :arg scroll: Specify how long a consistent view of the index
-            should be maintained for scrolled search
-        :arg scroll_size: Size on the scroll request powering the update
-            by query Default: 100
+            should be maintained for scrolled search.
+        :arg scroll_size: Size on the scroll request powering the
+            operation. Default is 100.
         :arg search_timeout: Explicit timeout for each search request.
             Defaults to no timeout.
-        :arg search_type: Search operation type Valid choices:
-            query_then_fetch, dfs_query_then_fetch
-        :arg size: Deprecated, please use `max_docs` instead
+        :arg search_type: Search operation type. Valid choices are
+            query_then_fetch, dfs_query_then_fetch.
+        :arg size: Deprecated, please use `max_docs` instead.
         :arg slices: The number of slices this task should be divided
             into. Defaults to 1, meaning the task isn't sliced into subtasks. Can be
-            set to `auto`. Default: 1
-        :arg sort: A comma-separated list of <field>:<direction> pairs
+            set to `auto`. Default is 1.
+        :arg sort: Comma-separated list of <field>:<direction> pairs.
         :arg stats: Specific 'tag' of the request for logging and
-            statistical purposes
+            statistical purposes.
         :arg terminate_after: The maximum number of documents to collect
             for each shard, upon reaching which the query execution will terminate
             early.
         :arg timeout: Time each individual bulk request should wait for
-            shards that are unavailable. Default: 1m
-        :arg version: Specify whether to return document version as part
-            of a hit
-        :arg version_type: Should the document increment the version
-            number (internal) on hit or not (reindex)
+            shards that are unavailable. Default is 1m.
+        :arg version: Whether to return document version as part of a
+            hit.
         :arg wait_for_active_shards: Sets the number of shard copies
-            that must be active before proceeding with the update by query
-            operation. Defaults to 1, meaning the primary shard only. Set to `all`
-            for all shard copies, otherwise set to any non-negative value less than
-            or equal to the total number of copies for the shard (number of replicas
-            + 1)
-        :arg wait_for_completion: Should the request should block until
-            the update by query operation is complete. Default: True
+            that must be active before proceeding with the operation. Defaults to 1,
+            meaning the primary shard only. Set to `all` for all shard copies,
+            otherwise set to any non-negative value less than or equal to the total
+            number of copies for the shard (number of replicas + 1). Default is 1.
+        :arg wait_for_completion: Should this request wait until the
+            operation has completed before returning. Default is True.
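Because update_by_query has so many knobs, one worked sketch is useful. The index, field names, and script below are illustrative only, and `client` is the instance assumed in the earlier sketches:

    # Increment a counter on every matching document, tolerating
    # concurrent edits instead of aborting on version conflicts.
    response = client.update_by_query(
        index="movies",                                  # hypothetical index
        body={
            "query": {"term": {"genre": "comedy"}},
            "script": {
                "source": "ctx._source.views += 1",
                "lang": "painless",                      # the default language
            },
        },
        conflicts="proceed",   # keep going past version conflicts
        slices="auto",         # let the cluster choose the slice count
    )
    print(response["updated"], "documents updated")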
""" # from is a reserved word so it cannot be used, use from_ instead if "from_" in params: @@ -1905,15 +2119,20 @@ def update_by_query(self, index, body=None, params=None, headers=None): ) @query_params("requests_per_second") - def update_by_query_rethrottle(self, task_id, params=None, headers=None): + def update_by_query_rethrottle( + self, + task_id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Changes the number of requests per second for a particular Update By Query operation. - :arg task_id: The task id to rethrottle - :arg requests_per_second: The throttle to set on this request in - floating sub-requests per second. -1 means set no throttle. + :arg task_id: The task id to rethrottle. + :arg requests_per_second: The throttle for this request in sub- + requests per second. -1 means no throttle. """ if task_id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'task_id'.") @@ -1926,119 +2145,117 @@ def update_by_query_rethrottle(self, task_id, params=None, headers=None): ) @query_params() - def get_script_context(self, params=None, headers=None): + def get_script_context( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns all script contexts. - - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version """ return self.transport.perform_request( "GET", "/_script_context", params=params, headers=headers ) @query_params() - def get_script_languages(self, params=None, headers=None): + def get_script_languages( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Returns available script types, languages and contexts - + Returns available script types, languages and contexts. - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version """ return self.transport.perform_request( "GET", "/_script_language", params=params, headers=headers ) - @query_params() - def list_all_point_in_time(self, params=None, headers=None): + @query_params( + "allow_partial_pit_creation", + "expand_wildcards", + "keep_alive", + "preference", + "routing", + ) + def create_pit( + self, + index: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Returns the list of active point in times searches + Creates point in time context. + + + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. + :arg allow_partial_pit_creation: Allow if point in time can be + created with partial failures. + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg keep_alive: Specify the keep alive for point in time. + :arg preference: Specify the node or shard the operation should + be performed on. Default is random. + :arg routing: Comma-separated list of specific routing values. """ + if index in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'index'.") + return self.transport.perform_request( - "GET", - _make_path("_search", "point_in_time", "_all"), + "POST", + _make_path(index, "_search", "point_in_time"), params=params, headers=headers, ) @query_params() - def delete_point_in_time(self, body=None, all=False, params=None, headers=None): + def delete_all_pits( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Delete a point in time - + Deletes all active point in time searches. 
- :arg body: a point-in-time id to delete - :arg all: set it to `True` to delete all alive point in time. """ - - path = ( - _make_path("_search", "point_in_time", "_all") - if all - else _make_path("_search", "point_in_time") - ) return self.transport.perform_request( - "DELETE", path, params=params, headers=headers, body=body + "DELETE", "/_search/point_in_time/_all", params=params, headers=headers ) - @query_params( - "expand_wildcards", "ignore_unavailable", "keep_alive", "preference", "routing" - ) - def create_point_in_time(self, index=None, params=None, headers=None): + @query_params() + def delete_pit( + self, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Create a point in time that can be used in subsequent searches + Deletes one or more point in time searches based on the IDs passed. - :arg index: A comma-separated list of index names to open point - in time; use `_all` or empty string to perform the operation on all - indices - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg keep_alive: Specific the time to live for the point in time - :arg preference: Specify the node or shard the operation should - be performed on (default: random) - :arg routing: Specific routing value + :arg body: The point-in-time ids to be deleted """ return self.transport.perform_request( - "POST", - _make_path(index, "_search", "point_in_time"), + "DELETE", + "/_search/point_in_time", params=params, headers=headers, + body=body, ) @query_params() - def terms_enum(self, index, body=None, params=None, headers=None): + def get_all_pits( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - The terms enum API can be used to discover terms in the index that begin with - the provided string. It is designed for low-latency look-ups used in auto- - complete scenarios. - + Lists all active point in time searches. - .. warning:: - - This API is **beta** so may include breaking changes - or be removed in a future version - - :arg index: A comma-separated list of index names to search; use - `_all` or empty string to perform the operation on all indices - :arg body: field name, string which is the prefix expected in - matching terms, timeout and size for max number of results """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'index'.") - return self.transport.perform_request( - "POST", - _make_path(index, "_terms_enum"), - params=params, - headers=headers, - body=body, + "GET", "/_search/point_in_time/_all", params=params, headers=headers ) diff --git a/opensearchpy/client/__init__.pyi b/opensearchpy/client/__init__.pyi deleted file mode 100644 index 64f21ca7..00000000 --- a/opensearchpy/client/__init__.pyi +++ /dev/null @@ -1,1130 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. 
licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from __future__ import unicode_literals - -import logging -from typing import Any, Collection, MutableMapping, Optional, Tuple, Type, Union - -from ..transport import Transport -from .cat import CatClient -from .cluster import ClusterClient -from .dangling_indices import DanglingIndicesClient -from .features import FeaturesClient -from .indices import IndicesClient -from .ingest import IngestClient -from .nodes import NodesClient -from .remote import RemoteClient -from .security import SecurityClient -from .snapshot import SnapshotClient -from .tasks import TasksClient - -logger: logging.Logger - -class OpenSearch(object): - transport: Transport - - cat: CatClient - cluster: ClusterClient - features: FeaturesClient - indices: IndicesClient - ingest: IngestClient - nodes: NodesClient - remote: RemoteClient - security: SecurityClient - snapshot: SnapshotClient - tasks: TasksClient - def __init__( - self, hosts: Any = ..., transport_class: Type[Transport] = ..., **kwargs: Any - ) -> None: ... - def __repr__(self) -> str: ... - def __enter__(self) -> "OpenSearch": ... - def __exit__(self, *_: Any) -> None: ... - def close(self) -> None: ... - # AUTO-GENERATED-API-DEFINITIONS # - def ping( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - def info( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def create( - self, - index: Any, - id: Any, - *, - body: Any, - pipeline: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def index( - self, - index: Any, - *, - body: Any, - id: Optional[Any] = ..., - if_primary_term: Optional[Any] = ..., - if_seq_no: Optional[Any] = ..., - op_type: Optional[Any] = ..., - pipeline: Optional[Any] = ..., - refresh: Optional[Any] = ..., - require_alias: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def bulk( - self, - *, - body: Any, - index: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - pipeline: Optional[Any] = ..., - refresh: Optional[Any] = ..., - require_alias: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def clear_scroll( - self, - *, - body: Optional[Any] = ..., - scroll_id: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def count( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_throttled: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - min_score: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - routing: Optional[Any] = ..., - terminate_after: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete( - self, - index: Any, - id: Any, - *, - if_primary_term: Optional[Any] = ..., - if_seq_no: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def delete_by_query( - self, - index: Any, - *, - body: Any, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - conflicts: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - from_: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - max_docs: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - refresh: Optional[Any] = ..., - request_cache: Optional[Any] = ..., - requests_per_second: Optional[Any] = ..., - routing: Optional[Any] = ..., - scroll: Optional[Any] = ..., - scroll_size: Optional[Any] = ..., - search_timeout: Optional[Any] = ..., - search_type: Optional[Any] = ..., - size: Optional[Any] = ..., - slices: Optional[Any] = ..., - sort: Optional[Any] = ..., - stats: Optional[Any] = ..., - terminate_after: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_by_query_rethrottle( - self, - task_id: Any, - *, - requests_per_second: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_script( - self, - id: Any, - *, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def exists( - self, - index: Any, - id: Any, - *, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - def exists_source( - self, - index: Any, - id: Any, - *, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - def explain( - self, - index: Any, - id: Any, - *, - body: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - lenient: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - routing: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def field_caps( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - fields: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_unmapped: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get( - self, - index: Any, - id: Any, - *, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_script( - self, - id: Any, - *, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_source( - self, - index: Any, - id: Any, - *, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def mget( - self, - *, - body: Any, - index: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - refresh: Optional[Any] = ..., - routing: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def msearch( - self, - *, - body: Any, - index: Optional[Any] = ..., - ccs_minimize_roundtrips: Optional[Any] = ..., - max_concurrent_searches: Optional[Any] = ..., - max_concurrent_shard_requests: Optional[Any] = ..., - pre_filter_shard_size: Optional[Any] = ..., - rest_total_hits_as_int: Optional[Any] = ..., - search_type: Optional[Any] = ..., - typed_keys: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def msearch_template( - self, - *, - body: Any, - index: Optional[Any] = ..., - ccs_minimize_roundtrips: Optional[Any] = ..., - max_concurrent_searches: Optional[Any] = ..., - rest_total_hits_as_int: Optional[Any] = ..., - search_type: Optional[Any] = ..., - typed_keys: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def mtermvectors( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - field_statistics: Optional[Any] = ..., - fields: Optional[Any] = ..., - ids: Optional[Any] = ..., - offsets: Optional[Any] = ..., - payloads: Optional[Any] = ..., - positions: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - routing: Optional[Any] = ..., - term_statistics: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def put_script( - self, - id: Any, - *, - body: Any, - context: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def rank_eval( - self, - *, - body: Any, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - search_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def reindex( - self, - *, - body: Any, - max_docs: Optional[Any] = ..., - refresh: Optional[Any] = ..., - requests_per_second: Optional[Any] = ..., - scroll: Optional[Any] = ..., - slices: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def reindex_rethrottle( - self, - task_id: Any, - *, - requests_per_second: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def render_search_template( - self, - *, - body: Optional[Any] = ..., - id: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def scripts_painless_execute( - self, - *, - body: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def scroll( - self, - *, - body: Optional[Any] = ..., - scroll_id: Optional[Any] = ..., - rest_total_hits_as_int: Optional[Any] = ..., - scroll: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def search( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - allow_partial_search_results: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - batched_reduce_size: Optional[Any] = ..., - ccs_minimize_roundtrips: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - docvalue_fields: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - explain: Optional[Any] = ..., - from_: Optional[Any] = ..., - ignore_throttled: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - max_concurrent_shard_requests: Optional[Any] = ..., - min_compatible_shard_node: Optional[Any] = ..., - pre_filter_shard_size: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - request_cache: Optional[Any] = ..., - rest_total_hits_as_int: Optional[Any] = ..., - routing: Optional[Any] = ..., - scroll: Optional[Any] = ..., - search_type: Optional[Any] = ..., - seq_no_primary_term: Optional[Any] = ..., - size: Optional[Any] = ..., - sort: Optional[Any] = ..., - stats: Optional[Any] = ..., - stored_fields: Optional[Any] = ..., - suggest_field: Optional[Any] = ..., - suggest_mode: Optional[Any] = ..., - suggest_size: Optional[Any] = ..., - suggest_text: Optional[Any] = ..., - terminate_after: Optional[Any] = ..., - timeout: Optional[Any] = ..., - track_scores: Optional[Any] = ..., - track_total_hits: Optional[Any] = ..., - typed_keys: Optional[Any] = ..., - version: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def search_shards( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - preference: Optional[Any] = ..., - routing: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def search_template( - self, - *, - body: Any, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - ccs_minimize_roundtrips: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - explain: Optional[Any] = ..., - ignore_throttled: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - preference: Optional[Any] = ..., - profile: Optional[Any] = ..., - rest_total_hits_as_int: Optional[Any] = ..., - routing: Optional[Any] = ..., - scroll: Optional[Any] = ..., - search_type: Optional[Any] = ..., - typed_keys: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def termvectors( - self, - index: Any, - *, - body: Optional[Any] = ..., - id: Optional[Any] = ..., - field_statistics: Optional[Any] = ..., - fields: Optional[Any] = ..., - offsets: Optional[Any] = ..., - payloads: Optional[Any] = ..., - positions: Optional[Any] = ..., - preference: Optional[Any] = ..., - realtime: Optional[Any] = ..., - routing: Optional[Any] = ..., - term_statistics: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def update( - self, - index: Any, - id: Any, - *, - body: Any, - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - if_primary_term: Optional[Any] = ..., - if_seq_no: Optional[Any] = ..., - lang: Optional[Any] = ..., - refresh: Optional[Any] = ..., - require_alias: Optional[Any] = ..., - retry_on_conflict: Optional[Any] = ..., - routing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def update_by_query( - self, - index: Any, - *, - body: Optional[Any] = ..., - _source: Optional[Any] = ..., - _source_excludes: Optional[Any] = ..., - _source_includes: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - conflicts: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - from_: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - max_docs: Optional[Any] = ..., - pipeline: Optional[Any] = ..., - preference: Optional[Any] = ..., - q: Optional[Any] = ..., - refresh: Optional[Any] = ..., - request_cache: Optional[Any] = ..., - requests_per_second: Optional[Any] = ..., - routing: Optional[Any] = ..., - scroll: Optional[Any] = ..., - scroll_size: Optional[Any] = ..., - search_timeout: Optional[Any] = ..., - search_type: Optional[Any] = ..., - size: Optional[Any] = ..., - slices: Optional[Any] = ..., - sort: Optional[Any] = ..., - stats: Optional[Any] = ..., - terminate_after: Optional[Any] = ..., - timeout: Optional[Any] = ..., - version: Optional[Any] = ..., - version_type: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def update_by_query_rethrottle( - self, - task_id: Any, - *, - requests_per_second: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_script_context( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def get_script_languages( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def list_all_point_in_time( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_point_in_time( - self, - *, - body: Optional[Any] = ..., - all: Optional[bool] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def create_point_in_time( - self, - *, - index: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - keep_alive: Optional[Any] = ..., - preference: Optional[Any] = ..., - routing: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def terms_enum( - self, - index: Any, - *, - body: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
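The `__init__.pyi` deletion above corresponds to the inline annotations visible throughout this diff: signatures that previously lived in the stub now sit directly on the implementations. A simplified illustration of the pattern (not the actual implementation body):

    from typing import Any

    class OpenSearch:
        # Type checkers read hints straight from the implementation,
        # so no parallel __init__.pyi stub file is needed.
        def ping(self, params: Any = None, headers: Any = None) -> bool:
            ...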
diff --git a/opensearchpy/client/_patch.py b/opensearchpy/client/_patch.py new file mode 100644 index 00000000..3f156906 --- /dev/null +++ b/opensearchpy/client/_patch.py @@ -0,0 +1,145 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + +import warnings +from typing import Any + +from .utils import SKIP_IN_PATH, query_params + + +@query_params() +def list_all_point_in_time(self: Any, params: Any = None, headers: Any = None) -> Any: + """ + Returns the list of active point in times searches + + .. warning:: + + This API will be removed in a future version + Use 'get_all_pits' API instead. + + """ + warnings.warn( + "The 'list_all_point_in_time' API is deprecated and will be removed in a future version. Use 'get_all_pits' API instead.", + DeprecationWarning, + ) + + return self.get_all_pits(params=params, headers=headers) + + +@query_params( + "expand_wildcards", "ignore_unavailable", "keep_alive", "preference", "routing" +) +def create_point_in_time( + self: Any, index: Any, params: Any = None, headers: Any = None +) -> Any: + """ + Create a point in time that can be used in subsequent searches + + + :arg index: A comma-separated list of index names to open point + in time; use `_all` or empty string to perform the operation on all + indices + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices: open, + closed, hidden, none, all Default: open + :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed) + :arg keep_alive: Specific the time to live for the point in time + :arg preference: Specify the node or shard the operation should + be performed on (default: random) + :arg routing: Specific routing value + + .. warning:: + + This API will be removed in a future version + Use 'create_pit' API instead. + + """ + warnings.warn( + "The 'create_point_in_time' API is deprecated and will be removed in a future version. Use 'create_pit' API instead.", + DeprecationWarning, + ) + + return self.create_pit(index=index, params=params, headers=headers) + + +@query_params() +def delete_point_in_time( + self: Any, + body: Any = None, + all: bool = False, + params: Any = None, + headers: Any = None, +) -> Any: + """ + Delete a point in time + + + :arg body: a point-in-time id to delete + :arg all: set it to `True` to delete all alive point in time. + + .. warning:: + + This API will be removed in a future version + Use 'delete_all_pits' or 'delete_pit' API instead. + + """ + warnings.warn( + "The 'delete_point_in_time' API is deprecated and will be removed in a future version. Use 'delete_all_pits' or 'delete_pit' API instead.", + DeprecationWarning, + ) + + if all: + return self.delete_all_pits(params=params, headers=headers) + else: + return self.delete_pit(body=body, params=params, headers=headers) + + +@query_params() +def health_check(self: Any, params: Any = None, headers: Any = None) -> Any: + """ + Checks to see if the Security plugin is up and running. + + .. warning:: + + This API will be removed in a future version + Use 'health' API instead. + + """ + warnings.warn( + "The 'health_check' API in security client is deprecated and will be removed in a future version. 
Use 'health' API instead.", + DeprecationWarning, + ) + + return self.health(params=params, headers=headers) + + +@query_params() +def update_audit_config( + self: Any, body: Any, params: Any = None, headers: Any = None +) -> Any: + """ + A PUT call updates the audit configuration. + + .. warning:: + + This API will be removed in a future version + Use 'update_audit_configuration' API instead. + + """ + warnings.warn( + "The 'update_audit_config' API in security client is deprecated and will be removed in a future version. Use 'update_audit_configuration' API instead.", + DeprecationWarning, + ) + + if body in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'body'.") + + return self.update_audit_configuration(params=params, headers=headers, body=body) diff --git a/opensearchpy/client/cat.py b/opensearchpy/client/cat.py index cc1106d5..91adbf35 100644 --- a/opensearchpy/client/cat.py +++ b/opensearchpy/client/cat.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,69 +26,106 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + +from typing import Any + from .utils import NamespacedClient, _make_path, query_params class CatClient(NamespacedClient): @query_params("expand_wildcards", "format", "h", "help", "local", "s", "v") - def aliases(self, name=None, params=None, headers=None): + def aliases( + self, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Shows information about currently configured aliases to indices including filter and routing infos. - :arg name: A comma-separated list of alias names to return + :arg name: Comma-separated list of alias names. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: all - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) + from cluster-manager node. Default is false. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", _make_path("_cat", "aliases", name), params=params, headers=headers ) + @query_params() + def all_pit_segments( + self, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Lists all active point-in-time segments. 
+ + """ + return self.transport.perform_request( + "GET", "/_cat/pit_segments/_all", params=params, headers=headers + ) + @query_params( "bytes", + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", "v", ) - def allocation(self, node_id=None, params=None, headers=None): + def allocation( + self, + node_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Provides a snapshot of how many shards are allocated to each data node and how much disk space they are using. - :arg node_id: A comma-separated list of node IDs or names to - limit the returned information - :arg bytes: The unit in which to display byte values Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg node_id: Comma-separated list of node IDs or names to limit + the returned information. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", @@ -96,57 +134,139 @@ def allocation(self, node_id=None, params=None, headers=None): headers=headers, ) + @query_params( + "cluster_manager_timeout", + "format", + "h", + "help", + "local", + "master_timeout", + "s", + "v", + ) + def cluster_manager( + self, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Returns information about the cluster-manager node. + + + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. + :arg local: Return local information, do not retrieve the state + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg s: Comma-separated list of column names or column aliases + to sort by. + :arg v: Verbose mode. Display column headers. Default is false. 
+ """ + return self.transport.perform_request( + "GET", "/_cat/cluster_manager", params=params, headers=headers + ) + @query_params("format", "h", "help", "s", "v") - def count(self, index=None, params=None, headers=None): + def count( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Provides quick access to the document count of the entire cluster, or individual indices. - :arg index: A comma-separated list of index names to limit the - returned information - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg index: Comma-separated list of indices to limit the + returned information. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", _make_path("_cat", "count", index), params=params, headers=headers ) + @query_params("bytes", "format", "h", "help", "s", "v") + def fielddata( + self, + fields: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Shows how much heap memory is currently being used by fielddata on every data + node in the cluster. + + + :arg fields: Comma-separated list of fields to return in the + output. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. + :arg s: Comma-separated list of column names or column aliases + to sort by. + :arg v: Verbose mode. Display column headers. Default is false. + """ + return self.transport.perform_request( + "GET", + _make_path("_cat", "fielddata", fields), + params=params, + headers=headers, + ) + @query_params("format", "h", "help", "s", "time", "ts", "v") - def health(self, params=None, headers=None): + def health( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a concise representation of the cluster health. - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid - choices: d, h, m, s, ms, micros, nanos - :arg ts: Set to false to disable timestamping Default: True - :arg v: Verbose mode. Display column headers + to sort by. + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg ts: Set to false to disable timestamping. Default is True. + :arg v: Verbose mode. Display column headers. Default is false. 
""" return self.transport.perform_request( "GET", "/_cat/health", params=params, headers=headers ) @query_params("help", "s") - def help(self, params=None, headers=None): + def help( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns help for the Cat APIs. - :arg help: Return help information + :arg help: Return help information. Default is false. :arg s: Comma-separated list of column names or column aliases - to sort by + to sort by. """ return self.transport.perform_request( "GET", "/_cat", params=params, headers=headers @@ -154,6 +274,7 @@ def help(self, params=None, headers=None): @query_params( "bytes", + "cluster_manager_timeout", "expand_wildcards", "format", "h", @@ -162,488 +283,581 @@ def help(self, params=None, headers=None): "include_unloaded_segments", "local", "master_timeout", - "cluster_manager_timeout", "pri", "s", "time", "v", ) - def indices(self, index=None, params=None, headers=None): + def indices( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about indices: number of primaries and replicas, document counts, disk size, ... - :arg index: A comma-separated list of index names to limit the - returned information - :arg bytes: The unit in which to display byte values Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb + :arg index: Comma-separated list of indices to limit the + returned information. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: all - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg health: A health status ("green", "yellow", or "red" to - filter only indices matching the specified health status Valid choices: - green, yellow, red - :arg help: Return help information + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg health: Health status ('green', 'yellow', or 'red') to + filter only indices matching the specified health status. Valid choices + are green, yellow, red. + :arg help: Return help information. Default is false. :arg include_unloaded_segments: If set to true segment stats will include stats for segments that are not currently loaded into - memory + memory. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg pri: Set to true to return stats only for primary shards + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg pri: Set to true to return stats only for primary shards. + Default is false. 
:arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers + to sort by. + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", _make_path("_cat", "indices", index), params=params, headers=headers ) @query_params( + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", "v", ) - def master(self, params=None, headers=None): + def master( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Returns information about the master node. + Returns information about the cluster-manager node. - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. Default is false. """ from warnings import warn - warn("Deprecated: use `cluster_manager` instead") + warn( + "Deprecated: To promote inclusive language, please use '/_cat/cluster_manager' instead." + ) return self.transport.perform_request( "GET", "/_cat/master", params=params, headers=headers ) - @query_params("format", "h", "help", "local", "cluster_manager_timeout", "s", "v") - def cluster_manager(self, params=None, headers=None): + @query_params( + "cluster_manager_timeout", + "format", + "h", + "help", + "local", + "master_timeout", + "s", + "v", + ) + def nodeattrs( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Returns information about the cluster_manager node. + Returns information about custom node attributes. - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. 
+ :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( - "GET", "/_cat/cluster_manager", params=params, headers=headers + "GET", "/_cat/nodeattrs", params=params, headers=headers ) @query_params( "bytes", + "cluster_manager_timeout", "format", "full_id", "h", "help", - "include_unloaded_segments", "local", "master_timeout", - "cluster_manager_timeout", "s", "time", "v", ) - def nodes(self, params=None, headers=None): + def nodes( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns basic statistics about performance of cluster nodes. - :arg bytes: The unit in which to display byte values Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb - :arg format: a short version of the Accept header, e.g. json, - yaml + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. :arg full_id: Return the full node ID instead of the shortened - version (default: false) - :arg h: Comma-separated list of column names to display - :arg help: Return help information - :arg include_unloaded_segments: If set to true segment stats - will include stats for segments that are not currently loaded into - memory - :arg local: Calculate the selected nodes using the local cluster - state rather than the state from master node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + version. Default is false. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. + :arg local (Deprecated: This parameter does not cause this API + to act locally.): Return local information, do not retrieve the state + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers + to sort by. + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", "/_cat/nodes", params=params, headers=headers ) @query_params( - "active_only", "bytes", "detailed", "format", "h", "help", "s", "time", "v" - ) - def recovery(self, index=None, params=None, headers=None): - """ - Returns information about index shard recoveries, both on-going completed. 
- - - :arg index: Comma-separated list or wildcard expression of index - names to limit the returned information - :arg active_only: If `true`, the response only includes ongoing - shard recoveries - :arg bytes: The unit in which to display byte values Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb - :arg detailed: If `true`, the response includes detailed - information about shard recoveries - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information - :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers - """ - return self.transport.perform_request( - "GET", _make_path("_cat", "recovery", index), params=params, headers=headers - ) - - @query_params( - "bytes", + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", "time", "v", ) - def shards(self, index=None, params=None, headers=None): + def pending_tasks( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Provides a detailed view of shard allocation on nodes. + Returns a concise representation of the cluster pending tasks. - :arg index: A comma-separated list of index names to limit the - returned information - :arg bytes: The unit in which to display byte values Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers + to sort by. + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( - "GET", _make_path("_cat", "shards", index), params=params, headers=headers + "GET", "/_cat/pending_tasks", params=params, headers=headers ) - @query_params("bytes", "format", "h", "help", "s", "v") - def segments(self, index=None, params=None, headers=None): + @query_params() + def pit_segments( + self, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Provides low-level information about the segments in the shards of an index. + List segments for one or several PITs. 
-
-        :arg index: A comma-separated list of index names to limit the
-            returned information
-        :arg bytes: The unit in which to display byte values Valid
-            choices: b, k, kb, m, mb, g, gb, t, tb, p, pb
-        :arg format: a short version of the Accept header, e.g. json,
-            yaml
-        :arg h: Comma-separated list of column names to display
-        :arg help: Return help information
-        :arg s: Comma-separated list of column names or column aliases
-            to sort by
-        :arg v: Verbose mode. Display column headers
         """
         return self.transport.perform_request(
-            "GET", _make_path("_cat", "segments", index), params=params, headers=headers
+            "GET", "/_cat/pit_segments", params=params, headers=headers, body=body
         )

     @query_params(
+        "cluster_manager_timeout",
         "format",
         "h",
         "help",
         "local",
         "master_timeout",
-        "cluster_manager_timeout",
         "s",
-        "time",
         "v",
     )
-    def pending_tasks(self, params=None, headers=None):
+    def plugins(
+        self,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
-        Returns a concise representation of the cluster pending tasks.
+        Returns information about installed plugins across nodes.


-        :arg format: a short version of the Accept header, e.g. json,
-            yaml
-        :arg h: Comma-separated list of column names to display
-        :arg help: Return help information
+        :arg cluster_manager_timeout: Operation timeout for connection
+            to cluster-manager node.
+        :arg format: A short version of the Accept header, e.g. json,
+            yaml.
+        :arg h: Comma-separated list of column names to display.
+        :arg help: Return help information. Default is false.
         :arg local: Return local information, do not retrieve the state
-            from cluster_manager node (default: false)
-        :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection
-            to master node
-        :arg cluster_manager_timeout: Explicit operation timeout for connection
-            to cluster_manager node
+            from cluster-manager node. Default is false.
+        :arg master_timeout (Deprecated: To promote inclusive language,
+            use 'cluster_manager_timeout' instead.): Operation timeout for
+            connection to master node.
         :arg s: Comma-separated list of column names or column aliases
-            to sort by
-        :arg v: Verbose mode. Display column headers
+            to sort by.
+        :arg v: Verbose mode. Display column headers. Default is false.
         """
         return self.transport.perform_request(
-            "GET", "/_cat/pending_tasks", params=params, headers=headers
+            "GET", "/_cat/plugins", params=params, headers=headers
         )

     @query_params(
+        "active_only", "bytes", "detailed", "format", "h", "help", "s", "time", "v"
+    )
+    def recovery(
+        self,
+        index: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
+        """
+        Returns information about index shard recoveries, both on-going and completed.
+
+
+        :arg index: Comma-separated list or wildcard expression of index
+            names to limit the returned information.
+        :arg active_only: If `true`, the response only includes ongoing
+            shard recoveries. Default is false.
+        :arg bytes: The unit in which to display byte values. Valid
+            choices are b, k, kb, m, mb, g, gb, t, tb, p, pb.
+        :arg detailed: If `true`, the response includes detailed
+            information about shard recoveries. Default is false.
+        :arg format: A short version of the Accept header, e.g. json,
+            yaml.
+        :arg h: Comma-separated list of column names to display.
+        :arg help: Return help information. Default is false.
+        :arg s: Comma-separated list of column names or column aliases
+            to sort by.
+ :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. + """ + return self.transport.perform_request( + "GET", _make_path("_cat", "recovery", index), params=params, headers=headers + ) + + @query_params( + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", - "size", "v", ) - def thread_pool(self, thread_pool_patterns=None, params=None, headers=None): + def repositories( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Returns cluster-wide thread pool statistics per node. By default the active, - queue and rejected statistics are returned for all thread pools. + Returns information about snapshot repositories registered in the cluster. - :arg thread_pool_patterns: A comma-separated list of regular- - expressions to filter the thread pools in the output - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg size: The multiplier in which to display values Valid - choices: , k, m, g, t, p - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( - "GET", - _make_path("_cat", "thread_pool", thread_pool_patterns), - params=params, - headers=headers, + "GET", "/_cat/repositories", params=params, headers=headers ) - @query_params("bytes", "format", "h", "help", "s", "v") - def fielddata(self, fields=None, params=None, headers=None): + @query_params( + "active_only", + "bytes", + "completed_only", + "detailed", + "format", + "h", + "help", + "s", + "shards", + "time", + "v", + ) + def segment_replication( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Shows how much heap memory is currently being used by fielddata on every data - node in the cluster. + Returns information about both on-going and latest completed Segment + Replication events. - :arg fields: A comma-separated list of fields to return in the - output - :arg bytes: The unit in which to display byte values Valid - choices: b, k, kb, m, mb, g, gb, t, tb, p, pb - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg index: Comma-separated list or wildcard expression of index + names to limit the returned information. 
+ :arg active_only: If `true`, the response only includes ongoing + segment replication events. Default is false. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. + :arg completed_only: If `true`, the response only includes + latest completed segment replication events. Default is false. + :arg detailed: If `true`, the response includes detailed + information about segment replications. Default is false. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg shards: Comma-separated list of shards to display. + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", - _make_path("_cat", "fielddata", fields), + _make_path("_cat", "segment_replication", index), params=params, headers=headers, ) @query_params( + "bytes", + "cluster_manager_timeout", "format", "h", "help", - "include_bootstrap", - "local", "master_timeout", - "cluster_manager_timeout", "s", "v", ) - def plugins(self, params=None, headers=None): + def segments( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Returns information about installed plugins across nodes node. + Provides low-level information about the segments in the shards of an index. - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information - :arg include_bootstrap: Include bootstrap plugins in the - response - :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + :arg index: Comma-separated list of indices to limit the + returned information. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. Default is false. 
""" return self.transport.perform_request( - "GET", "/_cat/plugins", params=params, headers=headers + "GET", _make_path("_cat", "segments", index), params=params, headers=headers ) @query_params( + "bytes", + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", + "time", "v", ) - def nodeattrs(self, params=None, headers=None): + def shards( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Returns information about custom node attributes. + Provides a detailed view of shard allocation on nodes. - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg index: Comma-separated list of indices to limit the + returned information. + :arg bytes: The unit in which to display byte values. Valid + choices are b, k, kb, m, mb, g, gb, t, tb, p, pb. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( - "GET", "/_cat/nodeattrs", params=params, headers=headers + "GET", _make_path("_cat", "shards", index), params=params, headers=headers ) @query_params( + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", + "size", "v", ) - def repositories(self, params=None, headers=None): + def thread_pool( + self, + thread_pool_patterns: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Returns information about snapshot repositories registered in the cluster. + Returns cluster-wide thread pool statistics per node. By default the active, + queue and rejected statistics are returned for all thread pools. - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg thread_pool_patterns: Comma-separated list of regular- + expressions to filter the thread pools in the output. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. 
:arg local: Return local information, do not retrieve the state - from master node - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg size: The multiplier in which to display values. + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( - "GET", "/_cat/repositories", params=params, headers=headers + "GET", + _make_path("_cat", "thread_pool", thread_pool_patterns), + params=params, + headers=headers, ) @query_params( + "cluster_manager_timeout", "format", "h", "help", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "s", "time", "v", ) - def snapshots(self, repository=None, params=None, headers=None): + def snapshots( + self, + repository: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns all snapshots in a specific repository. - :arg repository: Name of repository from which to fetch the - snapshot information - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information - :arg ignore_unavailable: Set to true to ignore unavailable - snapshots - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + :arg repository: Comma-separated list of repository names. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. + :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed). Default is + false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers + to sort by. + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", @@ -664,101 +878,76 @@ def snapshots(self, repository=None, params=None, headers=None): "time", "v", ) - def tasks(self, params=None, headers=None): + def tasks( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about the tasks currently executing on one or more nodes in the cluster. - :arg actions: A comma-separated list of actions that should be + :arg actions: Comma-separated list of actions that should be returned. Leave empty to return all. 
- :arg detailed: Return detailed task information (default: false) - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information - :arg nodes: A comma-separated list of node IDs or names to limit + :arg detailed: Return detailed task information. Default is + false. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. + :arg nodes: Comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the - node you're connecting to, leave empty to get information from all nodes + node you're connecting to, leave empty to get information from all + nodes. :arg parent_task_id: Return tasks with specified parent task id (node_id:task_number). Set to -1 to return all. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg time: The unit in which to display time values Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers + to sort by. + :arg time: The unit in which to display time values. Valid + choices are d, h, m, s, ms, micros, nanos. + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", "/_cat/tasks", params=params, headers=headers ) @query_params( + "cluster_manager_timeout", "format", "h", "help", "local", "master_timeout", - "cluster_manager_timeout", "s", "v", ) - def templates(self, name=None, params=None, headers=None): + def templates( + self, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about existing templates. - :arg name: A pattern that returned template names must match - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg h: Comma-separated list of column names to display - :arg help: Return help information + :arg name: The name of the template. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg format: A short version of the Accept header, e.g. json, + yaml. + :arg h: Comma-separated list of column names to display. + :arg help: Return help information. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg s: Comma-separated list of column names or column aliases - to sort by - :arg v: Verbose mode. Display column headers + to sort by. + :arg v: Verbose mode. Display column headers. Default is false. """ return self.transport.perform_request( "GET", _make_path("_cat", "templates", name), params=params, headers=headers ) - - @query_params( - "allow_no_match", "format", "from_", "h", "help", "s", "size", "time", "v" - ) - def transforms(self, transform_id=None, params=None, headers=None): - """ - Gets configuration and usage information about transforms. - - - :arg transform_id: The id of the transform for which to get - stats. 
'_all' or '*' implies all transforms - :arg allow_no_match: Whether to ignore if a wildcard expression - matches no transforms. (This includes `_all` string or when no - transforms have been specified) - :arg format: a short version of the Accept header, e.g. json, - yaml - :arg from_: skips a number of transform configs, defaults to 0 - :arg h: Comma-separated list of column names to display - :arg help: Return help information - :arg s: Comma-separated list of column names or column aliases - to sort by - :arg size: specifies a max number of transforms to get, defaults - to 100 - :arg time: The unit in which to display time values Valid - choices: d, h, m, s, ms, micros, nanos - :arg v: Verbose mode. Display column headers - """ - # from is a reserved word so it cannot be used, use from_ instead - if "from_" in params: - params["from"] = params.pop("from_") - - return self.transport.perform_request( - "GET", - _make_path("_cat", "transforms", transform_id), - params=params, - headers=headers, - ) diff --git a/opensearchpy/client/cat.pyi b/opensearchpy/client/cat.pyi deleted file mode 100644 index e29e9253..00000000 --- a/opensearchpy/client/cat.pyi +++ /dev/null @@ -1,555 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class CatClient(NamespacedClient): - def aliases( - self, - *, - name: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
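Note: with `cat.pyi` deleted and the type hints merged inline, the generated signatures in `cat.py` above are the single source of truth for editors and type checkers. A minimal sketch against the `client` built in the earlier PIT example (the alias pattern is hypothetical):

    # cat.aliases: name plus the standard cat options (format, h, s, v).
    print(client.cat.aliases(name="my-alias*", format="json"))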
- def allocation( - self, - *, - node_id: Optional[Any] = ..., - bytes: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def count( - self, - *, - index: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def health( - self, - *, - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - ts: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def help( - self, - *, - help: Optional[Any] = ..., - s: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
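The count and health stubs above map to the regenerated methods earlier in `cat.py`; a quick sketch, reusing the assumed `client` (index name hypothetical):

    # Cluster health and per-index document counts as parsed JSON.
    print(client.cat.health(format="json"))
    print(client.cat.count(index="my-index", format="json"))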
- def indices( - self, - *, - index: Optional[Any] = ..., - bytes: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - health: Optional[Any] = ..., - help: Optional[Any] = ..., - include_unloaded_segments: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pri: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def master( - self, - *, - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def cluster_manager( - self, - *, - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
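Per the `cat.py` hunk above, `master` now warns and `cluster_manager` is the drop-in replacement; both return the same information, so migrating is a rename. A sketch, reusing the assumed `client`:

    import warnings

    # Legacy call: still works, but emits the deprecation message shown above.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        legacy = client.cat.master(format="json")

    # Preferred call: GET /_cat/cluster_manager.
    current = client.cat.cluster_manager(format="json")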
- def nodes( - self, - *, - bytes: Optional[Any] = ..., - format: Optional[Any] = ..., - full_id: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - include_unloaded_segments: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def recovery( - self, - *, - index: Optional[Any] = ..., - active_only: Optional[Any] = ..., - bytes: Optional[Any] = ..., - detailed: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def shards( - self, - *, - index: Optional[Any] = ..., - bytes: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def segments( - self, - *, - index: Optional[Any] = ..., - bytes: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
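Shard-level inspection with the regenerated signatures, reusing the assumed `client` (index name hypothetical; `bytes` and `active_only` as documented above):

    print(client.cat.shards(index="my-index", bytes="mb", v=True))
    print(client.cat.recovery(index="my-index", active_only=True, format="json"))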
- def pending_tasks( - self, - *, - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - time: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def thread_pool( - self, - *, - thread_pool_patterns: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - size: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def fielddata( - self, - *, - fields: Optional[Any] = ..., - bytes: Optional[Any] = ..., - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def plugins( - self, - *, - format: Optional[Any] = ..., - h: Optional[Any] = ..., - help: Optional[Any] = ..., - include_bootstrap: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - s: Optional[Any] = ..., - v: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
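The node-resource views take the same cat options; a sketch with hypothetical pool patterns and field names:

    print(client.cat.thread_pool(thread_pool_patterns="search,write", v=True))
    print(client.cat.fielddata(fields="title,tags", bytes="kb", format="json"))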
-    def nodeattrs(
-        self,
-        *,
-        format: Optional[Any] = ...,
-        h: Optional[Any] = ...,
-        help: Optional[Any] = ...,
-        local: Optional[Any] = ...,
-        master_timeout: Optional[Any] = ...,
-        cluster_manager_timeout: Optional[Any] = ...,
-        s: Optional[Any] = ...,
-        v: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    def repositories(
-        self,
-        *,
-        format: Optional[Any] = ...,
-        h: Optional[Any] = ...,
-        help: Optional[Any] = ...,
-        local: Optional[Any] = ...,
-        master_timeout: Optional[Any] = ...,
-        cluster_manager_timeout: Optional[Any] = ...,
-        s: Optional[Any] = ...,
-        v: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    def snapshots(
-        self,
-        *,
-        repository: Optional[Any] = ...,
-        format: Optional[Any] = ...,
-        h: Optional[Any] = ...,
-        help: Optional[Any] = ...,
-        ignore_unavailable: Optional[Any] = ...,
-        master_timeout: Optional[Any] = ...,
-        cluster_manager_timeout: Optional[Any] = ...,
-        s: Optional[Any] = ...,
-        time: Optional[Any] = ...,
-        v: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    def tasks(
-        self,
-        *,
-        actions: Optional[Any] = ...,
-        detailed: Optional[Any] = ...,
-        format: Optional[Any] = ...,
-        h: Optional[Any] = ...,
-        help: Optional[Any] = ...,
-        nodes: Optional[Any] = ...,
-        parent_task_id: Optional[Any] = ...,
-        s: Optional[Any] = ...,
-        time: Optional[Any] = ...,
-        v: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
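The stubs deleted above and below describe the cat APIs whose signatures now live inline in `opensearchpy/client/cat.py`; callers are unaffected. For orientation, a minimal sketch of exercising two of them (host, port, and index pattern are placeholders, not part of this change):

from opensearchpy import OpenSearch

# Placeholder connection details; adjust for your cluster.
client = OpenSearch(hosts=[{"host": "localhost", "port": 9200}])

# v=True prints column headers; format="json" returns parsed JSON instead.
print(client.cat.tasks(detailed=True, v=True))
print(client.cat.shards(index="logs-*", format="json"))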
-    def templates(
-        self,
-        *,
-        name: Optional[Any] = ...,
-        format: Optional[Any] = ...,
-        h: Optional[Any] = ...,
-        help: Optional[Any] = ...,
-        local: Optional[Any] = ...,
-        master_timeout: Optional[Any] = ...,
-        cluster_manager_timeout: Optional[Any] = ...,
-        s: Optional[Any] = ...,
-        v: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    def transforms(
-        self,
-        *,
-        transform_id: Optional[Any] = ...,
-        allow_no_match: Optional[Any] = ...,
-        format: Optional[Any] = ...,
-        from_: Optional[Any] = ...,
-        h: Optional[Any] = ...,
-        help: Optional[Any] = ...,
-        s: Optional[Any] = ...,
-        size: Optional[Any] = ...,
-        time: Optional[Any] = ...,
-        v: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
diff --git a/opensearchpy/client/client.py b/opensearchpy/client/client.py
new file mode 100644
index 00000000..7f0b67c6
--- /dev/null
+++ b/opensearchpy/client/client.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+# SPDX-License-Identifier: Apache-2.0
+#
+# The OpenSearch Contributors require contributions made to
+# this file be licensed under the Apache-2.0 license or a
+# compatible open source license.
+#
+# Modifications Copyright OpenSearch Contributors. See
+# GitHub history for details.
+
+from typing import Any, Optional, Type
+
+from opensearchpy.client.utils import _normalize_hosts
+from opensearchpy.transport import Transport
+
+
+class Client(object):
+    """
+    A generic OpenSearch client.
+    """
+
+    def __init__(
+        self,
+        hosts: Optional[str] = None,
+        transport_class: Type[Transport] = Transport,
+        **kwargs: Any
+    ) -> None:
+        """
+        :arg hosts: list of nodes, or a single node, we should connect to.
+            Node should be a dictionary ({"host": "localhost", "port": 9200}),
+            the entire dictionary will be passed to the :class:`~opensearchpy.Connection`
+            class as kwargs, or a string in the format of ``host[:port]`` which will be
+            translated to a dictionary automatically. If no value is given the
+            :class:`~opensearchpy.Connection` class defaults will be used.
+
+        :arg transport_class: :class:`~opensearchpy.Transport` subclass to use.
+
+        :arg kwargs: any additional arguments will be passed on to the
+            :class:`~opensearchpy.Transport` class and, subsequently, to the
+            :class:`~opensearchpy.Connection` instances.
+ """ + self.transport = transport_class(_normalize_hosts(hosts), **kwargs) diff --git a/opensearchpy/client/cluster.py b/opensearchpy/client/cluster.py index fd749cbc..f2770f2d 100644 --- a/opensearchpy/client/cluster.py +++ b/opensearchpy/client/cluster.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,16 +26,29 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + +from typing import Any + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class ClusterClient(NamespacedClient): @query_params( + "awareness_attribute", + "cluster_manager_timeout", "expand_wildcards", "level", "local", "master_timeout", - "cluster_manager_timeout", "timeout", "wait_for_active_shards", "wait_for_events", @@ -43,37 +57,46 @@ class ClusterClient(NamespacedClient): "wait_for_nodes", "wait_for_status", ) - def health(self, index=None, params=None, headers=None): + def health( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns basic information about the health of the cluster. - :arg index: Limit the information returned to a specific index + :arg index: Limit the information returned to specific indicies. + :arg awareness_attribute: The awareness attribute for which the + health is required. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: all - :arg level: Specify the level of detail for returned information - Valid choices: cluster, indices, shards Default: cluster + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg level: Specify the level of detail for returned + information. Valid choices are cluster, indices, shards, + awareness_attributes. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Wait until the specified number of - shards is active + shards is active. :arg wait_for_events: Wait until all currently queued events - with the given priority are processed Valid choices: immediate, urgent, - high, normal, low, languid + with the given priority are processed. Valid choices are immediate, + urgent, high, normal, low, languid. 
         :arg wait_for_no_initializing_shards: Whether to wait until
-            there are no initializing shards in the cluster
+            there are no initializing shards in the cluster.
         :arg wait_for_no_relocating_shards: Whether to wait until there
-            are no relocating shards in the cluster
+            are no relocating shards in the cluster.
         :arg wait_for_nodes: Wait until the specified number of nodes is
-            available
-        :arg wait_for_status: Wait until cluster is in a specific state
-            Valid choices: green, yellow, red
+            available.
+        :arg wait_for_status: Wait until cluster is in a specific state.
+            Valid choices are green, yellow, red.
         """
         return self.transport.perform_request(
             "GET",
@@ -82,17 +105,24 @@ def health(self, index=None, params=None, headers=None):
             headers=headers,
         )

-    @query_params("local", "master_timeout", "cluster_manager_timeout")
-    def pending_tasks(self, params=None, headers=None):
+    @query_params("cluster_manager_timeout", "local", "master_timeout")
+    def pending_tasks(
+        self,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Returns a list of any cluster-level changes (e.g. create index, update mapping,
         allocate or fail shard) which have not yet been executed.


+        :arg cluster_manager_timeout: Operation timeout for connection
+            to cluster-manager node.
         :arg local: Return local information, do not retrieve the state
-            from cluster_manager node (default: false)
-        :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master
-        :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager
+            from cluster-manager node. Default is false.
+        :arg master_timeout (Deprecated: To promote inclusive language,
+            use 'cluster_manager_timeout' instead.): Operation timeout for
+            connection to master node.
         """
         return self.transport.perform_request(
             "GET", "/_cluster/pending_tasks", params=params, headers=headers
@@ -100,43 +130,52 @@ def pending_tasks(self, params=None, headers=None):

     @query_params(
         "allow_no_indices",
+        "cluster_manager_timeout",
         "expand_wildcards",
         "flat_settings",
         "ignore_unavailable",
         "local",
         "master_timeout",
-        "cluster_manager_timeout",
         "wait_for_metadata_version",
         "wait_for_timeout",
     )
-    def state(self, metric=None, index=None, params=None, headers=None):
+    def state(
+        self,
+        metric: Any = None,
+        index: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Returns a comprehensive information about the state of the cluster.


         :arg metric: Limit the information returned to the specified
-            metrics Valid choices: _all, blocks, metadata, nodes, routing_table,
-            routing_nodes, master_node, version
-        :arg index: A comma-separated list of index names; use `_all` or
-            empty string to perform the operation on all indices
+            metrics. Valid choices are _all, blocks, metadata, nodes, routing_table,
+            routing_nodes, master_node, cluster_manager_node, version.
+        :arg index: Comma-separated list of indices; use `_all` or empty
+            string to perform the operation on all indices.
         :arg allow_no_indices: Whether to ignore if a wildcard indices
             expression resolves into no concrete indices. (This includes `_all`
-            string or when no indices have been specified)
+            string or when no indices have been specified).
+        :arg cluster_manager_timeout: Operation timeout for connection
+            to cluster-manager node.
         :arg expand_wildcards: Whether to expand wildcard expression to
-            concrete indices that are open, closed or both. Valid choices: open,
-            closed, hidden, none, all  Default: open
-        :arg flat_settings: Return settings in flat format (default:
-            false)
+            concrete indices that are open, closed or both. Valid choices are all,
+            open, closed, hidden, none.
+        :arg flat_settings: Return settings in flat format. Default is
+            false.
         :arg ignore_unavailable: Whether specified concrete indices
-            should be ignored when unavailable (missing or closed)
+            should be ignored when unavailable (missing or closed).
         :arg local: Return local information, do not retrieve the state
-            from cluster_manager node (default: false)
-        :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master
-        :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager
+            from cluster-manager node. Default is false.
+        :arg master_timeout (Deprecated: To promote inclusive language,
+            use 'cluster_manager_timeout' instead.): Operation timeout for
+            connection to master node.
         :arg wait_for_metadata_version: Wait for the metadata version to
-            be equal or greater than the specified metadata version
+            be equal or greater than the specified metadata version.
         :arg wait_for_timeout: The maximum time to wait for
-            wait_for_metadata_version before timing out
+            wait_for_metadata_version before timing out.
         """
         if index and metric in SKIP_IN_PATH:
             metric = "_all"
@@ -149,18 +188,23 @@ def state(metric=None, index=None, params=None, headers=None):
         )

     @query_params("flat_settings", "timeout")
-    def stats(self, node_id=None, params=None, headers=None):
+    def stats(
+        self,
+        node_id: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Returns high-level overview of cluster statistics.


-        :arg node_id: A comma-separated list of node IDs or names to
-            limit the returned information; use `_local` to return information from
-            the node you're connecting to, leave empty to get information from all
-            nodes
-        :arg flat_settings: Return settings in flat format (default:
-            false)
-        :arg timeout: Explicit operation timeout
+        :arg node_id: Comma-separated list of node IDs or names to limit
+            the returned information; use `_local` to return information from the
+            node you're connecting to, leave empty to get information from all
+            nodes.
+        :arg flat_settings: Return settings in flat format. Default is
+            false.
+        :arg timeout: Operation timeout.
         """
         return self.transport.perform_request(
             "GET",
@@ -172,83 +216,99 @@ def stats(self, node_id=None, params=None, headers=None):
         )

     @query_params(
+        "cluster_manager_timeout",
         "dry_run",
         "explain",
         "master_timeout",
-        "cluster_manager_timeout",
         "metric",
         "retry_failed",
         "timeout",
     )
-    def reroute(self, body=None, params=None, headers=None):
+    def reroute(
+        self,
+        body: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Allows to manually change the allocation of individual shards in the cluster.


         :arg body: The definition of `commands` to perform (`move`,
             `cancel`, `allocate`)
+        :arg cluster_manager_timeout: Operation timeout for connection
+            to cluster-manager node.
         :arg dry_run: Simulate the operation only and return the
-            resulting state
+            resulting state.
         :arg explain: Return an explanation of why the commands can or
-            cannot be executed
+            cannot be executed.
+        :arg master_timeout (Deprecated: To promote inclusive language,
+            use 'cluster_manager_timeout' instead.): Operation timeout for
+            connection to master node.
         :arg metric: Limit the information returned to the specified
-            metrics. Defaults to all but metadata Valid choices: _all, blocks,
-            metadata, nodes, routing_table, master_node, version
+            metrics. Defaults to all but metadata.
         :arg retry_failed: Retries allocation of shards that are blocked
-            due to too many subsequent allocation failures
-        :arg timeout: Explicit operation timeout
+            due to too many subsequent allocation failures.
+        :arg timeout: Operation timeout.
         """
         return self.transport.perform_request(
             "POST", "/_cluster/reroute", params=params, headers=headers, body=body
         )

     @query_params(
+        "cluster_manager_timeout",
         "flat_settings",
         "include_defaults",
         "master_timeout",
-        "cluster_manager_timeout",
         "timeout",
     )
-    def get_settings(self, params=None, headers=None):
+    def get_settings(
+        self,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Returns cluster settings.


-        :arg flat_settings: Return settings in flat format (default:
-            false)
+        :arg cluster_manager_timeout: Operation timeout for connection
+            to cluster-manager node.
+        :arg flat_settings: Return settings in flat format. Default is
+            false.
         :arg include_defaults: Whether to return all default clusters
-            setting.
-        :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection
-            to master node
-        :arg cluster_manager_timeout: Explicit operation timeout for connection
-            to cluster_manager node
-        :arg timeout: Explicit operation timeout
+            setting. Default is false.
+        :arg master_timeout (Deprecated: To promote inclusive language,
+            use 'cluster_manager_timeout' instead.): Operation timeout for
+            connection to master node.
+        :arg timeout: Operation timeout.
         """
         return self.transport.perform_request(
             "GET", "/_cluster/settings", params=params, headers=headers
         )

     @query_params(
-        "flat_settings", "master_timeout", "cluster_manager_timeout", "timeout"
+        "cluster_manager_timeout", "flat_settings", "master_timeout", "timeout"
     )
-    def put_settings(self, body, params=None, headers=None):
+    def put_settings(
+        self,
+        body: Any,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Updates the cluster settings.


         :arg body: The settings to be updated. Can be either `transient`
             or `persistent` (survives cluster restart).
-        :arg flat_settings: Return settings in flat format (default:
-            false)
-        :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection
-            to master node
-        :arg cluster_manager_timeout: Explicit operation timeout for connection
-            to cluster_manager node
-        :arg timeout: Explicit operation timeout
+        :arg cluster_manager_timeout: Operation timeout for connection
+            to cluster-manager node.
+        :arg flat_settings: Return settings in flat format. Default is
+            false.
+        :arg master_timeout (Deprecated: To promote inclusive language,
+            use 'cluster_manager_timeout' instead.): Operation timeout for
+            connection to master node.
+        :arg timeout: Operation timeout.
         """
         if body in SKIP_IN_PATH:
             raise ValueError("Empty value passed for a required argument 'body'.")
@@ -258,7 +318,11 @@ def put_settings(self, body, params=None, headers=None):
         )

     @query_params()
-    def remote_info(self, params=None, headers=None):
+    def remote_info(
+        self,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Returns the information about configured remote clusters.
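`get_settings` and `put_settings` are the usual pair for cluster-level tuning; a short sketch, where the setting name is only an illustration:

# Persistent settings survive a full cluster restart; transient ones do not.
client.cluster.put_settings(
    body={"transient": {"cluster.routing.allocation.enable": "primaries"}}
)

# flat_settings=True returns dotted keys instead of nested objects.
settings = client.cluster.get_settings(flat_settings=True, include_defaults=False)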
@@ -268,7 +332,12 @@ def remote_info(self, params=None, headers=None):
         )

     @query_params("include_disk_info", "include_yes_decisions")
-    def allocation_explain(self, body=None, params=None, headers=None):
+    def allocation_explain(
+        self,
+        body: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Provides explanations for shard allocations in the cluster.
@@ -276,9 +345,9 @@ def allocation_explain(self, body=None, params=None, headers=None):
         :arg body: The index, shard, and primary flag to explain. Empty
             means 'explain the first unassigned shard'
         :arg include_disk_info: Return information about disk usage and
-            shard sizes (default: false)
+            shard sizes. Default is false.
         :arg include_yes_decisions: Return 'YES' decisions in
-            explanation (default: false)
+            explanation. Default is false.
         """
         return self.transport.perform_request(
             "POST",
@@ -288,16 +357,24 @@ def allocation_explain(self, body=None, params=None, headers=None):
             body=body,
         )

-    @query_params("master_timeout", "cluster_manager_timeout", "timeout")
-    def delete_component_template(self, name, params=None, headers=None):
+    @query_params("cluster_manager_timeout", "master_timeout", "timeout")
+    def delete_component_template(
+        self,
+        name: Any,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
-        Deletes a component template
+        Deletes a component template.


-        :arg name: The name of the template
-        :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master
-        :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager
-        :arg timeout: Explicit operation timeout
+        :arg name: The name of the template.
+        :arg cluster_manager_timeout: Operation timeout for connection
+            to cluster-manager node.
+        :arg master_timeout (Deprecated: To promote inclusive language,
+            use 'cluster_manager_timeout' instead.): Operation timeout for
+            connection to master node.
+        :arg timeout: Operation timeout.
         """
         if name in SKIP_IN_PATH:
             raise ValueError("Empty value passed for a required argument 'name'.")
@@ -309,19 +386,25 @@ def delete_component_template(self, name, params=None, headers=None):
             headers=headers,
         )

-    @query_params("local", "master_timeout", "cluster_manager_timeout")
-    def get_component_template(self, name=None, params=None, headers=None):
+    @query_params("cluster_manager_timeout", "local", "master_timeout")
+    def get_component_template(
+        self,
+        name: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
-        Returns one or more component templates
+        Returns one or more component templates.


-        :arg name: The comma separated names of the component templates
+        :arg name: The comma-separated names of the component templates.
+        :arg cluster_manager_timeout: Operation timeout for connection
+            to cluster-manager node.
         :arg local: Return local information, do not retrieve the state
-            from cluster_manager node (default: false)
-        :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection
-            to master node
-        :arg cluster_manager_timeout: Explicit operation timeout for connection
-            to cluster_manager node
+            from cluster-manager node. Default is false.
+        :arg master_timeout (Deprecated: To promote inclusive language,
+            use 'cluster_manager_timeout' instead.): Operation timeout for
+            connection to master node.
""" return self.transport.perform_request( "GET", @@ -330,19 +413,28 @@ def get_component_template(self, name=None, params=None, headers=None): headers=headers, ) - @query_params("create", "master_timeout", "cluster_manager_timeout", "timeout") - def put_component_template(self, name, body, params=None, headers=None): + @query_params("cluster_manager_timeout", "create", "master_timeout", "timeout") + def put_component_template( + self, + name: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Creates or updates a component template + Creates or updates a component template. - :arg name: The name of the template + :arg name: The name of the template. :arg body: The template definition + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg create: Whether the index template should only be added if - new or can also replace an existing one - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + new or can also replace an existing one. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ for param in (name, body): if param in SKIP_IN_PATH: @@ -356,19 +448,25 @@ def put_component_template(self, name, body, params=None, headers=None): body=body, ) - @query_params("local", "master_timeout", "cluster_manager_timeout") - def exists_component_template(self, name, params=None, headers=None): + @query_params("cluster_manager_timeout", "local", "master_timeout") + def exists_component_template( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Returns information about whether a particular component template exist + Returns information about whether a particular component template exist. - :arg name: The name of the template + :arg name: The name of the template. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -381,14 +479,18 @@ def exists_component_template(self, name, params=None, headers=None): ) @query_params("wait_for_removal") - def delete_voting_config_exclusions(self, params=None, headers=None): + def delete_voting_config_exclusions( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Clears cluster voting config exclusions. :arg wait_for_removal: Specifies whether to wait for all excluded nodes to be removed from the cluster before clearing the voting - configuration exclusions list. Default: True + configuration exclusions list. Default is True. 
""" return self.transport.perform_request( "DELETE", @@ -398,19 +500,166 @@ def delete_voting_config_exclusions(self, params=None, headers=None): ) @query_params("node_ids", "node_names", "timeout") - def post_voting_config_exclusions(self, params=None, headers=None): + def post_voting_config_exclusions( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates the cluster voting config exclusions by node ids or node names. - :arg node_ids: A comma-separated list of the persistent ids of - the nodes to exclude from the voting configuration. If specified, you - may not also specify ?node_names. - :arg node_names: A comma-separated list of the names of the + :arg node_ids: Comma-separated list of the persistent ids of the nodes to exclude from the voting configuration. If specified, you may - not also specify ?node_ids. - :arg timeout: Explicit operation timeout Default: 30s + not also specify ?node_names. + :arg node_names: Comma-separated list of the names of the nodes + to exclude from the voting configuration. If specified, you may not also + specify ?node_ids. + :arg timeout: Operation timeout. """ return self.transport.perform_request( "POST", "/_cluster/voting_config_exclusions", params=params, headers=headers ) + + @query_params() + def delete_decommission_awareness( + self, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Delete any existing decommission. + + """ + return self.transport.perform_request( + "DELETE", + "/_cluster/decommission/awareness/", + params=params, + headers=headers, + ) + + @query_params() + def delete_weighted_routing( + self, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Delete weighted shard routing weights. + + """ + return self.transport.perform_request( + "DELETE", + "/_cluster/routing/awareness/weights", + params=params, + headers=headers, + ) + + @query_params() + def get_decommission_awareness( + self, + awareness_attribute_name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Get details and status of decommissioned attribute. + + + :arg awareness_attribute_name: Awareness attribute name. + """ + if awareness_attribute_name in SKIP_IN_PATH: + raise ValueError( + "Empty value passed for a required argument 'awareness_attribute_name'." + ) + + return self.transport.perform_request( + "GET", + _make_path( + "_cluster", + "decommission", + "awareness", + awareness_attribute_name, + "_status", + ), + params=params, + headers=headers, + ) + + @query_params() + def get_weighted_routing( + self, + attribute: Any, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Fetches weighted shard routing weights. + + + :arg attribute: Awareness attribute name. + """ + if attribute in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'attribute'.") + + return self.transport.perform_request( + "GET", + _make_path("_cluster", "routing", "awareness", attribute, "weights"), + params=params, + headers=headers, + ) + + @query_params() + def put_decommission_awareness( + self, + awareness_attribute_name: Any, + awareness_attribute_value: Any, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Decommissions an awareness attribute. + + + :arg awareness_attribute_name: Awareness attribute name. + :arg awareness_attribute_value: Awareness attribute value. 
+ """ + for param in (awareness_attribute_name, awareness_attribute_value): + if param in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument.") + + return self.transport.perform_request( + "PUT", + _make_path( + "_cluster", + "decommission", + "awareness", + awareness_attribute_name, + awareness_attribute_value, + ), + params=params, + headers=headers, + ) + + @query_params() + def put_weighted_routing( + self, + attribute: Any, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Updates weighted shard routing weights. + + + :arg attribute: Awareness attribute name. + """ + if attribute in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'attribute'.") + + return self.transport.perform_request( + "PUT", + _make_path("_cluster", "routing", "awareness", attribute, "weights"), + params=params, + headers=headers, + ) diff --git a/opensearchpy/client/cluster.pyi b/opensearchpy/client/cluster.pyi deleted file mode 100644 index 49b27c54..00000000 --- a/opensearchpy/client/cluster.pyi +++ /dev/null @@ -1,344 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class ClusterClient(NamespacedClient): - def health( - self, - *, - index: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - level: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - wait_for_events: Optional[Any] = ..., - wait_for_no_initializing_shards: Optional[Any] = ..., - wait_for_no_relocating_shards: Optional[Any] = ..., - wait_for_nodes: Optional[Any] = ..., - wait_for_status: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
-    def pending_tasks(
-        self,
-        *,
-        local: Optional[Any] = ...,
-        master_timeout: Optional[Any] = ...,
-        cluster_manager_timeout: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    def state(
-        self,
-        *,
-        metric: Optional[Any] = ...,
-        index: Optional[Any] = ...,
-        allow_no_indices: Optional[Any] = ...,
-        expand_wildcards: Optional[Any] = ...,
-        flat_settings: Optional[Any] = ...,
-        ignore_unavailable: Optional[Any] = ...,
-        local: Optional[Any] = ...,
-        master_timeout: Optional[Any] = ...,
-        cluster_manager_timeout: Optional[Any] = ...,
-        wait_for_metadata_version: Optional[Any] = ...,
-        wait_for_timeout: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    def stats(
-        self,
-        *,
-        node_id: Optional[Any] = ...,
-        flat_settings: Optional[Any] = ...,
-        timeout: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    def reroute(
-        self,
-        *,
-        body: Optional[Any] = ...,
-        dry_run: Optional[Any] = ...,
-        explain: Optional[Any] = ...,
-        master_timeout: Optional[Any] = ...,
-        cluster_manager_timeout: Optional[Any] = ...,
-        metric: Optional[Any] = ...,
-        retry_failed: Optional[Any] = ...,
-        timeout: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    def get_settings(
-        self,
-        *,
-        flat_settings: Optional[Any] = ...,
-        include_defaults: Optional[Any] = ...,
-        master_timeout: Optional[Any] = ...,
-        cluster_manager_timeout: Optional[Any] = ...,
-        timeout: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    def put_settings(
-        self,
-        *,
-        body: Any,
-        flat_settings: Optional[Any] = ...,
-        master_timeout: Optional[Any] = ...,
-        cluster_manager_timeout: Optional[Any] = ...,
-        timeout: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    def remote_info(
-        self,
-        *,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    def allocation_explain(
-        self,
-        *,
-        body: Optional[Any] = ...,
-        include_disk_info: Optional[Any] = ...,
-        include_yes_decisions: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    def delete_component_template(
-        self,
-        name: Any,
-        *,
-        master_timeout: Optional[Any] = ...,
-        cluster_manager_timeout: Optional[Any] = ...,
-        timeout: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    def get_component_template(
-        self,
-        *,
-        name: Optional[Any] = ...,
-        local: Optional[Any] = ...,
-        master_timeout: Optional[Any] = ...,
-        cluster_manager_timeout: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    def put_component_template(
-        self,
-        name: Any,
-        *,
-        body: Any,
-        create: Optional[Any] = ...,
-        master_timeout: Optional[Any] = ...,
-        cluster_manager_timeout: Optional[Any] = ...,
-        timeout: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    def exists_component_template(
-        self,
-        name: Any,
-        *,
-        local: Optional[Any] = ...,
-        master_timeout: Optional[Any] = ...,
-        cluster_manager_timeout: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> bool: ...
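The component-template methods whose stubs are deleted here keep their runtime behavior; a minimal usage sketch (template name and settings are illustrative, not part of the change):

# Create-or-update, probe for existence, then delete.
client.cluster.put_component_template(
    name="logs-settings",
    body={"template": {"settings": {"number_of_shards": 1}}},
)
if client.cluster.exists_component_template("logs-settings"):
    client.cluster.delete_component_template("logs-settings")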
-    def delete_voting_config_exclusions(
-        self,
-        *,
-        wait_for_removal: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    def post_voting_config_exclusions(
-        self,
-        *,
-        node_ids: Optional[Any] = ...,
-        node_names: Optional[Any] = ...,
-        timeout: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
diff --git a/opensearchpy/client/dangling_indices.py b/opensearchpy/client/dangling_indices.py
index 4d1b5a36..8617708e 100644
--- a/opensearchpy/client/dangling_indices.py
+++ b/opensearchpy/client/dangling_indices.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
@@ -25,24 +26,44 @@
 # under the License.


+# ----------------------------------------------------
+# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST.
+#
+# To contribute, kindly make essential modifications through either the "opensearch-py client generator":
+# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py
+# or the "OpenSearch API specification" available at:
+# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json
+# -----------------------------------------------------
+
+
+from typing import Any
+
 from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params


 class DanglingIndicesClient(NamespacedClient):
     @query_params(
-        "accept_data_loss", "master_timeout", "cluster_manager_timeout", "timeout"
+        "accept_data_loss", "cluster_manager_timeout", "master_timeout", "timeout"
     )
-    def delete_dangling_index(self, index_uuid, params=None, headers=None):
+    def delete_dangling_index(
+        self,
+        index_uuid: Any,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
-        Deletes the specified dangling index
+        Deletes the specified dangling index.


-        :arg index_uuid: The UUID of the dangling index
+        :arg index_uuid: The UUID of the dangling index.
         :arg accept_data_loss: Must be set to true in order to delete
-            the dangling index
-        :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master
-        :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager
-        :arg timeout: Explicit operation timeout
+            the dangling index.
+        :arg cluster_manager_timeout: Operation timeout for connection
+            to cluster-manager node.
+        :arg master_timeout (Deprecated: To promote inclusive language,
+            use 'cluster_manager_timeout' instead.): Operation timeout for
+            connection to master node.
+        :arg timeout: Operation timeout.
         """
         if index_uuid in SKIP_IN_PATH:
             raise ValueError("Empty value passed for a required argument 'index_uuid'.")
@@ -55,19 +76,27 @@ def delete_dangling_index(self, index_uuid, params=None, headers=None):
         )

     @query_params(
-        "accept_data_loss", "master_timeout", "cluster_manager_timeout", "timeout"
+        "accept_data_loss", "cluster_manager_timeout", "master_timeout", "timeout"
     )
-    def import_dangling_index(self, index_uuid, params=None, headers=None):
+    def import_dangling_index(
+        self,
+        index_uuid: Any,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
-        Imports the specified dangling index
+        Imports the specified dangling index.


-        :arg index_uuid: The UUID of the dangling index
+        :arg index_uuid: The UUID of the dangling index.
         :arg accept_data_loss: Must be set to true in order to import
-            the dangling index
-        :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master
-        :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager
-        :arg timeout: Explicit operation timeout
+            the dangling index.
+        :arg cluster_manager_timeout: Operation timeout for connection
+            to cluster-manager node.
+        :arg master_timeout (Deprecated: To promote inclusive language,
+            use 'cluster_manager_timeout' instead.): Operation timeout for
+            connection to master node.
+        :arg timeout: Operation timeout.
         """
         if index_uuid in SKIP_IN_PATH:
             raise ValueError("Empty value passed for a required argument 'index_uuid'.")
@@ -77,7 +106,11 @@ def import_dangling_index(self, index_uuid, params=None, headers=None):
         )

     @query_params()
-    def list_dangling_indices(self, params=None, headers=None):
+    def list_dangling_indices(
+        self,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Returns all dangling indices.
diff --git a/opensearchpy/client/dangling_indices.pyi b/opensearchpy/client/dangling_indices.pyi
deleted file mode 100644
index 56e4a72f..00000000
--- a/opensearchpy/client/dangling_indices.pyi
+++ /dev/null
@@ -1,89 +0,0 @@
-# SPDX-License-Identifier: Apache-2.0
-#
-# The OpenSearch Contributors require contributions made to
-# this file be licensed under the Apache-2.0 license or a
-# compatible open source license.
-#
-# Modifications Copyright OpenSearch Contributors. See
-# GitHub history for details.
-#
-# Licensed to Elasticsearch B.V. under one or more contributor
-# license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright
-# ownership. Elasticsearch B.V. licenses this file to you under
-# the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-from typing import Any, Collection, MutableMapping, Optional, Tuple, Union
-
-from .utils import NamespacedClient
-
-class DanglingIndicesClient(NamespacedClient):
-    def delete_dangling_index(
-        self,
-        index_uuid: Any,
-        *,
-        accept_data_loss: Optional[Any] = ...,
-        master_timeout: Optional[Any] = ...,
-        cluster_manager_timeout: Optional[Any] = ...,
-        timeout: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    def import_dangling_index(
-        self,
-        index_uuid: Any,
-        *,
-        accept_data_loss: Optional[Any] = ...,
-        master_timeout: Optional[Any] = ...,
-        cluster_manager_timeout: Optional[Any] = ...,
-        timeout: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    def list_dangling_indices(
-        self,
-        *,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
diff --git a/opensearchpy/client/features.py b/opensearchpy/client/features.py
index a9e6ab95..c6520fa1 100644
--- a/opensearchpy/client/features.py
+++ b/opensearchpy/client/features.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
@@ -25,12 +26,14 @@
 # under the License.
+from typing import Any
+
 from .utils import NamespacedClient, query_params


 class FeaturesClient(NamespacedClient):
     @query_params("master_timeout", "cluster_manager_timeout")
-    def get_features(self, params=None, headers=None):
+    def get_features(self, params: Any = None, headers: Any = None) -> Any:
         """
         Gets a list of features which can be included in snapshots using the
         feature_states field when creating a snapshot
@@ -46,7 +49,7 @@ def get_features(self, params=None, headers=None):
         )

     @query_params()
-    def reset_features(self, params=None, headers=None):
+    def reset_features(self, params: Any = None, headers: Any = None) -> Any:
         """
         Resets the internal state of features, usually by deleting system indices
diff --git a/opensearchpy/client/features.pyi b/opensearchpy/client/features.pyi
deleted file mode 100644
index 8da34e42..00000000
--- a/opensearchpy/client/features.pyi
+++ /dev/null
@@ -1,65 +0,0 @@
-# SPDX-License-Identifier: Apache-2.0
-#
-# The OpenSearch Contributors require contributions made to
-# this file be licensed under the Apache-2.0 license or a
-# compatible open source license.
-#
-# Modifications Copyright OpenSearch Contributors. See
-# GitHub history for details.
-#
-# Licensed to Elasticsearch B.V. under one or more contributor
-# license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright
-# ownership. Elasticsearch B.V. licenses this file to you under
-# the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-from typing import Any, Collection, MutableMapping, Optional, Tuple, Union
-
-from .utils import NamespacedClient
-
-class FeaturesClient(NamespacedClient):
-    def get_features(
-        self,
-        *,
-        master_timeout: Optional[Any] = ...,
-        cluster_manager_timeout: Optional[Any] = ...,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
-    def reset_features(
-        self,
-        *,
-        pretty: Optional[bool] = ...,
-        human: Optional[bool] = ...,
-        error_trace: Optional[bool] = ...,
-        format: Optional[str] = ...,
-        filter_path: Optional[Union[str, Collection[str]]] = ...,
-        request_timeout: Optional[Union[int, float]] = ...,
-        ignore: Optional[Union[int, Collection[int]]] = ...,
-        opaque_id: Optional[str] = ...,
-        http_auth: Optional[Union[str, Tuple[str, str]]] = ...,
-        api_key: Optional[Union[str, Tuple[str, str]]] = ...,
-        params: Optional[MutableMapping[str, Any]] = ...,
-        headers: Optional[MutableMapping[str, str]] = ...,
-    ) -> Any: ...
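Before the indices client below, the reworked dangling-indices flow above deserves one example. A hedged sketch; the response field names follow the `_dangling` REST API, and accepting data loss is always explicit:

# Dangling indices are reported with their UUIDs; importing one requires
# explicitly opting in to possible data loss.
dangling = client.dangling_indices.list_dangling_indices()
for entry in dangling.get("dangling_indices", []):
    client.dangling_indices.import_dangling_index(
        index_uuid=entry["index_uuid"], accept_data_loss=True
    )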
diff --git a/opensearchpy/client/indices.py b/opensearchpy/client/indices.py
index 138692e1..7cdc7e57 100644
--- a/opensearchpy/client/indices.py
+++ b/opensearchpy/client/indices.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
@@ -25,12 +26,30 @@
 # under the License.


+# ----------------------------------------------------
+# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST.
+#
+# To contribute, kindly make essential modifications through either the "opensearch-py client generator":
+# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py
+# or the "OpenSearch API specification" available at:
+# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json
+# -----------------------------------------------------
+
+
+from typing import Any
+
 from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params


 class IndicesClient(NamespacedClient):
     @query_params()
-    def analyze(self, body=None, index=None, params=None, headers=None):
+    def analyze(
+        self,
+        body: Any = None,
+        index: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Performs the analysis process on a text and return the tokens breakdown of the
         text.
@@ -38,7 +57,7 @@ def analyze(self, body=None, index=None, params=None, headers=None):
         :arg body: Define analyzer/tokenizer parameters and the text on
             which the analysis should be performed
-        :arg index: The name of the index to scope the operation
+        :arg index: The name of the index to scope the operation.
         """
         return self.transport.perform_request(
             "POST",
@@ -49,21 +68,26 @@ def analyze(self, body=None, index=None, params=None, headers=None):
         )

     @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable")
-    def refresh(self, index=None, params=None, headers=None):
+    def refresh(
+        self,
+        index: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Performs the refresh operation in one or more indices.


-        :arg index: A comma-separated list of index names; use `_all` or
-            empty string to perform the operation on all indices
+        :arg index: Comma-separated list of indices; use `_all` or empty
+            string to perform the operation on all indices.
         :arg allow_no_indices: Whether to ignore if a wildcard indices
             expression resolves into no concrete indices. (This includes `_all`
-            string or when no indices have been specified)
+            string or when no indices have been specified).
         :arg expand_wildcards: Whether to expand wildcard expression to
-            concrete indices that are open, closed or both. Valid choices: open,
-            closed, hidden, none, all  Default: open
+            concrete indices that are open, closed or both. Valid choices are all,
+            open, closed, hidden, none.
         :arg ignore_unavailable: Whether specified concrete indices
-            should be ignored when unavailable (missing or closed)
+            should be ignored when unavailable (missing or closed).
         """
         return self.transport.perform_request(
             "POST", _make_path(index, "_refresh"), params=params, headers=headers
@@ -76,49 +100,63 @@ def refresh(self, index=None, params=None, headers=None):
         "ignore_unavailable",
         "wait_if_ongoing",
     )
-    def flush(self, index=None, params=None, headers=None):
+    def flush(
+        self,
+        index: Any = None,
+        params: Any = None,
+        headers: Any = None,
+    ) -> Any:
         """
         Performs the flush operation on one or more indices.
- :arg index: A comma-separated list of index names; use `_all` or - empty string for all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg force: Whether a flush should be forced even if it is not necessarily needed i.e. if no changes will be committed to the index. This is useful if transaction log IDs should be incremented even if no uncommitted changes are present. (This setting can be considered as - internal) + internal). :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg wait_if_ongoing: If set to true the flush operation will block until the flush can be executed if another flush operation is - already executing. The default is true. If set to false the flush will - be skipped iff if another flush operation is already running. + already executing. If set to false the flush will be skipped if + another flush operation is already running. Default is True. """ return self.transport.perform_request( "POST", _make_path(index, "_flush"), params=params, headers=headers ) @query_params( - "master_timeout", "cluster_manager_timeout", "timeout", "wait_for_active_shards" + "cluster_manager_timeout", "master_timeout", "timeout", "wait_for_active_shards" ) - def create(self, index, body=None, params=None, headers=None): + def create( + self, + index: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates an index with optional settings and mappings. - :arg index: The name of the index + :arg index: Index name. :arg body: The configuration for the index (`settings` and `mappings`) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Set the number of active shards to wait for before the operation returns. """ @@ -130,20 +168,30 @@ def create(self, index, body=None, params=None, headers=None): ) @query_params( - "master_timeout", "cluster_manager_timeout", "timeout", "wait_for_active_shards" + "cluster_manager_timeout", "master_timeout", "timeout", "wait_for_active_shards" ) - def clone(self, index, target, body=None, params=None, headers=None): + def clone( + self, + index: Any, + target: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Clones an index + Clones an index.
- :arg index: The name of the source index to clone - :arg target: The name of the target index to clone into + :arg index: The name of the source index to clone. + :arg target: The name of the target index. :arg body: The configuration for the target index (`settings` and `aliases`) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Set the number of active shards to wait for on the cloned index before the operation returns. """ @@ -161,35 +209,45 @@ def clone(self, index, target, body=None, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "flat_settings", "ignore_unavailable", "include_defaults", "local", "master_timeout", - "cluster_manager_timeout", ) - def get(self, index, params=None, headers=None): + def get( + self, + index: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about one or more indices. - :arg index: A comma-separated list of index names - :arg allow_no_indices: Ignore if a wildcard expression resolves - to no concrete indices (default: false) - :arg expand_wildcards: Whether wildcard expressions should get - expanded to open or closed indices (default: open) Valid choices: open, - closed, hidden, none, all Default: open - :arg flat_settings: Return settings in flat format (default: - false) - :arg ignore_unavailable: Ignore unavailable indexes (default: - false) + :arg index: Comma-separated list of indices. + :arg allow_no_indices: Whether to ignore if a wildcard indices + expression resolves into no concrete indices. (This includes `_all` + string or when no indices have been specified). Default is false. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg flat_settings: Return settings in flat format. Default is + false. + :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed). Default is + false. :arg include_defaults: Whether to return all default setting for - each of the indices. + each of the indices. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. 
""" if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") @@ -200,30 +258,38 @@ def get(self, index, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "timeout", "wait_for_active_shards", ) - def open(self, index, params=None, headers=None): + def open( + self, + index: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Opens an index. - :arg index: A comma separated list of indices to open + :arg index: Comma-separated list of indices to open. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: closed + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + should be ignored when unavailable (missing or closed). + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Sets the number of active shards to wait for before the operation returns. """ @@ -236,34 +302,40 @@ def open(self, index, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "timeout", "wait_for_active_shards", ) - def close(self, index, params=None, headers=None): + def close( + self, + index: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Closes an index. - :arg index: A comma separated list of indices to close + :arg index: Comma-separated list of indices to close. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + should be ignored when unavailable (missing or closed). 
+ :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Sets the number of active shards to - wait for before the operation returns. Set to `index-setting` to wait - according to the index setting `index.write.wait_for_active_shards`, or - `all` to wait for all shards, or an integer. Defaults to `0`. + wait for before the operation returns. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") @@ -274,29 +346,39 @@ def close(self, index, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "timeout", ) - def delete(self, index, params=None, headers=None): + def delete( + self, + index: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes an index. - :arg index: A comma-separated list of indices to delete; use - `_all` or `*` string to delete all indices - :arg allow_no_indices: Ignore if a wildcard expression resolves - to no concrete indices (default: false) - :arg expand_wildcards: Whether wildcard expressions should get - expanded to open or closed indices (default: open) Valid choices: open, - closed, hidden, none, all Default: open - :arg ignore_unavailable: Ignore unavailable indexes (default: - false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + :arg index: Comma-separated list of indices to delete; use + `_all` or `*` string to delete all indices. + :arg allow_no_indices: Whether to ignore if a wildcard indices + expression resolves into no concrete indices. (This includes `_all` + string or when no indices have been specified). Default is false. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed). Default is + false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") @@ -313,25 +395,32 @@ def delete(self, index, params=None, headers=None): "include_defaults", "local", ) - def exists(self, index, params=None, headers=None): + def exists( + self, + index: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a particular index exists. 
- :arg index: A comma-separated list of index names - :arg allow_no_indices: Ignore if a wildcard expression resolves - to no concrete indices (default: false) - :arg expand_wildcards: Whether wildcard expressions should get - expanded to open or closed indices (default: open) Valid choices: open, - closed, hidden, none, all Default: open - :arg flat_settings: Return settings in flat format (default: - false) - :arg ignore_unavailable: Ignore unavailable indexes (default: - false) + :arg index: Comma-separated list of indices. + :arg allow_no_indices: Whether to ignore if a wildcard indices + expression resolves into no concrete indices. (This includes `_all` + string or when no indices have been specified). Default is false. + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg flat_settings: Return settings in flat format. Default is + false. + :arg ignore_unavailable: Whether specified concrete indices + should be ignored when unavailable (missing or closed). Default is + false. :arg include_defaults: Whether to return all default setting for - each of the indices. + each of the indices. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) + from cluster-manager node. Default is false. """ if index in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'index'.") @@ -342,35 +431,43 @@ def exists(self, index, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "timeout", "write_index_only", ) - def put_mapping(self, body, index=None, params=None, headers=None): + def put_mapping( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates the index mappings. :arg body: The mapping definition - :arg index: A comma-separated list of index names the mapping - should be added to (supports wildcards); use `_all` or omit to add the - mapping on all indices. + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + should be ignored when unavailable (missing or closed). + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. 
:arg write_index_only: When true, applies mappings only to the - write index of an alias or data stream + write index of an alias or data stream. Default is false. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -388,36 +485,42 @@ def put_mapping(self, body, index=None, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "ignore_unavailable", "local", "master_timeout", - "cluster_manager_timeout", ) - def get_mapping(self, index=None, params=None, headers=None): + def get_mapping( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns mappings for one or more indices. - :arg index: A comma-separated list of index names + :arg index: Comma-separated list of indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + should be ignored when unavailable (missing or closed). + :arg local (Deprecated: This parameter is a no-op and field + mappings are always retrieved locally.): Return local information, do + not retrieve the state from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return self.transport.perform_request( - "GET", - _make_path(index, "_mapping"), - params=params, - headers=headers, + "GET", _make_path(index, "_mapping"), params=params, headers=headers ) @query_params( @@ -427,25 +530,31 @@ def get_mapping(self, index=None, params=None, headers=None): "include_defaults", "local", ) - def get_field_mapping(self, fields, index=None, params=None, headers=None): + def get_field_mapping( + self, + fields: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns mapping for one or more fields. - :arg fields: A comma-separated list of fields - :arg index: A comma-separated list of index names + :arg fields: Comma-separated list of fields. + :arg index: Comma-separated list of indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. 
:arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg include_defaults: Whether the default mapping values should - be returned as well + be returned as well. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) + from cluster-manager node. Default is false. """ if fields in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'fields'.") @@ -457,21 +566,30 @@ def get_field_mapping(self, fields, index=None, params=None, headers=None): headers=headers, ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") - def put_alias(self, index, name, body=None, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "timeout") + def put_alias( + self, + index: Any, + name: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates an alias. - :arg index: A comma-separated list of index names the alias - should point to (supports wildcards); use `_all` to perform the - operation on all indices. - :arg name: The name of the alias to be created or updated + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. + :arg name: The name of the alias to be created or updated. :arg body: The settings for the alias, such as `routing` or `filter` - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit timestamp for the document + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ for param in (index, name): if param in SKIP_IN_PATH: @@ -486,24 +604,29 @@ def put_alias(self, index, name, body=None, params=None, headers=None): ) @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable", "local") - def exists_alias(self, name, index=None, params=None, headers=None): + def exists_alias( + self, + name: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a particular alias exists. - :arg name: A comma-separated list of alias names to return - :arg index: A comma-separated list of index names to filter - aliases + :arg name: Comma-separated list of alias names. + :arg index: Comma-separated list of indices to filter aliases. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: all + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). 
:arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) + from cluster-manager node. Default is false. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -513,39 +636,52 @@ def exists_alias(self, name, index=None, params=None, headers=None): ) @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable", "local") - def get_alias(self, index=None, name=None, params=None, headers=None): + def get_alias( + self, + index: Any = None, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns an alias. - :arg index: A comma-separated list of index names to filter - aliases - :arg name: A comma-separated list of alias names to return + :arg index: Comma-separated list of indices to filter aliases. + :arg name: Comma-separated list of alias names. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: all + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) + from cluster-manager node. Default is false. """ return self.transport.perform_request( "GET", _make_path(index, "_alias", name), params=params, headers=headers ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") - def update_aliases(self, body, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "timeout") + def update_aliases( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates index aliases. :arg body: The definition of `actions` to perform - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Request timeout + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -554,19 +690,28 @@ def update_aliases(self, body, params=None, headers=None): "POST", "/_aliases", params=params, headers=headers, body=body ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") - def delete_alias(self, index, name, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "timeout") + def delete_alias( + self, + index: Any, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes an alias. 
- :arg index: A comma-separated list of index names (supports - wildcards); use `_all` for all indices - :arg name: A comma-separated list of aliases to delete (supports + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. + :arg name: Comma-separated list of aliases to delete (supports wildcards); use `_all` to delete all aliases for the specified indices. - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit timestamp for the document + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ for param in (index, name): if param in SKIP_IN_PATH: @@ -576,21 +721,30 @@ def delete_alias(self, index, name, params=None, headers=None): "DELETE", _make_path(index, "_alias", name), params=params, headers=headers ) - @query_params("create", "master_timeout", "cluster_manager_timeout", "order") - def put_template(self, name, body, params=None, headers=None): + @query_params("cluster_manager_timeout", "create", "master_timeout", "order") + def put_template( + self, + name: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates an index template. - :arg name: The name of the template + :arg name: The name of the template. :arg body: The template definition + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg create: Whether the index template should only be added if - new or can also replace an existing one - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + new or can also replace an existing one. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg order: The order for this template when merging multiple matching ones (higher numbers are merged later, overriding the lower - numbers) + numbers). """ for param in (name, body): if param in SKIP_IN_PATH: @@ -604,21 +758,27 @@ def put_template(self, name, body, params=None, headers=None): body=body, ) - @query_params("flat_settings", "local", "master_timeout", "cluster_manager_timeout") - def exists_template(self, name, params=None, headers=None): + @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") + def exists_template( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a particular index template exists. - :arg name: The comma separated names of the index templates - :arg flat_settings: Return settings in flat format (default: - false) + :arg name: Comma-separated names of the index templates. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg flat_settings: Return settings in flat format. Default is + false. 
:arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -627,36 +787,50 @@ def exists_template(self, name, params=None, headers=None): "HEAD", _make_path("_template", name), params=params, headers=headers ) - @query_params("flat_settings", "local", "master_timeout", "cluster_manager_timeout") - def get_template(self, name=None, params=None, headers=None): + @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") + def get_template( + self, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns an index template. - :arg name: The comma separated names of the index templates - :arg flat_settings: Return settings in flat format (default: - false) + :arg name: Comma-separated names of the index templates. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg flat_settings: Return settings in flat format. Default is + false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return self.transport.perform_request( "GET", _make_path("_template", name), params=params, headers=headers ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") - def delete_template(self, name, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "timeout") + def delete_template( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes an index template. - :arg name: The name of the template - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + :arg name: The name of the template. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. 
""" if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -667,38 +841,47 @@ def delete_template(self, name, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "flat_settings", "ignore_unavailable", "include_defaults", "local", "master_timeout", - "cluster_manager_timeout", ) - def get_settings(self, index=None, name=None, params=None, headers=None): + def get_settings( + self, + index: Any = None, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns settings for one or more indices. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices - :arg name: The name of the settings that should be included + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. + :arg name: Comma-separated list of settings. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: all - :arg flat_settings: Return settings in flat format (default: - false) + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg flat_settings: Return settings in flat format. Default is + false. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg include_defaults: Whether to return all default setting for - each of the indices. + each of the indices. Default is false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return self.transport.perform_request( "GET", _make_path(index, "_settings", name), params=params, headers=headers @@ -706,38 +889,47 @@ def get_settings(self, index=None, name=None, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "flat_settings", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "preserve_existing", "timeout", ) - def put_settings(self, body, index=None, params=None, headers=None): + def put_settings( + self, + body: Any, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates the index settings. :arg body: The index settings to be updated - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. 
:arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg flat_settings: Return settings in flat format (default: - false) + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg flat_settings: Return settings in flat format. Default is + false. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + should be ignored when unavailable (missing or closed). + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg preserve_existing: Whether to update existing settings. If - set to `true` existing settings on an index remain unchanged, the - default is `false` - :arg timeout: Explicit operation timeout + set to `true` existing settings on an index remain unchanged. Default is + false. + :arg timeout: Operation timeout. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -760,43 +952,46 @@ def put_settings(self, body, index=None, params=None, headers=None): "include_segment_file_sizes", "include_unloaded_segments", "level", - "types", ) - def stats(self, index=None, metric=None, params=None, headers=None): + def stats( + self, + index: Any = None, + metric: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Provides statistics on operations happening in an index. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg metric: Limit the information returned the specific - metrics. Valid choices: _all, completion, docs, fielddata, query_cache, - flush, get, indexing, merge, request_cache, refresh, search, segments, - store, warmer, suggest - :arg completion_fields: A comma-separated list of fields for - `fielddata` and `suggest` index metric (supports wildcards) + metrics. Valid choices are _all, store, indexing, get, search, merge, + flush, refresh, query_cache, fielddata, docs, warmer, completion, + segments, translog, suggest, request_cache, recovery. + :arg completion_fields: Comma-separated list of fields for + `fielddata` and `suggest` index metric (supports wildcards). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg fielddata_fields: A comma-separated list of fields for - `fielddata` index metric (supports wildcards) - :arg fields: A comma-separated list of fields for `fielddata` - and `completion` index metric (supports wildcards) + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. 
+ :arg fielddata_fields: Comma-separated list of fields for + `fielddata` index metric (supports wildcards). + :arg fields: Comma-separated list of fields for `fielddata` and + `completion` index metric (supports wildcards). :arg forbid_closed_indices: If set to false stats will also collected from closed indices if explicitly specified or if - expand_wildcards expands to closed indices Default: True - :arg groups: A comma-separated list of search groups for - `search` index metric + expand_wildcards expands to closed indices. Default is True. + :arg groups: Comma-separated list of search groups for `search` + index metric. :arg include_segment_file_sizes: Whether to report the aggregated disk usage of each one of the Lucene index files (only - applies if segment stats are requested) + applies if segment stats are requested). Default is false. :arg include_unloaded_segments: If set to true segment stats will include stats for segments that are not currently loaded into - memory + memory. Default is false. :arg level: Return stats aggregated at cluster, index or shard - level Valid choices: cluster, indices, shards Default: indices - :arg types: A comma-separated list of document types for the - `indexing` index metric + level. Valid choices are cluster, indices, shards. """ return self.transport.perform_request( "GET", _make_path(index, "_stats", metric), params=params, headers=headers @@ -805,22 +1000,28 @@ def stats(self, index=None, metric=None, params=None, headers=None): @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "verbose" ) - def segments(self, index=None, params=None, headers=None): + def segments( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Provides low-level information about segments in a Lucene index. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg verbose: Includes detailed memory usage by Lucene. + should be ignored when unavailable (missing or closed). + :arg verbose: Includes detailed memory usage by Lucene. Default + is false. """ return self.transport.perform_request( "GET", _make_path(index, "_segments"), params=params, headers=headers @@ -840,38 +1041,41 @@ def segments(self, index=None, params=None, headers=None): "q", "rewrite", ) - def validate_query(self, body=None, index=None, params=None, headers=None): + def validate_query( + self, + body: Any = None, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows a user to validate a potentially expensive query without executing it. 
:arg body: The query definition specified with the Query DSL - :arg index: A comma-separated list of index names to restrict - the operation; use `_all` or empty string to perform the operation on - all indices - restrict the operation; leave empty to perform the operation on all - types + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg all_shards: Execute validation on all shards instead of one - random shard per index + random shard per index. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg analyze_wildcard: Specify whether wildcard and prefix - queries should be analyzed (default: false) - :arg analyzer: The analyzer to use for the query string + queries should be analyzed. Default is false. + :arg analyzer: The analyzer to use for the query string. :arg default_operator: The default operator for query string - query (AND or OR) Valid choices: AND, OR Default: OR + query (AND or OR). Valid choices are AND, OR. :arg df: The field to use as default where no field prefix is - given in the query string + given in the query string. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg explain: Return detailed information about the error + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg explain: Return detailed information about the error. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg lenient: Specify whether format-based query failures (such - as providing text to a numeric field) should be ignored - :arg q: Query in the Lucene query string syntax + as providing text to a numeric field) should be ignored. + :arg q: Query in the Lucene query string syntax. :arg rewrite: Provide a more detailed explanation showing the actual Lucene query that will be executed. """ @@ -892,43 +1096,53 @@ def validate_query(self, body=None, index=None, params=None, headers=None): "query", "request", ) - def clear_cache(self, index=None, params=None, headers=None): + def clear_cache( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Clears all or specific caches for one or more indices. - :arg index: A comma-separated list of index name to limit the - operation + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg fielddata: Clear field data - :arg fields: A comma-separated list of fields to clear when - using the `fielddata` parameter (default: all) + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. + :arg fielddata: Clear field data. 
+ :arg fields: Comma-separated list of fields to clear when using + the `fielddata` parameter (default: all). :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg query: Clear query caches - :arg request: Clear request cache + should be ignored when unavailable (missing or closed). + :arg query: Clear query caches. + :arg request: Clear request cache. """ return self.transport.perform_request( "POST", _make_path(index, "_cache", "clear"), params=params, headers=headers ) @query_params("active_only", "detailed") - def recovery(self, index=None, params=None, headers=None): + def recovery( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about ongoing index shard recoveries. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg active_only: Display only those recoveries that are - currently on-going + currently ongoing. Default is false. :arg detailed: Whether to display detailed information about - shard recovery + shard recovery. Default is false. """ return self.transport.perform_request( "GET", _make_path(index, "_recovery"), params=params, headers=headers ) @@ -941,97 +1155,86 @@ def recovery(self, index=None, params=None, headers=None): "only_ancient_segments", "wait_for_completion", ) - def upgrade(self, index=None, params=None, headers=None): + def upgrade( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - DEPRECATED Upgrades to the current version of Lucene. + The _upgrade API is no longer useful and will be removed. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg only_ancient_segments: If true, only ancient (an older - Lucene major release) segments will be upgraded - :arg wait_for_completion: Specify whether the request should - block until the all segments are upgraded (default: false) + Lucene major release) segments will be upgraded. + :arg wait_for_completion: Whether this request should wait until the + operation has completed before returning. Default is false.
""" return self.transport.perform_request( "POST", _make_path(index, "_upgrade"), params=params, headers=headers ) @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable") - def get_upgrade(self, index=None, params=None, headers=None): + def get_upgrade( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - DEPRECATED Returns a progress status of current upgrade. + The _upgrade API is no longer useful and will be removed. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). """ return self.transport.perform_request( "GET", _make_path(index, "_upgrade"), params=params, headers=headers ) - @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable") - def flush_synced(self, index=None, params=None, headers=None): - """ - Performs a synced flush operation on one or more indices. Synced flush is - deprecated. Use flush instead - - - :arg index: A comma-separated list of index names; use `_all` or - empty string for all indices - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, none, all Default: open - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - """ - return self.transport.perform_request( - "POST", - _make_path(index, "_flush", "synced"), - params=params, - headers=headers, - ) - @query_params( "allow_no_indices", "expand_wildcards", "ignore_unavailable", "status" ) - def shard_stores(self, index=None, params=None, headers=None): + def shard_stores( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Provides store information for shard copies of indices. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. 
Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed). :arg status: A comma-separated list of statuses used to filter - on shards to get store information for Valid choices: green, yellow, - red, all + should be ignored when unavailable (missing or closed). + :arg status: Comma-separated list of statuses used to filter on + shards to get store information for. """ return self.transport.perform_request( "GET", _make_path(index, "_shard_stores"), params=params, headers=headers ) @@ -1045,53 +1248,68 @@ def shard_stores(self, index=None, params=None, headers=None): @query_params( "allow_no_indices", "expand_wildcards", "flush", "ignore_unavailable", "max_num_segments", "only_expunge_deletes", ) - def forcemerge(self, index=None, params=None, headers=None): + def forcemerge( + self, + index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Performs the force merge operation on one or more indices. - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices + :arg index: Comma-separated list of indices; use `_all` or empty + string to perform the operation on all indices. :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg flush: Specify whether the index should be flushed after - performing the operation (default: true) + performing the operation. Default is True. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) + should be ignored when unavailable (missing or closed). :arg max_num_segments: The number of segments the index should - be merged into (default: dynamic) + be merged into (default: dynamic). :arg only_expunge_deletes: Specify whether the operation should - only expunge deleted documents + only expunge deleted documents. """ return self.transport.perform_request( "POST", _make_path(index, "_forcemerge"), params=params, headers=headers ) @query_params( + "cluster_manager_timeout", "copy_settings", "master_timeout", - "cluster_manager_timeout", "timeout", "wait_for_active_shards", ) - def shrink(self, index, target, body=None, params=None, headers=None): + def shrink( + self, + index: Any, + target: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows you to shrink an existing index into a new index with fewer primary shards. - :arg index: The name of the source index to shrink - :arg target: The name of the target index to shrink into + :arg index: The name of the source index to shrink. + :arg target: The name of the target index. :arg body: The configuration for the target index (`settings` and `aliases`) + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node.
:arg copy_settings: whether or not to copy settings from the - source index (defaults to false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + source index. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Set the number of active shards to wait for on the shrunken index before the operation returns. """ @@ -1108,27 +1326,37 @@ def shrink(self, index, target, body=None, params=None, headers=None): ) @query_params( + "cluster_manager_timeout", "copy_settings", "master_timeout", - "cluster_manager_timeout", "timeout", "wait_for_active_shards", ) - def split(self, index, target, body=None, params=None, headers=None): + def split( + self, + index: Any, + target: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows you to split an existing index into a new index with more primary shards. - :arg index: The name of the source index to split - :arg target: The name of the target index to split into + :arg index: The name of the source index to split. + :arg target: The name of the target index. :arg body: The configuration for the target index (`settings` and `aliases`) + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg copy_settings: whether or not to copy settings from the - source index (defaults to false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + source index. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Set the number of active shards to wait for on the shrunken index before the operation returns. """ @@ -1145,28 +1373,38 @@ def split(self, index, target, body=None, params=None, headers=None): ) @query_params( + "cluster_manager_timeout", "dry_run", "master_timeout", - "cluster_manager_timeout", "timeout", "wait_for_active_shards", ) - def rollover(self, alias, body=None, new_index=None, params=None, headers=None): + def rollover( + self, + alias: Any, + body: Any = None, + new_index: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates an alias to point to a new index when the existing index is considered to be too large or too old. - :arg alias: The name of the alias to rollover + :arg alias: The name of the alias to rollover. :arg body: The conditions that needs to be met for executing rollover - :arg new_index: The name of the rollover index + :arg new_index: The name of the rollover index. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg dry_run: If set to true the rollover action will only be - validated but not actually performed even if a condition matches. 
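shrink() and split() keep their (index, target, body) shape; only the timeout parameters are reordered and reworded above. A sketch with placeholder names that elides the read-only and shard-allocation preparation a real shrink needs:

# `client` as in the first sketch.
client.indices.shrink(
    index="logs-source",
    target="logs-shrunk",
    body={"settings": {"index.number_of_shards": 1}},
)
client.indices.split(
    index="logs-source",
    target="logs-split",
    body={"settings": {"index.number_of_shards": 4}},
)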
The - default is false - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + validated but not actually performed even if a condition matches. + Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. :arg wait_for_active_shards: Set the number of active shards to wait for on the newly created rollover index before the operation returns. @@ -1182,133 +1420,45 @@ def rollover(self, alias, body=None, new_index=None, params=None, headers=None): body=body, ) - @query_params( - "allow_no_indices", - "expand_wildcards", - "ignore_unavailable", - "master_timeout", - "cluster_manager_timeout", - "timeout", - "wait_for_active_shards", - ) - def freeze(self, index, params=None, headers=None): - """ - Freezes an index. A frozen index has almost no overhead on the cluster (except - for maintaining its metadata in memory) and is read-only. - - - :arg index: The name of the index to freeze - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: closed - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout - :arg wait_for_active_shards: Sets the number of active shards to - wait for before the operation returns. - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'index'.") - - return self.transport.perform_request( - "POST", _make_path(index, "_freeze"), params=params, headers=headers - ) - - @query_params( - "allow_no_indices", - "expand_wildcards", - "ignore_unavailable", - "master_timeout", - "cluster_manager_timeout", - "timeout", - "wait_for_active_shards", - ) - def unfreeze(self, index, params=None, headers=None): - """ - Unfreezes an index. When a frozen index is unfrozen, the index goes through the - normal recovery process and becomes writeable again. - - - :arg index: The name of the index to unfreeze - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: closed - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout - :arg wait_for_active_shards: Sets the number of active shards to - wait for before the operation returns. 
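The dry_run flag documented above makes rollover() a safe pre-check, while freeze() and unfreeze() are deleted outright with no replacement in this client. Alias and conditions below are placeholders:

# `client` as in the first sketch.
client.indices.rollover(
    alias="logs-write",
    body={"conditions": {"max_age": "7d", "max_docs": 10_000_000}},
    dry_run=True,
)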
- """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'index'.") - - return self.transport.perform_request( - "POST", _make_path(index, "_unfreeze"), params=params, headers=headers - ) - - @query_params("allow_no_indices", "expand_wildcards", "ignore_unavailable") - def reload_search_analyzers(self, index, params=None, headers=None): - """ - Reloads an index's search analyzers and their resources. - - - :arg index: A comma-separated list of index names to reload - analyzers for - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'index'.") - - return self.transport.perform_request( - "GET", - _make_path(index, "_reload_search_analyzers"), - params=params, - headers=headers, - ) - @query_params() - def create_data_stream(self, name, params=None, headers=None): + def create_data_stream( + self, + name: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Creates a data stream + Creates or updates a data stream. - :arg name: The name of the data stream + :arg name: The name of the data stream. + :arg body: The data stream definition """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") return self.transport.perform_request( - "PUT", _make_path("_data_stream", name), params=params, headers=headers + "PUT", + _make_path("_data_stream", name), + params=params, + headers=headers, + body=body, ) - @query_params("expand_wildcards") - def delete_data_stream(self, name, params=None, headers=None): + @query_params() + def delete_data_stream( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes a data stream. - :arg name: A comma-separated list of data streams to delete; use - `*` to delete all data streams - :arg expand_wildcards: Whether wildcard expressions should get - expanded to open or closed indices (default: open) Valid choices: open, - closed, hidden, none, all Default: open + :arg name: Comma-separated list of data streams; use `_all` or + empty string to perform the operation on all data streams. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -1317,16 +1467,24 @@ def delete_data_stream(self, name, params=None, headers=None): "DELETE", _make_path("_data_stream", name), params=params, headers=headers ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") - def delete_index_template(self, name, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "timeout") + def delete_index_template( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes an index template. - :arg name: The name of the template - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + :arg name: The name of the template. 
+ :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -1338,21 +1496,27 @@ def delete_index_template(self, name, params=None, headers=None): headers=headers, ) - @query_params("flat_settings", "local", "master_timeout", "cluster_manager_timeout") - def exists_index_template(self, name, params=None, headers=None): + @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") + def exists_index_template( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about whether a particular index template exists. - :arg name: The name of the template - :arg flat_settings: Return settings in flat format (default: - false) + :arg name: The name of the template. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg flat_settings: Return settings in flat format. Default is + false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -1361,40 +1525,55 @@ def exists_index_template(self, name, params=None, headers=None): "HEAD", _make_path("_index_template", name), params=params, headers=headers ) - @query_params("flat_settings", "local", "master_timeout", "cluster_manager_timeout") - def get_index_template(self, name=None, params=None, headers=None): + @query_params("cluster_manager_timeout", "flat_settings", "local", "master_timeout") + def get_index_template( + self, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns an index template. - :arg name: The comma separated names of the index templates - :arg flat_settings: Return settings in flat format (default: - false) + :arg name: Comma-separated names of the index templates. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg flat_settings: Return settings in flat format. Default is + false. :arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. 
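exists_index_template() still returns a boolean (see its stub later in this patch), so the regenerated pair composes as before; the template name is a placeholder:

# `client` as in the first sketch.
if client.indices.exists_index_template(name="logs-template"):
    client.indices.delete_index_template(name="logs-template")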
""" return self.transport.perform_request( "GET", _make_path("_index_template", name), params=params, headers=headers ) - @query_params("cause", "create", "master_timeout", "cluster_manager_timeout") - def put_index_template(self, name, body, params=None, headers=None): + @query_params("cause", "cluster_manager_timeout", "create", "master_timeout") + def put_index_template( + self, + name: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates an index template. - :arg name: The name of the template + :arg name: The name of the template. :arg body: The template definition :arg cause: User defined reason for creating/updating the index - template + template. Default is false. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg create: Whether the index template should only be added if - new or can also replace an existing one - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + new or can also replace an existing one. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ for param in (name, body): if param in SKIP_IN_PATH: @@ -1408,24 +1587,33 @@ def put_index_template(self, name, body, params=None, headers=None): body=body, ) - @query_params("cause", "create", "master_timeout", "cluster_manager_timeout") - def simulate_index_template(self, name, body=None, params=None, headers=None): + @query_params("cause", "cluster_manager_timeout", "create", "master_timeout") + def simulate_index_template( + self, + name: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Simulate matching the given index name against the index templates in the - system + system. :arg name: The name of the index (it must be a concrete index - name) + name). :arg body: New index template definition, which will be included in the simulation, as if it already exists in the system :arg cause: User defined reason for dry-run creating the new - template for simulation purposes + template for simulation purposes. Default is false. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg create: Whether the index template we optionally defined in the body should only be dry-run added if new or can also replace an - existing one - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + existing one. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -1438,38 +1626,49 @@ def simulate_index_template(self, name, body=None, params=None, headers=None): body=body, ) - @query_params("expand_wildcards") - def get_data_stream(self, name=None, params=None, headers=None): + @query_params() + def get_data_stream( + self, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns data streams. 
- :arg name: A comma-separated list of data streams to get; use - `*` to get all data streams - :arg expand_wildcards: Whether wildcard expressions should get - expanded to open or closed indices (default: open) Valid choices: open, - closed, hidden, none, all Default: open + :arg name: Comma-separated list of data streams; use `_all` or + empty string to perform the operation on all data streams. """ return self.transport.perform_request( "GET", _make_path("_data_stream", name), params=params, headers=headers ) - @query_params("cause", "create", "master_timeout", "cluster_manager_timeout") - def simulate_template(self, body=None, name=None, params=None, headers=None): + @query_params("cause", "cluster_manager_timeout", "create", "master_timeout") + def simulate_template( + self, + body: Any = None, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Simulate resolving the given template name or body + Simulate resolving the given template name or body. :arg body: New index template definition to be simulated, if no index template name is specified - :arg name: The name of the index template + :arg name: The name of the template. :arg cause: User defined reason for dry-run creating the new - template for simulation purposes + template for simulation purposes. Default is false. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg create: Whether the index template we optionally defined in the body should only be dry-run added if new or can also replace an - existing one - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager + existing one. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return self.transport.perform_request( "POST", @@ -1480,21 +1679,21 @@ def simulate_template(self, body=None, name=None, params=None, headers=None): ) @query_params("expand_wildcards") - def resolve_index(self, name, params=None, headers=None): + def resolve_index( + self, + name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Returns information about any matching indices, aliases, and data streams - - - .. warning:: + Returns information about any matching indices, aliases, and data streams. - This API is **experimental** so may include breaking changes - or be removed in a future version - :arg name: A comma-separated list of names or wildcard - expressions - :arg expand_wildcards: Whether wildcard expressions should get - expanded to open or closed indices (default: open) Valid choices: open, - closed, hidden, none, all Default: open + :arg name: Comma-separated list of names or wildcard + expressions. + :arg expand_wildcards: Whether to expand wildcard expression to + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. 
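get_data_stream() drops expand_wildcards in favor of `_all`, and simulate_template() dry-runs a definition without registering it. Names and template body below are placeholders:

# `client` as in the first sketch.
streams = client.indices.get_data_stream(name="logs-app")
sim = client.indices.simulate_template(
    body={"index_patterns": ["metrics-*"], "template": {"settings": {}}}
)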
""" if name in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'name'.") @@ -1505,31 +1704,40 @@ def resolve_index(self, name, params=None, headers=None): @query_params( "allow_no_indices", + "cluster_manager_timeout", "expand_wildcards", "ignore_unavailable", "master_timeout", - "cluster_manager_timeout", "timeout", ) - def add_block(self, index, block, params=None, headers=None): + def add_block( + self, + index: Any, + block: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Adds a block to an index. - :arg index: A comma separated list of indices to add a block to + :arg index: Comma-separated list of indices to add a block to. :arg block: The block to add (one of read, write, read_only or - metadata) + metadata). :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) + string or when no indices have been specified). + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open + concrete indices that are open, closed or both. Valid choices are all, + open, closed, hidden, none. :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Specify timeout for connection to master - :arg cluster_manager_timeout: Specify timeout for connection to cluster_manager - :arg timeout: Explicit operation timeout + should be ignored when unavailable (missing or closed). + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ for param in (index, block): if param in SKIP_IN_PATH: @@ -1540,13 +1748,18 @@ def add_block(self, index, block, params=None, headers=None): ) @query_params() - def data_streams_stats(self, name=None, params=None, headers=None): + def data_streams_stats( + self, + name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Provides statistics on operations happening in a data stream. - :arg name: A comma-separated list of data stream names; use - `_all` or empty string to perform the operation on all data streams + :arg name: Comma-separated list of data streams; use `_all` or + empty string to perform the operation on all data streams. 
""" return self.transport.perform_request( "GET", @@ -1554,115 +1767,3 @@ def data_streams_stats(self, name=None, params=None, headers=None): params=params, headers=headers, ) - - @query_params() - def promote_data_stream(self, name, params=None, headers=None): - """ - Promotes a data stream from a replicated data stream managed by CCR to a - regular data stream - - - :arg name: The name of the data stream - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'name'.") - - return self.transport.perform_request( - "POST", - _make_path("_data_stream", "_promote", name), - params=params, - headers=headers, - ) - - @query_params() - def migrate_to_data_stream(self, name, params=None, headers=None): - """ - Migrates an alias to a data stream - - - :arg name: The name of the alias to migrate - """ - if name in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'name'.") - - return self.transport.perform_request( - "POST", - _make_path("_data_stream", "_migrate", name), - params=params, - headers=headers, - ) - - @query_params( - "allow_no_indices", - "expand_wildcards", - "flush", - "ignore_unavailable", - "run_expensive_tasks", - ) - def disk_usage(self, index, params=None, headers=None): - """ - Analyzes the disk usage of each field of an index or data stream - - - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version - - :arg index: Comma-separated list of indices or data streams to - analyze the disk usage - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. Valid choices: open, - closed, hidden, none, all Default: open - :arg flush: Whether flush or not before analyzing the index disk - usage. Defaults to true - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - :arg run_expensive_tasks: Must be set to [true] in order for the - task to be performed. Defaults to false. - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'index'.") - - return self.transport.perform_request( - "POST", _make_path(index, "_disk_usage"), params=params, headers=headers - ) - - @query_params( - "allow_no_indices", "expand_wildcards", "fields", "ignore_unavailable" - ) - def field_usage_stats(self, index, params=None, headers=None): - """ - Returns the field usage stats for each field of an index - - - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version - - :arg index: A comma-separated list of index names; use `_all` or - empty string to perform the operation on all indices - :arg allow_no_indices: Whether to ignore if a wildcard indices - expression resolves into no concrete indices. (This includes `_all` - string or when no indices have been specified) - :arg expand_wildcards: Whether to expand wildcard expression to - concrete indices that are open, closed or both. 
Valid choices: open, - closed, hidden, none, all Default: open - :arg fields: A comma-separated list of fields to include in the - stats if only a subset of fields should be returned (supports wildcards) - :arg ignore_unavailable: Whether specified concrete indices - should be ignored when unavailable (missing or closed) - """ - if index in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'index'.") - - return self.transport.perform_request( - "GET", - _make_path(index, "_field_usage_stats"), - params=params, - headers=headers, - ) diff --git a/opensearchpy/client/indices.pyi b/opensearchpy/client/indices.pyi deleted file mode 100644 index 2393537a..00000000 --- a/opensearchpy/client/indices.pyi +++ /dev/null @@ -1,1254 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class IndicesClient(NamespacedClient): - def analyze( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def refresh( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def flush( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - force: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - wait_if_ongoing: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def create( - self, - index: Any, - *, - body: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def clone( - self, - index: Any, - target: Any, - *, - body: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def open( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def close( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def exists( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - local: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... 
- def put_mapping( - self, - *, - body: Any, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - write_index_only: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_mapping( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_field_mapping( - self, - fields: Any, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - local: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def put_alias( - self, - index: Any, - name: Any, - *, - body: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def exists_alias( - self, - name: Any, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - def get_alias( - self, - *, - index: Optional[Any] = ..., - name: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - local: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def update_aliases( - self, - *, - body: Any, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_alias( - self, - index: Any, - name: Any, - *, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def put_template( - self, - name: Any, - *, - body: Any, - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - order: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def exists_template( - self, - name: Any, - *, - flat_settings: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - def get_template( - self, - *, - name: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_template( - self, - name: Any, - *, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def get_settings( - self, - *, - index: Optional[Any] = ..., - name: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - include_defaults: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def put_settings( - self, - *, - body: Any, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - preserve_existing: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def stats( - self, - *, - index: Optional[Any] = ..., - metric: Optional[Any] = ..., - completion_fields: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - fielddata_fields: Optional[Any] = ..., - fields: Optional[Any] = ..., - forbid_closed_indices: Optional[Any] = ..., - groups: Optional[Any] = ..., - include_segment_file_sizes: Optional[Any] = ..., - include_unloaded_segments: Optional[Any] = ..., - level: Optional[Any] = ..., - types: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def segments( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - verbose: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def validate_query( - self, - *, - body: Optional[Any] = ..., - index: Optional[Any] = ..., - all_shards: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - analyze_wildcard: Optional[Any] = ..., - analyzer: Optional[Any] = ..., - default_operator: Optional[Any] = ..., - df: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - explain: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - lenient: Optional[Any] = ..., - q: Optional[Any] = ..., - rewrite: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def clear_cache( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - fielddata: Optional[Any] = ..., - fields: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - query: Optional[Any] = ..., - request: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def recovery( - self, - *, - index: Optional[Any] = ..., - active_only: Optional[Any] = ..., - detailed: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def upgrade( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - only_ancient_segments: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_upgrade( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def flush_synced( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def shard_stores( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - status: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def forcemerge( - self, - *, - index: Optional[Any] = ..., - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flush: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - max_num_segments: Optional[Any] = ..., - only_expunge_deletes: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def shrink( - self, - index: Any, - target: Any, - *, - body: Optional[Any] = ..., - copy_settings: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def split( - self, - index: Any, - target: Any, - *, - body: Optional[Any] = ..., - copy_settings: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def rollover( - self, - alias: Any, - *, - body: Optional[Any] = ..., - new_index: Optional[Any] = ..., - dry_run: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def freeze( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def unfreeze( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_active_shards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def reload_search_analyzers( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def create_data_stream( - self, - name: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def delete_data_stream( - self, - name: Any, - *, - expand_wildcards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_index_template( - self, - name: Any, - *, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def exists_index_template( - self, - name: Any, - *, - flat_settings: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> bool: ... - def get_index_template( - self, - *, - name: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def put_index_template( - self, - name: Any, - *, - body: Any, - cause: Optional[Any] = ..., - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def simulate_index_template( - self, - name: Any, - *, - body: Optional[Any] = ..., - cause: Optional[Any] = ..., - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_data_stream( - self, - *, - name: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def simulate_template( - self, - *, - body: Optional[Any] = ..., - name: Optional[Any] = ..., - cause: Optional[Any] = ..., - create: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
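The composable index-template methods above (`put_index_template`, `exists_index_template`, `simulate_index_template`) map onto the `_index_template` REST endpoints. A sketch of the round trip, with illustrative template and index names:

from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://localhost:9200"],
    http_auth=("admin", "admin"),
    verify_certs=False,
)

template = {
    "index_patterns": ["logs-*"],
    "template": {"settings": {"number_of_shards": 1}},
}

# Create or replace the template, then verify it is registered.
client.indices.put_index_template(name="logs-template", body=template)
assert client.indices.exists_index_template(name="logs-template")

# Preview what a concrete index named `logs-2023-10` would be created with.
preview = client.indices.simulate_index_template(name="logs-2023-10")
print(preview["template"]["settings"])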
- def resolve_index( - self, - name: Any, - *, - expand_wildcards: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def add_block( - self, - index: Any, - block: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def data_streams_stats( - self, - *, - name: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def promote_data_stream( - self, - name: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def migrate_to_data_stream( - self, - name: Any, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
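Several of the deleted stubs above belong to the data-stream family (`create_data_stream`, `data_streams_stats`, `delete_data_stream`). A minimal lifecycle sketch, noting that a data stream requires a matching index template that declares a `data_stream` section; all names here are made up:

from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://localhost:9200"],
    http_auth=("admin", "admin"),
    verify_certs=False,
)

# The template's `data_stream` block marks matching names as data streams.
client.indices.put_index_template(
    name="metrics-template",
    body={"index_patterns": ["metrics-*"], "data_stream": {}},
)

client.indices.create_data_stream(name="metrics-cpu")
print(client.indices.data_streams_stats(name="metrics-cpu"))
client.indices.delete_data_stream(name="metrics-cpu")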
- def disk_usage( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - flush: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - run_expensive_tasks: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def field_usage_stats( - self, - index: Any, - *, - allow_no_indices: Optional[Any] = ..., - expand_wildcards: Optional[Any] = ..., - fields: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/client/ingest.py b/opensearchpy/client/ingest.py index e40f1a3d..4bf558b9 100644 --- a/opensearchpy/client/ingest.py +++ b/opensearchpy/client/ingest.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,42 +26,65 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + +from typing import Any + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class IngestClient(NamespacedClient): - @query_params("master_timeout", "cluster_manager_timeout", "summary") - def get_pipeline(self, id=None, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout") + def get_pipeline( + self, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a pipeline. - :arg id: Comma separated list of pipeline ids. Wildcards - supported - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg summary: Return pipelines without their definitions - (default: false) + :arg id: Comma-separated list of pipeline ids. Wildcards + supported. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. 
+ :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return self.transport.perform_request( "GET", _make_path("_ingest", "pipeline", id), params=params, headers=headers ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") - def put_pipeline(self, id, body, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "timeout") + def put_pipeline( + self, + id: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates a pipeline. - :arg id: Pipeline ID + :arg id: Pipeline ID. :arg body: The ingest definition - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ for param in (id, body): if param in SKIP_IN_PATH: @@ -74,18 +98,24 @@ def put_pipeline(self, id, body, params=None, headers=None): body=body, ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") - def delete_pipeline(self, id, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "timeout") + def delete_pipeline( + self, + id: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes a pipeline. - :arg id: Pipeline ID - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + :arg id: Pipeline ID. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if id in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'id'.") @@ -98,15 +128,21 @@ def delete_pipeline(self, id, params=None, headers=None): ) @query_params("verbose") - def simulate(self, body, id=None, params=None, headers=None): + def simulate( + self, + body: Any, + id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Allows to simulate a pipeline with example documents. :arg body: The simulate definition - :arg id: Pipeline ID + :arg id: Pipeline ID. :arg verbose: Verbose mode. Display data output for each - processor in executed pipeline + processor in executed pipeline. Default is false. """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") @@ -120,7 +156,11 @@ def simulate(self, body, id=None, params=None, headers=None): ) @query_params() - def processor_grok(self, params=None, headers=None): + def processor_grok( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a list of the built-in patterns. 
@@ -128,13 +168,3 @@ def processor_grok(self, params=None, headers=None): return self.transport.perform_request( "GET", "/_ingest/processor/grok", params=params, headers=headers ) - - @query_params() - def geo_ip_stats(self, params=None, headers=None): - """ - Returns statistical information about geoip databases - - """ - return self.transport.perform_request( - "GET", "/_ingest/geoip/stats", params=params, headers=headers - ) diff --git a/opensearchpy/client/ingest.pyi b/opensearchpy/client/ingest.pyi deleted file mode 100644 index bbc5aba2..00000000 --- a/opensearchpy/client/ingest.pyi +++ /dev/null @@ -1,143 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class IngestClient(NamespacedClient): - def get_pipeline( - self, - *, - id: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - summary: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def put_pipeline( - self, - id: Any, - *, - body: Any, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
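The regenerated `IngestClient` keeps the same call shapes while preferring `cluster_manager_timeout` over the deprecated `master_timeout`; both are declared via `query_params`, so they can be passed as keyword arguments. A sketch of the pipeline round trip with a made-up pipeline id and sample document:

from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://localhost:9200"],
    http_auth=("admin", "admin"),
    verify_certs=False,
)

pipeline = {
    "description": "lowercase the `user` field",
    "processors": [{"lowercase": {"field": "user"}}],
}
client.ingest.put_pipeline(id="lowercase-user", body=pipeline, cluster_manager_timeout="30s")

# Dry-run the pipeline against a sample document; `verbose` adds per-processor output.
result = client.ingest.simulate(
    body={"docs": [{"_source": {"user": "Alice"}}]},
    id="lowercase-user",
    verbose=True,
)
print(result["docs"][0])

client.ingest.delete_pipeline(id="lowercase-user")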
- def delete_pipeline( - self, - id: Any, - *, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def simulate( - self, - *, - body: Any, - id: Optional[Any] = ..., - verbose: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def processor_grok( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def geo_ip_stats( - self, - *, - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/client/nodes.py b/opensearchpy/client/nodes.py index 2773002b..6a7b5db1 100644 --- a/opensearchpy/client/nodes.py +++ b/opensearchpy/client/nodes.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,24 +26,40 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
+# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + +from typing import Any + from .utils import NamespacedClient, _make_path, query_params class NodesClient(NamespacedClient): @query_params("timeout") def reload_secure_settings( - self, body=None, node_id=None, params=None, headers=None - ): + self, + body: Any = None, + node_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Reloads secure settings. - :arg body: An object containing the password for the - opensearch keystore - :arg node_id: A comma-separated list of node IDs to span the + :arg body: An object containing the password for the opensearch + keystore + :arg node_id: Comma-separated list of node IDs to span the reload/reinit call. Should stay empty because reloading usually involves all cluster nodes. - :arg timeout: Explicit operation timeout + :arg timeout: Operation timeout. """ return self.transport.perform_request( "POST", @@ -53,21 +70,27 @@ def reload_secure_settings( ) @query_params("flat_settings", "timeout") - def info(self, node_id=None, metric=None, params=None, headers=None): + def info( + self, + node_id: Any = None, + metric: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about nodes in the cluster. - :arg node_id: A comma-separated list of node IDs or names to - limit the returned information; use `_local` to return information from - the node you're connecting to, leave empty to get information from all - nodes - :arg metric: A comma-separated list of metrics you wish - returned. Leave empty to return all. Valid choices: settings, os, - process, jvm, thread_pool, transport, http, plugins, ingest - :arg flat_settings: Return settings in flat format (default: - false) - :arg timeout: Explicit operation timeout + :arg node_id: Comma-separated list of node IDs or names to limit + the returned information; use `_local` to return information from the + node you're connecting to, leave empty to get information from all + nodes. + :arg metric: Comma-separated list of metrics you wish returned. + Leave empty to return all. Valid choices are settings, os, process, jvm, + thread_pool, transport, http, plugins, ingest. + :arg flat_settings: Return settings in flat format. Default is + false. + :arg timeout: Operation timeout. """ return self.transport.perform_request( "GET", _make_path("_nodes", node_id, metric), params=params, headers=headers @@ -79,49 +102,50 @@ def info(self, node_id=None, metric=None, params=None, headers=None): "fields", "groups", "include_segment_file_sizes", - "include_unloaded_segments", "level", "timeout", "types", ) def stats( - self, node_id=None, metric=None, index_metric=None, params=None, headers=None - ): + self, + node_id: Any = None, + metric: Any = None, + index_metric: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns statistical information about nodes in the cluster. 
- :arg node_id: A comma-separated list of node IDs or names to - limit the returned information; use `_local` to return information from - the node you're connecting to, leave empty to get information from all - nodes + :arg node_id: Comma-separated list of node IDs or names to limit + the returned information; use `_local` to return information from the + node you're connecting to, leave empty to get information from all + nodes. :arg metric: Limit the information returned to the specified - metrics Valid choices: _all, breaker, fs, http, indices, jvm, os, - process, thread_pool, transport, discovery, indexing_pressure + metrics. Valid choices are _all, breaker, fs, http, indices, jvm, os, + process, thread_pool, transport, discovery, indexing_pressure. :arg index_metric: Limit the information returned for `indices` metric to the specific index metrics. Isn't used if `indices` (or `all`) - metric isn't specified. Valid choices: _all, completion, docs, - fielddata, query_cache, flush, get, indexing, merge, request_cache, - refresh, search, segments, store, warmer, suggest - :arg completion_fields: A comma-separated list of fields for - `fielddata` and `suggest` index metric (supports wildcards) - :arg fielddata_fields: A comma-separated list of fields for - `fielddata` index metric (supports wildcards) - :arg fields: A comma-separated list of fields for `fielddata` - and `completion` index metric (supports wildcards) - :arg groups: A comma-separated list of search groups for - `search` index metric + metric isn't specified. Valid choices are _all, store, indexing, get, + search, merge, flush, refresh, query_cache, fielddata, docs, warmer, + completion, segments, translog, suggest, request_cache, recovery. + :arg completion_fields: Comma-separated list of fields for + `fielddata` and `suggest` index metric (supports wildcards). + :arg fielddata_fields: Comma-separated list of fields for + `fielddata` index metric (supports wildcards). + :arg fields: Comma-separated list of fields for `fielddata` and + `completion` index metric (supports wildcards). + :arg groups: Comma-separated list of search groups for `search` + index metric. :arg include_segment_file_sizes: Whether to report the aggregated disk usage of each one of the Lucene index files (only - applies if segment stats are requested) - :arg include_unloaded_segments: If set to true segment stats - will include stats for segments that are not currently loaded into - memory + applies if segment stats are requested). Default is false. :arg level: Return indices stats aggregated at index, node or - shard level Valid choices: indices, node, shards Default: node - :arg timeout: Explicit operation timeout - :arg types: A comma-separated list of document types for the - `indexing` index metric + shard level. Valid choices are indices, node, shards. + :arg timeout: Operation timeout. + :arg types: Comma-separated list of document types for the + `indexing` index metric. """ return self.transport.perform_request( "GET", @@ -133,26 +157,31 @@ def stats( @query_params( "doc_type", "ignore_idle_threads", "interval", "snapshots", "threads", "timeout" ) - def hot_threads(self, node_id=None, params=None, headers=None): + def hot_threads( + self, + node_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about hot threads on each node in the cluster. 
- :arg node_id: A comma-separated list of node IDs or names to - limit the returned information; use `_local` to return information from - the node you're connecting to, leave empty to get information from all - nodes - :arg doc_type: The type to sample (default: cpu) Valid choices: - cpu, wait, block + :arg node_id: Comma-separated list of node IDs or names to limit + the returned information; use `_local` to return information from the + node you're connecting to, leave empty to get information from all + nodes. + :arg doc_type: The type to sample. Valid choices are cpu, wait, + block. :arg ignore_idle_threads: Don't show threads that are in known- idle places, such as waiting on a socket select or pulling from an empty - task queue (default: true) - :arg interval: The interval for the second sampling of threads - :arg snapshots: Number of samples of thread stacktrace (default: - 10) + task queue. Default is True. + :arg interval: The interval for the second sampling of threads. + :arg snapshots: Number of samples of thread stacktrace. Default + is 10. :arg threads: Specify the number of threads to provide - information for (default: 3) - :arg timeout: Explicit operation timeout + information for. Default is 3. + :arg timeout: Operation timeout. """ # type is a reserved word so it cannot be used, use doc_type instead if "doc_type" in params: @@ -166,18 +195,24 @@ def hot_threads(self, node_id=None, params=None, headers=None): ) @query_params("timeout") - def usage(self, node_id=None, metric=None, params=None, headers=None): + def usage( + self, + node_id: Any = None, + metric: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns low-level information about REST actions usage on nodes. - :arg node_id: A comma-separated list of node IDs or names to - limit the returned information; use `_local` to return information from - the node you're connecting to, leave empty to get information from all - nodes + :arg node_id: Comma-separated list of node IDs or names to limit + the returned information; use `_local` to return information from the + node you're connecting to, leave empty to get information from all + nodes. :arg metric: Limit the information returned to the specified - metrics Valid choices: _all, rest_actions - :arg timeout: Explicit operation timeout + metrics. Valid choices are _all, rest_actions. + :arg timeout: Operation timeout. """ return self.transport.perform_request( "GET", diff --git a/opensearchpy/client/nodes.pyi b/opensearchpy/client/nodes.pyi deleted file mode 100644 index d0f7beb4..00000000 --- a/opensearchpy/client/nodes.pyi +++ /dev/null @@ -1,140 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class NodesClient(NamespacedClient): - def reload_secure_settings( - self, - *, - body: Optional[Any] = ..., - node_id: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def info( - self, - *, - node_id: Optional[Any] = ..., - metric: Optional[Any] = ..., - flat_settings: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def stats( - self, - *, - node_id: Optional[Any] = ..., - metric: Optional[Any] = ..., - index_metric: Optional[Any] = ..., - completion_fields: Optional[Any] = ..., - fielddata_fields: Optional[Any] = ..., - fields: Optional[Any] = ..., - groups: Optional[Any] = ..., - include_segment_file_sizes: Optional[Any] = ..., - include_unloaded_segments: Optional[Any] = ..., - level: Optional[Any] = ..., - timeout: Optional[Any] = ..., - types: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
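For orientation, the `NodesClient` methods covered by the removed stubs read naturally with the documented keyword arguments; `_local` restricts output to the node the client is connected to. A sketch against a hypothetical local cluster:

from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://localhost:9200"],
    http_auth=("admin", "admin"),
    verify_certs=False,
)

# Node metadata for the connected node, plugins metric only.
info = client.nodes.info(node_id="_local", metric="plugins")

# JVM and thread-pool statistics, aggregated per node by default.
stats = client.nodes.stats(node_id="_local", metric="jvm,thread_pool")

# Plain-text hot-threads dump, sampling three threads (the default).
print(client.nodes.hot_threads(node_id="_local", threads=3))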
- def hot_threads( - self, - *, - node_id: Optional[Any] = ..., - doc_type: Optional[Any] = ..., - ignore_idle_threads: Optional[Any] = ..., - interval: Optional[Any] = ..., - snapshots: Optional[Any] = ..., - threads: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def usage( - self, - *, - node_id: Optional[Any] = ..., - metric: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/client/plugins.py b/opensearchpy/client/plugins.py index 7fba8c32..b12214d7 100644 --- a/opensearchpy/client/plugins.py +++ b/opensearchpy/client/plugins.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -7,16 +8,20 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. - import warnings +from typing import Any from ..plugins.alerting import AlertingClient from ..plugins.index_management import IndexManagementClient +from .client import Client from .utils import NamespacedClient class PluginsClient(NamespacedClient): - def __init__(self, client): + alerting: Any + index_management: Any + + def __init__(self, client: Client) -> None: super(PluginsClient, self).__init__(client) # self.query_workbench = QueryWorkbenchClient(client) # self.reporting = ReportingClient(client) @@ -28,7 +33,7 @@ def __init__(self, client): self._dynamic_lookup(client) - def _dynamic_lookup(self, client): + def _dynamic_lookup(self, client: Any) -> None: # Issue : https://github.com/opensearch-project/opensearch-py/issues/90#issuecomment-1003396742 plugins = [ @@ -45,9 +50,7 @@ def _dynamic_lookup(self, client): setattr(client, plugin, getattr(self, plugin)) else: warnings.warn( - "Cannot load `{plugin}` directly to OpenSearch. `{plugin}` already exists in OpenSearch. Please use `OpenSearch.plugin.{plugin}` instead.".format( - plugin=plugin - ), + f"Cannot load `{plugin}` directly to {self.client.__class__.__name__} as it already exists. Use `{self.client.__class__.__name__}.plugin.{plugin}` instead.",
category=RuntimeWarning, stacklevel=2, ) diff --git a/opensearchpy/client/plugins.pyi b/opensearchpy/client/plugins.pyi deleted file mode 100644 index 2e4b2630..00000000 --- a/opensearchpy/client/plugins.pyi +++ /dev/null @@ -1,18 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -from typing import Any - -from ..client import OpenSearch -from ..plugins.alerting import AlertingClient as AlertingClient -from .utils import NamespacedClient as NamespacedClient - -class PluginsClient(NamespacedClient): - alerting: Any - index_management: Any - def __init__(self, client: OpenSearch) -> None: ... diff --git a/opensearchpy/client/remote.py b/opensearchpy/client/remote.py index 3f483697..5c1c0f0c 100644 --- a/opensearchpy/client/remote.py +++ b/opensearchpy/client/remote.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,12 +26,14 @@ # under the License. +from typing import Any + from .utils import NamespacedClient, query_params class RemoteClient(NamespacedClient): @query_params() - def info(self, params=None, headers=None): + def info(self, params: Any = None, headers: Any = None) -> Any: return self.transport.perform_request( "GET", "/_remote/info", params=params, headers=headers ) diff --git a/opensearchpy/client/remote.pyi b/opensearchpy/client/remote.pyi deleted file mode 100644 index 949301a7..00000000 --- a/opensearchpy/client/remote.pyi +++ /dev/null @@ -1,45 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class RemoteClient(NamespacedClient): - def info( - self, - *, - timeout: Optional[Any] = None, - pretty: Optional[bool] = None, - human: Optional[bool] = None, - error_trace: Optional[bool] = None, - format: Optional[str] = None, - filter_path: Optional[Union[str, Collection[str]]] = None, - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = None, - headers: Optional[MutableMapping[str, str]] = None, - ) -> Any: ...
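Two behaviors from the hunks above, sketched with a hypothetical configured client: `PluginsClient._dynamic_lookup` mirrors plugin namespaces onto the client object itself, warning with the reworded message only when the attribute name is already taken, and `RemoteClient.info` is unchanged apart from annotations:

from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=["https://localhost:9200"],
    http_auth=("admin", "admin"),
    verify_certs=False,
)

# Cross-cluster connection info via the annotated RemoteClient.
print(client.remote.info())

# Plugin clients are reachable through the namespace or, thanks to
# `_dynamic_lookup`, directly on the client object.
print(client.plugins.alerting.get_alerts())
print(client.alerting.get_alerts())  # shorthand wired up by `_dynamic_lookup`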
diff --git a/opensearchpy/client/remote_store.py b/opensearchpy/client/remote_store.py new file mode 100644 index 00000000..a019a99c --- /dev/null +++ b/opensearchpy/client/remote_store.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + +from typing import Any + +from .utils import SKIP_IN_PATH, NamespacedClient, query_params + + +class RemoteStoreClient(NamespacedClient): + @query_params("cluster_manager_timeout", "wait_for_completion") + def restore( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Restores from remote store. + + + :arg body: Comma-separated list of index IDs + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. Default is false. + """ + if body in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'body'.") + + return self.transport.perform_request( + "POST", "/_remotestore/_restore", params=params, headers=headers, body=body + ) diff --git a/opensearchpy/client/security.py b/opensearchpy/client/security.py index 288f6676..6d1574ea 100644 --- a/opensearchpy/client/security.py +++ b/opensearchpy/client/security.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -7,46 +8,77 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -from ..client.utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + +from typing import Any + +from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class SecurityClient(NamespacedClient): + from ._patch import health_check, update_audit_config # type: ignore + @query_params() - def get_account_details(self, params=None, headers=None): + def get_account_details( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns account details for the current user. 
+ """ return self.transport.perform_request( - "GET", - _make_path("_plugins", "_security", "api", "account"), - params=params, - headers=headers, + "GET", "/_plugins/_security/api/account", params=params, headers=headers ) @query_params() - def change_password(self, body, params=None, headers=None): + def change_password( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Changes the password for the current user. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "PUT", - _make_path("_plugins", "_security", "api", "account"), + "/_plugins/_security/api/account", params=params, headers=headers, body=body, ) @query_params() - def get_action_group(self, action_group, params=None, headers=None): + def get_action_group( + self, + action_group: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves one action group. + + + :arg action_group: Action group to retrieve. """ if action_group in SKIP_IN_PATH: raise ValueError( - "Empty value passed for a required argument 'action-group'." + "Empty value passed for a required argument 'action_group'." ) return self.transport.perform_request( @@ -57,25 +89,38 @@ def get_action_group(self, action_group, params=None, headers=None): ) @query_params() - def get_action_groups(self, params=None, headers=None): + def get_action_groups( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves all action groups. + """ return self.transport.perform_request( "GET", - _make_path("_plugins", "_security", "api", "actiongroups"), + "/_plugins/_security/api/actiongroups/", params=params, headers=headers, ) @query_params() - def delete_action_group(self, action_group, params=None, headers=None): + def delete_action_group( + self, + action_group: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Deletes the specified action group. + Delete a specified action group. + + + :arg action_group: Action group to delete. """ if action_group in SKIP_IN_PATH: raise ValueError( - "Empty value passed for a required argument 'action-group'." + "Empty value passed for a required argument 'action_group'." ) return self.transport.perform_request( @@ -86,9 +131,19 @@ def delete_action_group(self, action_group, params=None, headers=None): ) @query_params() - def create_action_group(self, action_group, body, params=None, headers=None): + def create_action_group( + self, + action_group: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or replaces the specified action group. + + + :arg action_group: The name of the action group to create or + replace """ for param in (action_group, body): if param in SKIP_IN_PATH: @@ -103,9 +158,17 @@ def create_action_group(self, action_group, body, params=None, headers=None): ) @query_params() - def patch_action_group(self, action_group, body, params=None, headers=None): + def patch_action_group( + self, + action_group: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates individual attributes of an action group. 
+ + """ for param in (action_group, body): if param in SKIP_IN_PATH: @@ -120,25 +183,39 @@ def patch_action_group(self, action_group, body, params=None, headers=None): ) @query_params() - def patch_action_groups(self, body, params=None, headers=None): + def patch_action_groups( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates, updates, or deletes multiple action groups in a single call. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "PATCH", - _make_path("_plugins", "_security", "api", "actiongroups"), + "/_plugins/_security/api/actiongroups", params=params, headers=headers, body=body, ) @query_params() - def get_user(self, username, params=None, headers=None): + def get_user( + self, + username: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Retrieves one user. + Retrieve one internal user. + + """ if username in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'username'.") @@ -151,21 +228,33 @@ def get_user(self, username, params=None, headers=None): ) @query_params() - def get_users(self, params=None, headers=None): + def get_users( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Retrieves all users. + Retrieve all internal users. + """ return self.transport.perform_request( "GET", - _make_path("_plugins", "_security", "api", "internalusers"), + "/_plugins/_security/api/internalusers", params=params, headers=headers, ) @query_params() - def delete_user(self, username, params=None, headers=None): + def delete_user( + self, + username: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Deletes the specified user. + Delete the specified user. + + """ if username in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'username'.") @@ -178,9 +267,17 @@ def delete_user(self, username, params=None, headers=None): ) @query_params() - def create_user(self, username, body, params=None, headers=None): + def create_user( + self, + username: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or replaces the specified user. + + """ for param in (username, body): if param in SKIP_IN_PATH: @@ -195,9 +292,17 @@ def create_user(self, username, body, params=None, headers=None): ) @query_params() - def patch_user(self, username, body, params=None, headers=None): + def patch_user( + self, + username: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates individual attributes of an internal user. + + """ for param in (username, body): if param in SKIP_IN_PATH: @@ -212,25 +317,39 @@ def patch_user(self, username, body, params=None, headers=None): ) @query_params() - def patch_users(self, body, params=None, headers=None): + def patch_users( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates, updates, or deletes multiple internal users in a single call. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "PATCH", - _make_path("_plugins", "_security", "api", "internalusers"), + "/_plugins/_security/api/internalusers", params=params, headers=headers, body=body, ) @query_params() - def get_role(self, role, params=None, headers=None): + def get_role( + self, + role: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves one role. 
+ + """ if role in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'role'.") @@ -243,21 +362,30 @@ def get_role(self, role, params=None, headers=None): ) @query_params() - def get_roles(self, params=None, headers=None): + def get_roles( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves all roles. + """ return self.transport.perform_request( - "GET", - _make_path("_plugins", "_security", "api", "roles"), - params=params, - headers=headers, + "GET", "/_plugins/_security/api/roles/", params=params, headers=headers ) @query_params() - def delete_role(self, role, params=None, headers=None): + def delete_role( + self, + role: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Deletes the specified role. + Delete the specified role. + + """ if role in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'role'.") @@ -270,9 +398,17 @@ def delete_role(self, role, params=None, headers=None): ) @query_params() - def create_role(self, role, body, params=None, headers=None): + def create_role( + self, + role: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or replaces the specified role. + + """ for param in (role, body): if param in SKIP_IN_PATH: @@ -287,9 +423,17 @@ def create_role(self, role, body, params=None, headers=None): ) @query_params() - def patch_role(self, role, body, params=None, headers=None): + def patch_role( + self, + role: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates individual attributes of a role. + + """ for param in (role, body): if param in SKIP_IN_PATH: @@ -304,25 +448,39 @@ def patch_role(self, role, body, params=None, headers=None): ) @query_params() - def patch_roles(self, body, params=None, headers=None): + def patch_roles( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates, updates, or deletes multiple roles in a single call. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "PATCH", - _make_path("_plugins", "_security", "api", "roles"), + "/_plugins/_security/api/roles", params=params, headers=headers, body=body, ) @query_params() - def get_role_mapping(self, role, params=None, headers=None): + def get_role_mapping( + self, + role: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves one role mapping. + + """ if role in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'role'.") @@ -335,21 +493,33 @@ def get_role_mapping(self, role, params=None, headers=None): ) @query_params() - def get_role_mappings(self, params=None, headers=None): + def get_role_mappings( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves all role mappings. + """ return self.transport.perform_request( "GET", - _make_path("_plugins", "_security", "api", "rolesmapping"), + "/_plugins/_security/api/rolesmapping", params=params, headers=headers, ) @query_params() - def delete_role_mapping(self, role, params=None, headers=None): + def delete_role_mapping( + self, + role: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes the specified role mapping. 
+ + """ if role in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'role'.") @@ -362,9 +532,17 @@ def delete_role_mapping(self, role, params=None, headers=None): ) @query_params() - def create_role_mapping(self, role, body, params=None, headers=None): + def create_role_mapping( + self, + role: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or replaces the specified role mapping. + + """ for param in (role, body): if param in SKIP_IN_PATH: @@ -379,9 +557,17 @@ def create_role_mapping(self, role, body, params=None, headers=None): ) @query_params() - def patch_role_mapping(self, role, body, params=None, headers=None): + def patch_role_mapping( + self, + role: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates individual attributes of a role mapping. + + """ for param in (role, body): if param in SKIP_IN_PATH: @@ -396,25 +582,39 @@ def patch_role_mapping(self, role, body, params=None, headers=None): ) @query_params() - def patch_role_mappings(self, body, params=None, headers=None): + def patch_role_mappings( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or updates multiple role mappings in a single call. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "PATCH", - _make_path("_plugins", "_security", "api", "rolesmapping"), + "/_plugins/_security/api/rolesmapping", params=params, headers=headers, body=body, ) @query_params() - def get_tenant(self, tenant, params=None, headers=None): + def get_tenant( + self, + tenant: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves one tenant. + + """ if tenant in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'tenant'.") @@ -427,21 +627,30 @@ def get_tenant(self, tenant, params=None, headers=None): ) @query_params() - def get_tenants(self, params=None, headers=None): + def get_tenants( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves all tenants. + """ return self.transport.perform_request( - "GET", - _make_path("_plugins", "_security", "api", "tenants"), - params=params, - headers=headers, + "GET", "/_plugins/_security/api/tenants/", params=params, headers=headers ) @query_params() - def delete_tenant(self, tenant, params=None, headers=None): + def delete_tenant( + self, + tenant: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Deletes the specified tenant. + Delete the specified tenant. + + """ if tenant in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'tenant'.") @@ -454,9 +663,17 @@ def delete_tenant(self, tenant, params=None, headers=None): ) @query_params() - def create_tenant(self, tenant, body, params=None, headers=None): + def create_tenant( + self, + tenant: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates or replaces the specified tenant. + + """ for param in (tenant, body): if param in SKIP_IN_PATH: @@ -471,9 +688,17 @@ def create_tenant(self, tenant, body, params=None, headers=None): ) @query_params() - def patch_tenant(self, tenant, body, params=None, headers=None): + def patch_tenant( + self, + tenant: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Add, delete, or modify a single tenant. 
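The mapping and tenant methods pair naturally with the role example above; all names below are assumptions:

```python
# Attach backend roles and users to a role, then create a tenant.
client.security.create_role_mapping(
    role="reports-role",
    body={"backend_roles": ["analytics"], "users": ["test-user"]},
)
client.security.create_tenant(
    tenant="analytics",
    body={"description": "Shared analytics tenant"},
)
print(client.security.get_tenants())
```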
+ + """ for param in (tenant, body): if param in SKIP_IN_PATH: @@ -488,69 +713,102 @@ def patch_tenant(self, tenant, body, params=None, headers=None): ) @query_params() - def patch_tenants(self, body, params=None, headers=None): + def patch_tenants( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Add, delete, or modify multiple tenants in a single call. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "PATCH", - _make_path("_plugins", "_security", "api", "tenants"), + "/_plugins/_security/api/tenants/", params=params, headers=headers, body=body, ) @query_params() - def get_configuration(self, params=None, headers=None): + def get_configuration( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Retrieves the current Security plugin configuration in JSON format. + Returns the current Security plugin configuration in JSON format. + """ return self.transport.perform_request( "GET", - _make_path("_plugins", "_security", "api", "securityconfig"), + "/_plugins/_security/api/securityconfig", params=params, headers=headers, ) @query_params() - def update_configuration(self, body, params=None, headers=None): + def update_configuration( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Retrieves the current Security plugin configuration in JSON format. + Adds or updates the existing configuration using the REST API. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "PUT", - _make_path("_plugins", "_security", "api", "securityconfig", "config"), + "/_plugins/_security/api/securityconfig/config", params=params, headers=headers, body=body, ) @query_params() - def patch_configuration(self, body, params=None, headers=None): + def patch_configuration( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Updates the existing configuration using the REST API. + A PATCH call is used to update the existing configuration using the REST API. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "PATCH", - _make_path("_plugins", "_security", "api", "securityconfig"), + "/_plugins/_security/api/securityconfig", params=params, headers=headers, body=body, ) @query_params() - def get_distinguished_names(self, cluster_name=None, params=None, headers=None): + def get_distinguished_names( + self, + cluster_name: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Retrieves all distinguished names in the allow list. + + """ return self.transport.perform_request( "GET", @@ -560,13 +818,23 @@ def get_distinguished_names(self, cluster_name=None, params=None, headers=None): ) @query_params() - def update_distinguished_names(self, cluster_name, body, params=None, headers=None): + def update_distinguished_names( + self, + cluster_name: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Adds or updates the specified distinguished names in the cluster's or node's allow list. + Adds or updates the specified distinguished names in the cluster’s or node’s + allow list. 
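Worth calling out: `update_distinguished_names` now validates only `cluster_name`, so `body` is genuinely optional. A sketch with a made-up DN; the dynamic `nodesdn` API also has to be enabled on the cluster for these calls to succeed:

```python
# Allow-list a node DN for cluster "cluster-1" (values are placeholders).
client.security.update_distinguished_names(
    cluster_name="cluster-1",
    body={"nodes_dn": ["CN=cluster-1.example.com"]},
)
print(client.security.get_distinguished_names())
```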
+ + """ - for param in (cluster_name, body): - if param in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument.") + if cluster_name in SKIP_IN_PATH: + raise ValueError( + "Empty value passed for a required argument 'cluster_name'." + ) return self.transport.perform_request( "PUT", @@ -577,13 +845,21 @@ def update_distinguished_names(self, cluster_name, body, params=None, headers=No ) @query_params() - def delete_distinguished_names(self, cluster_name, params=None, headers=None): + def delete_distinguished_names( + self, + cluster_name: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Deletes all distinguished names in the specified cluster's or node's allow list. + Deletes all distinguished names in the specified cluster’s or node’s allow + list. + + """ if cluster_name in SKIP_IN_PATH: raise ValueError( - "Empty value passed for a required argument 'cluster-name'." + "Empty value passed for a required argument 'cluster_name'." ) return self.transport.perform_request( @@ -594,106 +870,159 @@ def delete_distinguished_names(self, cluster_name, params=None, headers=None): ) @query_params() - def get_certificates(self, params=None, headers=None): + def get_certificates( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Retrieves the cluster's security certificates. + Retrieves the cluster’s security certificates. + """ return self.transport.perform_request( - "GET", - _make_path("_plugins", "_security", "api", "ssl", "certs"), - params=params, - headers=headers, + "GET", "/_plugins/_security/api/ssl/certs", params=params, headers=headers ) @query_params() - def reload_transport_certificates(self, params=None, headers=None): + def reload_transport_certificates( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Reloads SSL certificates that are about to expire without restarting the OpenSearch node. + Reload transport layer communication certificates. + """ return self.transport.perform_request( "PUT", - _make_path( - "_opendistro", "_security", "api", "ssl", "transport", "reloadcerts" - ), + "/_plugins/_security/api/ssl/transport/reloadcerts", params=params, headers=headers, ) @query_params() - def reload_http_certificates(self, params=None, headers=None): + def reload_http_certificates( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - Reloads SSL certificates that are about to expire without restarting the OpenSearch node. + Reload HTTP layer communication certificates. + """ return self.transport.perform_request( "PUT", - _make_path("_opendistro", "_security", "api", "ssl", "http", "reloadcerts"), + "/_plugins/_security/api/ssl/http/reloadcerts", params=params, headers=headers, ) @query_params() - def flush_cache(self, params=None, headers=None): + def flush_cache( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Flushes the Security plugin user, authentication, and authorization cache. + """ return self.transport.perform_request( - "DELETE", - _make_path("_plugins", "_security", "api", "cache"), - params=params, - headers=headers, + "DELETE", "/_plugins/_security/api/cache", params=params, headers=headers ) @query_params() - def health_check(self, params=None, headers=None): + def health( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Checks to see if the Security plugin is up and running. 
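Since `health()` replaces `health_check()` in this hunk, a two-liner shows the intended use together with the cache flush:

```python
# Probe the plugin, then drop the user/auth caches (no body on either call).
if client.security.health().get("status") == "UP":
    client.security.flush_cache()
```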
+ """ return self.transport.perform_request( - "GET", - _make_path("_plugins", "_security", "health"), - params=params, - headers=headers, + "GET", "/_plugins/_security/health", params=params, headers=headers ) @query_params() - def get_audit_configuration(self, params=None, headers=None): + def get_audit_configuration( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ - A GET call retrieves the audit configuration. + Retrieves the audit configuration. + """ return self.transport.perform_request( - "GET", - _make_path("_opendistro", "_security", "api", "audit"), - params=params, - headers=headers, + "GET", "/_plugins/_security/api/audit", params=params, headers=headers ) @query_params() - def update_audit_config(self, body, params=None, headers=None): + def update_audit_configuration( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ - A PUT call updates the audit configuration. + Updates the audit configuration. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "PUT", - _make_path("_opendistro", "_security", "api", "audit", "config"), + "/_plugins/_security/api/audit/config", params=params, headers=headers, body=body, ) @query_params() - def patch_audit_configuration(self, body, params=None, headers=None): + def patch_audit_configuration( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ A PATCH call is used to update specified fields in the audit configuration. + + + """ + if body in SKIP_IN_PATH: + raise ValueError("Empty value passed for a required argument 'body'.") + + return self.transport.perform_request( + "PATCH", + "/_plugins/_security/api/audit", + params=params, + headers=headers, + body=body, + ) + + @query_params() + def patch_distinguished_names( + self, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: + """ + Bulk update of distinguished names. + + """ if body in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'body'.") return self.transport.perform_request( "PATCH", - _make_path("_opendistro", "_security", "api", "audit"), + "/_plugins/_security/api/nodesdn", params=params, headers=headers, body=body, diff --git a/opensearchpy/client/security.pyi b/opensearchpy/client/security.pyi deleted file mode 100644 index de50b8b2..00000000 --- a/opensearchpy/client/security.pyi +++ /dev/null @@ -1,206 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -from typing import Any, Union - -from ..client.utils import NamespacedClient as NamespacedClient - -class SecurityClient(NamespacedClient): - def get_account_details( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def change_password( - self, - body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def get_action_group( - self, - action_group: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def get_action_groups( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... 
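`update_audit_configuration()` likewise supersedes `update_audit_config()`, and `patch_distinguished_names()` is new here. The read-modify-write below assumes the GET response nests the editable settings under a `config` key, and the JSON Patch path is illustrative:

```python
# Flip auditing on without clobbering the rest of the configuration.
current = client.security.get_audit_configuration().get("config", {})
current["enabled"] = True
client.security.update_audit_configuration(body=current)

# Bulk-edit node DNs with a JSON Patch document.
client.security.patch_distinguished_names(
    body=[
        {
            "op": "add",
            "path": "/cluster-1",
            "value": {"nodes_dn": ["CN=cluster-1.example.com"]},
        }
    ]
)
```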
- def delete_action_group( - self, - action_group: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def create_action_group( - self, - action_group: Any, - body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def patch_action_group( - self, - action_group: Any, - body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def patch_action_groups( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def get_user( - self, - username: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def get_users( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def delete_user( - self, - username: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def create_user( - self, - username: Any, - body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def patch_user( - self, - username: Any, - body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def patch_users( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def get_role( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def get_roles( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def delete_role( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def create_role( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def patch_role( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def patch_roles( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def get_role_mapping( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def get_role_mappings( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def delete_role_mapping( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def create_role_mapping( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def patch_role_mapping( - self, role: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def patch_role_mappings( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def get_tenant( - self, - tenant: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def get_tenants( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def delete_tenant( - self, - tenant: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... 
- def create_tenant( - self, - tenant: Any, - body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def patch_tenant( - self, - tenant: Any, - body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def patch_tenants( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def get_configuration( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def update_configuration( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def patch_configuration( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def get_distinguished_names( - self, - cluster_name: Union[Any, None] = ..., - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def update_distinguished_names( - self, - cluster_name: Any, - body: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def delete_distinguished_names( - self, - cluster_name: Any, - params: Union[Any, None] = ..., - headers: Union[Any, None] = ..., - ) -> Union[bool, Any]: ... - def get_certificates( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def reload_transport_certificates( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def reload_http_certificates( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def flush_cache( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def health_check( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def get_audit_configuration( - self, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def update_audit_config( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... - def patch_audit_configuration( - self, body: Any, params: Union[Any, None] = ..., headers: Union[Any, None] = ... - ) -> Union[bool, Any]: ... diff --git a/opensearchpy/client/snapshot.py b/opensearchpy/client/snapshot.py index 200fff96..fe6536fa 100644 --- a/opensearchpy/client/snapshot.py +++ b/opensearchpy/client/snapshot.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,25 +26,45 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
+# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + +from typing import Any + from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class SnapshotClient(NamespacedClient): - @query_params("master_timeout", "cluster_manager_timeout", "wait_for_completion") - def create(self, repository, snapshot, body=None, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "wait_for_completion") + def create( + self, + repository: Any, + snapshot: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates a snapshot in a repository. - :arg repository: A repository name - :arg snapshot: A snapshot name + :arg repository: Repository name. + :arg snapshot: Snapshot name. :arg body: The snapshot definition - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg wait_for_completion: Should this request wait until the - operation has completed before returning + operation has completed before returning. Default is false. """ for param in (repository, snapshot): if param in SKIP_IN_PATH: @@ -57,18 +78,25 @@ def create(self, repository, snapshot, body=None, params=None, headers=None): body=body, ) - @query_params("master_timeout", "cluster_manager_timeout") - def delete(self, repository, snapshot, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout") + def delete( + self, + repository: Any, + snapshot: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes a snapshot. - :arg repository: A repository name - :arg snapshot: A snapshot name - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + :arg repository: Repository name. + :arg snapshot: Snapshot name. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ for param in (repository, snapshot): if param in SKIP_IN_PATH: @@ -82,33 +110,31 @@ def delete(self, repository, snapshot, params=None, headers=None): ) @query_params( - "ignore_unavailable", - "include_repository", - "index_details", - "master_timeout", - "cluster_manager_timeout", - "verbose", + "cluster_manager_timeout", "ignore_unavailable", "master_timeout", "verbose" ) - def get(self, repository, snapshot, params=None, headers=None): + def get( + self, + repository: Any, + snapshot: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about a snapshot. 
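The reordered `@query_params` tuples are cosmetic, since these are keyword arguments rather than positional ones, so existing callers are unaffected. A snapshot round trip under assumed repository and snapshot names:

```python
# Snapshot selected indices and block until the snapshot completes.
client.snapshot.create(
    repository="my-backups",
    snapshot="snapshot-1",
    body={"indices": "my-index-*", "include_global_state": False},
    wait_for_completion=True,
)
client.snapshot.delete(repository="my-backups", snapshot="snapshot-1")
```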
- :arg repository: A repository name - :arg snapshot: A comma-separated list of snapshot names + :arg repository: Repository name. + :arg snapshot: Comma-separated list of snapshot names. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg ignore_unavailable: Whether to ignore unavailable snapshots, defaults to false which means a SnapshotMissingException is - thrown - :arg include_repository: Whether to include the repository name - in the snapshot info. Defaults to true. - :arg index_details: Whether to include details of each index in - the snapshot, if those details are available. Defaults to false. - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + thrown. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg verbose: Whether to show verbose snapshot info or only show - the basic info found in the repository index blob + the basic info found in the repository index blob. """ for param in (repository, snapshot): if param in SKIP_IN_PATH: @@ -121,19 +147,25 @@ def get(self, repository, snapshot, params=None, headers=None): headers=headers, ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") - def delete_repository(self, repository, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "timeout") + def delete_repository( + self, + repository: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Deletes a repository. :arg repository: Name of the snapshot repository to unregister. Wildcard (`*`) patterns are supported. - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if repository in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'repository'.") @@ -145,38 +177,51 @@ def delete_repository(self, repository, params=None, headers=None): headers=headers, ) - @query_params("local", "master_timeout", "cluster_manager_timeout") - def get_repository(self, repository=None, params=None, headers=None): + @query_params("cluster_manager_timeout", "local", "master_timeout") + def get_repository( + self, + repository: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about a repository. - :arg repository: A comma-separated list of repository names + :arg repository: Comma-separated list of repository names. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. 
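Note that `include_repository` and `index_details` are dropped from the generated `get()`. The surviving parameters in use, with placeholder names:

```python
# Fetch several snapshots at once, skipping any that are missing.
info = client.snapshot.get(
    repository="my-backups",
    snapshot="snapshot-1,snapshot-2",
    ignore_unavailable=True,
    verbose=True,
)
for snap in info.get("snapshots", []):
    print(snap["snapshot"], snap["state"])
```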
:arg local: Return local information, do not retrieve the state - from cluster_manager node (default: false) - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + from cluster-manager node. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return self.transport.perform_request( "GET", _make_path("_snapshot", repository), params=params, headers=headers ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout", "verify") - def create_repository(self, repository, body, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "timeout", "verify") + def create_repository( + self, + repository: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Creates a repository. - :arg repository: A repository name + :arg repository: Repository name. :arg body: The repository definition - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout - :arg verify: Whether to verify the repository after creation + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. + :arg verify: Whether to verify the repository after creation. """ for param in (repository, body): if param in SKIP_IN_PATH: @@ -190,21 +235,29 @@ def create_repository(self, repository, body, params=None, headers=None): body=body, ) - @query_params("master_timeout", "cluster_manager_timeout", "wait_for_completion") - def restore(self, repository, snapshot, body=None, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "wait_for_completion") + def restore( + self, + repository: Any, + snapshot: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Restores a snapshot. - :arg repository: A repository name - :arg snapshot: A snapshot name + :arg repository: Repository name. + :arg snapshot: Snapshot name. :arg body: Details of what to restore - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. :arg wait_for_completion: Should this request wait until the - operation has completed before returning + operation has completed before returning. Default is false. 
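Registering a repository has to precede the calls above; the `fs` location is an assumption and must be listed under `path.repo` on every node:

```python
# Register and verify a shared-filesystem repository.
client.snapshot.create_repository(
    repository="my-backups",
    body={"type": "fs", "settings": {"location": "/mnt/snapshots"}},
    verify=True,
)
print(client.snapshot.get_repository(repository="my-backups"))
```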
""" for param in (repository, snapshot): if param in SKIP_IN_PATH: @@ -218,21 +271,28 @@ def restore(self, repository, snapshot, body=None, params=None, headers=None): body=body, ) - @query_params("ignore_unavailable", "master_timeout", "cluster_manager_timeout") - def status(self, repository=None, snapshot=None, params=None, headers=None): + @query_params("cluster_manager_timeout", "ignore_unavailable", "master_timeout") + def status( + self, + repository: Any = None, + snapshot: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about the status of a snapshot. - :arg repository: A repository name - :arg snapshot: A comma-separated list of snapshot names + :arg repository: Repository name. + :arg snapshot: Comma-separated list of snapshot names. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. :arg ignore_unavailable: Whether to ignore unavailable snapshots, defaults to false which means a SnapshotMissingException is - thrown - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + thrown. Default is false. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ return self.transport.perform_request( "GET", @@ -241,18 +301,24 @@ def status(self, repository=None, snapshot=None, params=None, headers=None): headers=headers, ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") - def verify_repository(self, repository, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "timeout") + def verify_repository( + self, + repository: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Verifies a repository. - :arg repository: A repository name - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + :arg repository: Repository name. + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if repository in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'repository'.") @@ -264,18 +330,24 @@ def verify_repository(self, repository, params=None, headers=None): headers=headers, ) - @query_params("master_timeout", "cluster_manager_timeout", "timeout") - def cleanup_repository(self, repository, params=None, headers=None): + @query_params("cluster_manager_timeout", "master_timeout", "timeout") + def cleanup_repository( + self, + repository: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Removes stale data from repository. - :arg repository: A repository name - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node - :arg timeout: Explicit operation timeout + :arg repository: Repository name. 
+ :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. + :arg timeout: Operation timeout. """ if repository in SKIP_IN_PATH: raise ValueError("Empty value passed for a required argument 'repository'.") @@ -287,22 +359,29 @@ def cleanup_repository(self, repository, params=None, headers=None): headers=headers, ) - @query_params("master_timeout", "cluster_manager_timeout") + @query_params("cluster_manager_timeout", "master_timeout") def clone( - self, repository, snapshot, target_snapshot, body, params=None, headers=None - ): + self, + repository: Any, + snapshot: Any, + target_snapshot: Any, + body: Any, + params: Any = None, + headers: Any = None, + ) -> Any: """ Clones indices from one snapshot into another snapshot in the same repository. - :arg repository: A repository name - :arg snapshot: The name of the snapshot to clone from - :arg target_snapshot: The name of the cloned snapshot to create + :arg repository: Repository name. + :arg snapshot: Snapshot name. + :arg target_snapshot: The name of the cloned snapshot to create. :arg body: The snapshot clone definition - :arg master_timeout (Deprecated: use cluster_manager_timeout): Explicit operation timeout for connection - to master node - :arg cluster_manager_timeout: Explicit operation timeout for connection - to cluster_manager node + :arg cluster_manager_timeout: Operation timeout for connection + to cluster-manager node. + :arg master_timeout (Deprecated: To promote inclusive language, + use 'cluster_manager_timeout' instead.): Operation timeout for + connection to master node. """ for param in (repository, snapshot, target_snapshot, body): if param in SKIP_IN_PATH: @@ -315,56 +394,3 @@ def clone( headers=headers, body=body, ) - - @query_params( - "blob_count", - "concurrency", - "detailed", - "early_read_node_count", - "max_blob_size", - "max_total_data_size", - "rare_action_probability", - "rarely_abort_writes", - "read_node_count", - "seed", - "timeout", - ) - def repository_analyze(self, repository, params=None, headers=None): - """ - Analyzes a repository for correctness and performance - - - :arg repository: A repository name - :arg blob_count: Number of blobs to create during the test. - Defaults to 100. - :arg concurrency: Number of operations to run concurrently - during the test. Defaults to 10. - :arg detailed: Whether to return detailed results or a summary. - Defaults to 'false' so that only the summary is returned. - :arg early_read_node_count: Number of nodes on which to perform - an early read on a blob, i.e. before writing has completed. Early reads - are rare actions so the 'rare_action_probability' parameter is also - relevant. Defaults to 2. - :arg max_blob_size: Maximum size of a blob to create during the - test, e.g '1gb' or '100mb'. Defaults to '10mb'. - :arg max_total_data_size: Maximum total size of all blobs to - create during the test, e.g '1tb' or '100gb'. Defaults to '1gb'. - :arg rare_action_probability: Probability of taking a rare - action such as an early read or an overwrite. Defaults to 0.02. - :arg rarely_abort_writes: Whether to rarely abort writes before - they complete. Defaults to 'true'. - :arg read_node_count: Number of nodes on which to read a blob - after writing. Defaults to 10. - :arg seed: Seed for the random number generator used to create - the test workload. Defaults to a random value. 
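`cleanup_repository()` and `clone()` round out the namespace; `repository_analyze`, removed in the hunk above, presumably fell away because it is not part of the OpenSearch API specification. Names again placeholders:

```python
# Purge stale blobs, then clone selected indices into a new snapshot.
client.snapshot.cleanup_repository(repository="my-backups")
client.snapshot.clone(
    repository="my-backups",
    snapshot="snapshot-1",
    target_snapshot="snapshot-1-clone",
    body={"indices": "my-index-*"},
)
```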
- :arg timeout: Explicit operation timeout. Defaults to '30s'. - """ - if repository in SKIP_IN_PATH: - raise ValueError("Empty value passed for a required argument 'repository'.") - - return self.transport.perform_request( - "POST", - _make_path("_snapshot", repository, "_analyze"), - params=params, - headers=headers, - ) diff --git a/opensearchpy/client/snapshot.pyi b/opensearchpy/client/snapshot.pyi deleted file mode 100644 index b1db95c0..00000000 --- a/opensearchpy/client/snapshot.pyi +++ /dev/null @@ -1,292 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class SnapshotClient(NamespacedClient): - def create( - self, - repository: Any, - snapshot: Any, - *, - body: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete( - self, - repository: Any, - snapshot: Any, - *, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def get( - self, - repository: Any, - snapshot: Any, - *, - ignore_unavailable: Optional[Any] = ..., - include_repository: Optional[Any] = ..., - index_details: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - verbose: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def delete_repository( - self, - repository: Any, - *, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def get_repository( - self, - *, - repository: Optional[Any] = ..., - local: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def create_repository( - self, - repository: Any, - *, - body: Any, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - verify: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def restore( - self, - repository: Any, - snapshot: Any, - *, - body: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def status( - self, - *, - repository: Optional[Any] = ..., - snapshot: Optional[Any] = ..., - ignore_unavailable: Optional[Any] = ..., - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def verify_repository( - self, - repository: Any, - *, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def cleanup_repository( - self, - repository: Any, - *, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def clone( - self, - repository: Any, - snapshot: Any, - target_snapshot: Any, - *, - body: Any, - master_timeout: Optional[Any] = ..., - cluster_manager_timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def repository_analyze( - self, - repository: Any, - *, - blob_count: Optional[Any] = ..., - concurrency: Optional[Any] = ..., - detailed: Optional[Any] = ..., - early_read_node_count: Optional[Any] = ..., - max_blob_size: Optional[Any] = ..., - max_total_data_size: Optional[Any] = ..., - rare_action_probability: Optional[Any] = ..., - rarely_abort_writes: Optional[Any] = ..., - read_node_count: Optional[Any] = ..., - seed: Optional[Any] = ..., - timeout: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/client/tasks.py b/opensearchpy/client/tasks.py index fff32dd7..7e675233 100644 --- a/opensearchpy/client/tasks.py +++ b/opensearchpy/client/tasks.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,7 +26,18 @@ # under the License. +# ---------------------------------------------------- +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. +# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": +# https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py +# or the "OpenSearch API specification" available at: +# https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json +# ----------------------------------------------------- + + import warnings +from typing import Any from .utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params @@ -40,57 +52,58 @@ class TasksClient(NamespacedClient): "timeout", "wait_for_completion", ) - def list(self, params=None, headers=None): + def list( + self, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns a list of tasks. - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version - - :arg actions: A comma-separated list of actions that should be + :arg actions: Comma-separated list of actions that should be returned. Leave empty to return all. - :arg detailed: Return detailed task information (default: false) + :arg detailed: Return detailed task information. Default is + false. 
:arg group_by: Group tasks by nodes or parent/child - relationships Valid choices: nodes, parents, none Default: nodes - :arg nodes: A comma-separated list of node IDs or names to limit + relationships. Valid choices are nodes, parents, none. + :arg nodes: Comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the - node you're connecting to, leave empty to get information from all nodes + node you're connecting to, leave empty to get information from all + nodes. :arg parent_task_id: Return tasks with specified parent task id (node_id:task_number). Set to -1 to return all. - :arg timeout: Explicit operation timeout - :arg wait_for_completion: Wait for the matching tasks to - complete (default: false) + :arg timeout: Operation timeout. + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. Default is false. """ return self.transport.perform_request( "GET", "/_tasks", params=params, headers=headers ) @query_params("actions", "nodes", "parent_task_id", "wait_for_completion") - def cancel(self, task_id=None, params=None, headers=None): + def cancel( + self, + task_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Cancels a task, if it can be cancelled through an API. - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version - :arg task_id: Cancel the task with specified task id - (node_id:task_number) - :arg actions: A comma-separated list of actions that should be + (node_id:task_number). + :arg actions: Comma-separated list of actions that should be cancelled. Leave empty to cancel all. - :arg nodes: A comma-separated list of node IDs or names to limit + :arg nodes: Comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the - node you're connecting to, leave empty to get information from all nodes + node you're connecting to, leave empty to get information from all + nodes. :arg parent_task_id: Cancel tasks with specified parent task id (node_id:task_number). Set to -1 to cancel all. - :arg wait_for_completion: Should the request block until the - cancellation of the task and its descendant tasks is completed. Defaults - to false + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. Default is false. """ return self.transport.perform_request( "POST", @@ -100,21 +113,21 @@ def cancel(self, task_id=None, params=None, headers=None): ) @query_params("timeout", "wait_for_completion") - def get(self, task_id=None, params=None, headers=None): + def get( + self, + task_id: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Returns information about a task. - .. warning:: - - This API is **experimental** so may include breaking changes - or be removed in a future version - :arg task_id: Return the task with specified id - (node_id:task_number) - :arg timeout: Explicit operation timeout - :arg wait_for_completion: Wait for the matching tasks to - complete (default: false) + (node_id:task_number). + :arg timeout: Operation timeout. + :arg wait_for_completion: Should this request wait until the + operation has completed before returning. Default is false. 
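The tasks docstrings lose the experimental warning and gain explicit defaults. Typical use of `list()` and `cancel()`, with the action pattern as a placeholder:

```python
# Walk currently running tasks grouped by node (the default grouping).
for node in client.tasks.list(detailed=True).get("nodes", {}).values():
    for task_id, task in node.get("tasks", {}).items():
        print(task_id, task["action"])

# Cancel every task whose action matches a pattern rather than a single id.
client.tasks.cancel(actions="*reindex")
```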
""" if task_id in SKIP_IN_PATH: warnings.warn( diff --git a/opensearchpy/client/tasks.pyi b/opensearchpy/client/tasks.pyi deleted file mode 100644 index 3577bae3..00000000 --- a/opensearchpy/client/tasks.pyi +++ /dev/null @@ -1,94 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Collection, MutableMapping, Optional, Tuple, Union - -from .utils import NamespacedClient - -class TasksClient(NamespacedClient): - def list( - self, - *, - actions: Optional[Any] = ..., - detailed: Optional[Any] = ..., - group_by: Optional[Any] = ..., - nodes: Optional[Any] = ..., - parent_task_id: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... - def cancel( - self, - *, - task_id: Optional[Any] = ..., - actions: Optional[Any] = ..., - nodes: Optional[Any] = ..., - parent_task_id: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... 
- def get( - self, - *, - task_id: Optional[Any] = ..., - timeout: Optional[Any] = ..., - wait_for_completion: Optional[Any] = ..., - pretty: Optional[bool] = ..., - human: Optional[bool] = ..., - error_trace: Optional[bool] = ..., - format: Optional[str] = ..., - filter_path: Optional[Union[str, Collection[str]]] = ..., - request_timeout: Optional[Union[int, float]] = ..., - ignore: Optional[Union[int, Collection[int]]] = ..., - opaque_id: Optional[str] = ..., - http_auth: Optional[Union[str, Tuple[str, str]]] = ..., - api_key: Optional[Union[str, Tuple[str, str]]] = ..., - params: Optional[MutableMapping[str, Any]] = ..., - headers: Optional[MutableMapping[str, str]] = ..., - ) -> Any: ... diff --git a/opensearchpy/client/utils.py b/opensearchpy/client/utils.py index a5e99b11..0663fd1d 100644 --- a/opensearchpy/client/utils.py +++ b/opensearchpy/client/utils.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -31,14 +32,17 @@ import weakref from datetime import date, datetime from functools import wraps +from typing import Any, Callable, Optional -from ..compat import PY2, quote, string_types, to_bytes, to_str, unquote, urlparse +from opensearchpy.serializer import Serializer + +from ..compat import quote, string_types, to_bytes, to_str, unquote, urlparse # parts of URL to be omitted -SKIP_IN_PATH = (None, "", b"", [], ()) +SKIP_IN_PATH: Any = (None, "", b"", [], ()) -def _normalize_hosts(hosts): +def _normalize_hosts(hosts: Any) -> Any: """ Helper function to transform hosts argument to :class:`~opensearchpy.OpenSearch` to a list of dicts. @@ -56,7 +60,7 @@ def _normalize_hosts(hosts): for host in hosts: if isinstance(host, string_types): if "://" not in host: - host = "//%s" % host + host = "//%s" % host # type: ignore parsed_url = urlparse(host) h = {"host": parsed_url.hostname} @@ -83,7 +87,7 @@ def _normalize_hosts(hosts): return out -def _escape(value): +def _escape(value: Any) -> Any: """ Escape a single value of a URL string or a query parameter. If it is a list or tuple, turn it into a comma-separated string first. @@ -107,15 +111,13 @@ def _escape(value): # encode strings to utf-8 if isinstance(value, string_types): - if PY2 and isinstance(value, unicode): # noqa: F821 - return value.encode("utf-8") - if not PY2 and isinstance(value, str): + if isinstance(value, str): return value.encode("utf-8") return str(value) -def _make_path(*parts): +def _make_path(*parts: Any) -> str: """ Create a URL string from parts, omit all `None` values and empty strings. Convert lists and tuples to comma separated values. @@ -133,15 +135,15 @@ def _make_path(*parts): GLOBAL_PARAMS = ("pretty", "human", "error_trace", "format", "filter_path") -def query_params(*opensearch_query_params): +def query_params(*opensearch_query_params: Any) -> Callable: # type: ignore """ Decorator that pops all accepted parameters from method's kwargs and puts them in the params argument. 
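The newly annotated helpers behave as before; a quick sketch of the two URL builders, with expected outputs read off the implementation above rather than verified against every version:

```python
from opensearchpy.client.utils import _make_path, _normalize_hosts

# None/empty parts are skipped; lists collapse to comma-separated values.
print(_make_path("_snapshot", "my-backups", ["snap-1", "snap-2"], None))
# -> /_snapshot/my-backups/snap-1,snap-2

# URL strings become connection dicts, with https implying use_ssl.
print(_normalize_hosts("https://user:secret@example.com:9200"))
# -> [{'host': 'example.com', 'port': 9200, 'use_ssl': True,
#      'http_auth': 'user:secret'}]
```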
""" - def _wrapper(func): + def _wrapper(func: Any) -> Any: @wraps(func) - def _wrapped(*args, **kwargs): + def _wrapped(*args: Any, **kwargs: Any) -> Any: params = (kwargs.pop("params", None) or {}).copy() headers = { k.lower(): v @@ -183,22 +185,22 @@ def _wrapped(*args, **kwargs): return _wrapper -def _bulk_body(serializer, body): +def _bulk_body(serializer: Optional[Serializer], body: Any) -> Any: # if not passed in a string, serialize items and join by newline if not isinstance(body, string_types): - body = "\n".join(map(serializer.dumps, body)) + body = "\n".join(map(serializer.dumps, body)) # type: ignore # bulk body must end with a newline if isinstance(body, bytes): if not body.endswith(b"\n"): body += b"\n" - elif isinstance(body, string_types) and not body.endswith("\n"): - body += "\n" + elif isinstance(body, string_types) and not body.endswith("\n"): # type: ignore + body += "\n" # type: ignore return body -def _base64_auth_header(auth_value): +def _base64_auth_header(auth_value: Any) -> str: """Takes either a 2-tuple or a base64-encoded string and returns a base64-encoded string to be used as an HTTP authorization header. @@ -209,17 +211,17 @@ def _base64_auth_header(auth_value): class NamespacedClient(object): - def __init__(self, client): + def __init__(self, client: Any) -> None: self.client = client @property - def transport(self): + def transport(self) -> Any: return self.client.transport class AddonClient(NamespacedClient): @classmethod - def infect_client(cls, client): + def infect_client(cls: Any, client: NamespacedClient) -> NamespacedClient: addon = cls(weakref.proxy(client)) setattr(client, cls.namespace, addon) return client diff --git a/opensearchpy/client/utils.pyi b/opensearchpy/client/utils.pyi deleted file mode 100644 index 4924fed9..00000000 --- a/opensearchpy/client/utils.pyi +++ /dev/null @@ -1,67 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from __future__ import unicode_literals - -from typing import ( - Any, - Callable, - Collection, - Dict, - List, - Optional, - Tuple, - TypeVar, - Union, -) - -from ..client import OpenSearch -from ..serializer import Serializer -from ..transport import Transport - -T = TypeVar("T") -SKIP_IN_PATH: Collection[Any] - -def _normalize_hosts( - hosts: Optional[Union[str, Collection[Union[str, Dict[str, Any]]]]] -) -> List[Dict[str, Any]]: ... -def _escape(value: Any) -> str: ... -def _make_path(*parts: Any) -> str: ... - -GLOBAL_PARAMS: Tuple[str, ...] 
- -def query_params( - *es_query_params: str, -) -> Callable[[Callable[..., T]], Callable[..., T]]: ... -def _bulk_body( - serializer: Serializer, body: Union[str, bytes, Collection[Any]] -) -> str: ... - -class NamespacedClient: - client: OpenSearch - def __init__(self, client: OpenSearch) -> None: ... - @property - def transport(self) -> Transport: ... diff --git a/opensearchpy/compat.py b/opensearchpy/compat.py index a5169050..cb8bc7d7 100644 --- a/opensearchpy/compat.py +++ b/opensearchpy/compat.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,51 +26,29 @@ # under the License. -import sys +from collections.abc import Mapping +from queue import Queue +from typing import Tuple, Type, Union +from urllib.parse import quote, quote_plus, unquote, urlencode, urlparse -PY2 = sys.version_info[0] == 2 +string_types = str, bytes +map = map -if PY2: - string_types = (basestring,) # noqa: F821 - from itertools import imap as map - from urllib import quote, quote_plus, unquote, urlencode - from Queue import Queue - from urlparse import urlparse +def to_str(x: Union[str, bytes], encoding: str = "ascii") -> str: + if not isinstance(x, str): + return x.decode(encoding) + return x - def to_str(x, encoding="ascii"): - if not isinstance(x, str): - return x.encode(encoding) - return x - to_bytes = to_str - -else: - string_types = str, bytes - from urllib.parse import quote, quote_plus, unquote, urlencode, urlparse - - map = map - from queue import Queue - - def to_str(x, encoding="ascii"): - if not isinstance(x, str): - return x.decode(encoding) - return x - - def to_bytes(x, encoding="ascii"): - if not isinstance(x, bytes): - return x.encode(encoding) - return x - - -try: - from collections.abc import Mapping -except ImportError: - from collections import Mapping +def to_bytes(x: Union[str, bytes], encoding: str = "ascii") -> bytes: + if not isinstance(x, bytes): + return x.encode(encoding) + return x try: - reraise_exceptions = (RecursionError,) + reraise_exceptions: Tuple[Type[BaseException], ...] = (RecursionError,) except NameError: reraise_exceptions = () diff --git a/opensearchpy/compat.pyi b/opensearchpy/compat.pyi deleted file mode 100644 index d3dc0a08..00000000 --- a/opensearchpy/compat.pyi +++ /dev/null @@ -1,54 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import sys -from typing import Callable, Tuple, Type, Union - -PY2: bool -string_types: Tuple[type, ...] 
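With the Python 2 branches gone, `compat.to_str` and `compat.to_bytes` above reduce to a plain decode/encode pair. Their behavior, per the new definitions:

from opensearchpy.compat import to_bytes, to_str

assert to_str(b"yellow", "ascii") == "yellow"    # bytes are decoded
assert to_str("yellow") == "yellow"              # str passes through unchanged
assert to_bytes("yellow", "ascii") == b"yellow"  # str is encoded
assert to_bytes(b"yellow") == b"yellow"          # bytes pass through unchanged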
- -to_str: Callable[[Union[str, bytes]], str] -to_bytes: Callable[[Union[str, bytes]], bytes] -reraise_exceptions: Tuple[Type[Exception], ...] - -if sys.version_info[0] == 2: - from itertools import imap as map - from urllib import quote as quote - from urllib import quote_plus as quote_plus - from urllib import unquote as unquote - from urllib import urlencode as urlencode - - from Queue import Queue as Queue - from urlparse import urlparse as urlparse -else: - from urllib.parse import quote as quote - from urllib.parse import quote_plus as quote_plus - from urllib.parse import unquote as unquote - from urllib.parse import urlencode as urlencode - from urllib.parse import urlparse as urlparse - - map = map - from queue import Queue as Queue diff --git a/opensearchpy/connection/__init__.py b/opensearchpy/connection/__init__.py index 1b9ad2cd..40037859 100644 --- a/opensearchpy/connection/__init__.py +++ b/opensearchpy/connection/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,9 +26,8 @@ # under the License. -import sys - from .base import Connection +from .http_async import AsyncHttpConnection from .http_requests import RequestsHttpConnection from .http_urllib3 import Urllib3HttpConnection, create_ssl_context @@ -36,17 +36,5 @@ "RequestsHttpConnection", "Urllib3HttpConnection", "create_ssl_context", + "AsyncHttpConnection", ] - -try: - # Asyncio only supported on Python 3.6+ - if sys.version_info < (3, 6): - raise ImportError - - from .http_async import AsyncHttpConnection - - __all__ += [ - "AsyncHttpConnection", - ] -except (ImportError, SyntaxError): - pass diff --git a/opensearchpy/connection/__init__.pyi b/opensearchpy/connection/__init__.pyi deleted file mode 100644 index ad1d9e62..00000000 --- a/opensearchpy/connection/__init__.pyi +++ /dev/null @@ -1,31 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -from .base import Connection as Connection -from .http_async import AsyncHttpConnection as AsyncHttpConnection -from .http_requests import RequestsHttpConnection as RequestsHttpConnection -from .http_urllib3 import Urllib3HttpConnection as Urllib3HttpConnection -from .http_urllib3 import create_ssl_context as create_ssl_context diff --git a/opensearchpy/connection/async_connections.py b/opensearchpy/connection/async_connections.py index acaa0b68..670bbaeb 100644 --- a/opensearchpy/connection/async_connections.py +++ b/opensearchpy/connection/async_connections.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -7,6 +8,8 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. +from typing import Any + from six import string_types import opensearchpy @@ -15,16 +18,18 @@ class AsyncConnections(object): + _conns: Any + """ Class responsible for holding connections to different clusters. Used as a singleton in this module. """ - def __init__(self): - self._kwargs = {} - self._conns = {} + def __init__(self) -> None: + self._kwargs: Any = {} + self._conns: Any = {} - async def configure(self, **kwargs): + async def configure(self, **kwargs: Any) -> None: """ Configure multiple connections at once, useful for passing in config dictionaries obtained from other sources, like Django's settings or a @@ -47,13 +52,13 @@ async def configure(self, **kwargs): del self._conns[k] self._kwargs = kwargs - async def add_connection(self, alias, conn): + async def add_connection(self, alias: str, conn: Any) -> None: """ Add a connection object, it will be passed through as-is. """ self._conns[alias] = conn - async def remove_connection(self, alias): + async def remove_connection(self, alias: str) -> None: """ Remove connection from the registry. Raises ``KeyError`` if connection wasn't found. @@ -68,7 +73,7 @@ async def remove_connection(self, alias): if errors == 2: raise KeyError("There is no connection with alias %r." % alias) - async def create_connection(self, alias="default", **kwargs): + async def create_connection(self, alias: str = "default", **kwargs: Any) -> Any: """ Construct an instance of ``opensearchpy.AsyncOpenSearch`` and register it under given alias. @@ -77,7 +82,7 @@ async def create_connection(self, alias="default", **kwargs): conn = self._conns[alias] = opensearchpy.AsyncOpenSearch(**kwargs) return conn - async def get_connection(self, alias="default"): + async def get_connection(self, alias: str = "default") -> Any: """ Retrieve a connection, construct it if necessary (only configuration was passed to us). If a non-string alias has been passed through we diff --git a/opensearchpy/connection/async_connections.pyi b/opensearchpy/connection/async_connections.pyi deleted file mode 100644 index 8935ec6b..00000000 --- a/opensearchpy/connection/async_connections.pyi +++ /dev/null @@ -1,10 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -class AsyncConnections: ... 
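Before moving on to the connection classes, a hedged usage sketch of the `AsyncConnections` registry annotated above; the alias and `hosts` value are placeholders, `aiohttp` must be installed, and no request is sent until the client is actually used:

import asyncio

from opensearchpy.connection.async_connections import AsyncConnections

async def main() -> None:
    registry = AsyncConnections()
    # create_connection() builds an AsyncOpenSearch from kwargs and registers it
    await registry.create_connection(alias="default", hosts=["http://localhost:9200"])
    client = await registry.get_connection("default")  # returns the same instance
    await registry.remove_connection("default")  # KeyError if the alias is unknown
    await client.close()

asyncio.run(main())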
diff --git a/opensearchpy/connection/base.py b/opensearchpy/connection/base.py index 435996b9..59418bfa 100644 --- a/opensearchpy/connection/base.py +++ b/opensearchpy/connection/base.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -24,7 +25,6 @@ # specific language governing permissions and limitations # under the License. - import gzip import io import logging @@ -32,13 +32,14 @@ import re import warnings from platform import python_version +from typing import Any, Collection, Dict, Mapping, Optional, Union try: import simplejson as json except ImportError: - import json + import json # type: ignore -from .. import __versionstr__ +from .._version import __versionstr__ from ..exceptions import HTTP_EXCEPTIONS, OpenSearchWarning, TransportError logger = logging.getLogger("opensearch") @@ -56,7 +57,7 @@ class Connection(object): """ Class responsible for maintaining a connection to an OpenSearch node. It - holds persistent connection pool to it and it's main interface + holds persistent connection pool to it and its main interface (`perform_request`) is thread-safe. Also responsible for logging. @@ -73,16 +74,16 @@ class Connection(object): def __init__( self, - host="localhost", - port=None, - use_ssl=False, - url_prefix="", - timeout=10, - headers=None, - http_compress=None, - opaque_id=None, - **kwargs - ): + host: str = "localhost", + port: Optional[int] = None, + use_ssl: bool = False, + url_prefix: str = "", + timeout: int = 10, + headers: Optional[Dict[str, str]] = None, + http_compress: Optional[bool] = None, + opaque_id: Optional[str] = None, + **kwargs: Any + ) -> None: if port is None: port = 9200 @@ -129,24 +130,24 @@ def __init__( self.url_prefix = url_prefix self.timeout = timeout - def __repr__(self): + def __repr__(self) -> str: return "<%s: %s>" % (self.__class__.__name__, self.host) - def __eq__(self, other): + def __eq__(self, other: object) -> bool: if not isinstance(other, Connection): raise TypeError("Unsupported equality check for %s and %s" % (self, other)) return self.__hash__() == other.__hash__() - def __hash__(self): + def __hash__(self) -> int: return id(self) - def _gzip_compress(self, body): + def _gzip_compress(self, body: Any) -> bytes: buf = io.BytesIO() with gzip.GzipFile(fileobj=buf, mode="wb") as f: f.write(body) return buf.getvalue() - def _raise_warnings(self, warning_headers): + def _raise_warnings(self, warning_headers: Any) -> None: """If 'headers' contains a 'Warning' header raise the warnings to be seen by the user. Takes an iterable of string values from any number of 'Warning' headers. @@ -158,7 +159,7 @@ def _raise_warnings(self, warning_headers): # Format is: '(number) OpenSearch-(version)-(instance) "(message)"' warning_messages = [] for header in warning_headers: - # Because 'Requests' does it's own folding of multiple HTTP headers + # Because 'Requests' does its own folding of multiple HTTP headers # into one header delimited by commas (totally standard compliant, just # annoying for cases like this) we need to expect there may be # more than one message per 'Warning' header. 
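Two behaviors from this hunk are easy to check directly: `_gzip_compress` is a plain gzip round-trip (used when `http_compress` is set), and `__hash__` returns `id(self)`, so `__eq__` is effectively object identity. A small sketch:

import gzip

from opensearchpy.connection.base import Connection

conn = Connection(host="localhost", port=9200, http_compress=True)
payload = b'{"query": {"match_all": {}}}'
assert gzip.decompress(conn._gzip_compress(payload)) == payload
# hashes are id()-based, so equal configuration does not mean equal objects
assert conn != Connection(host="localhost", port=9200)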
@@ -173,7 +174,7 @@ def _raise_warnings(self, warning_headers): for message in warning_messages: warnings.warn(message, category=OpenSearchWarning) - def _pretty_json(self, data): + def _pretty_json(self, data: Union[str, bytes]) -> str: # pretty JSON in tracer curl logs try: return json.dumps( @@ -181,9 +182,17 @@ def _pretty_json(self, data): ).replace("'", r"\u0027") except (ValueError, TypeError): # non-json data or a bulk request - return data + return data # type: ignore - def _log_trace(self, method, path, body, status_code, response, duration): + def _log_trace( + self, + method: str, + path: str, + body: Optional[Union[str, bytes]], + status_code: Optional[int], + response: Optional[str], + duration: Optional[float], + ) -> None: if not tracer.isEnabledFor(logging.INFO) or not tracer.handlers: return @@ -209,29 +218,33 @@ def _log_trace(self, method, path, body, status_code, response, duration): def perform_request( self, - method, - url, - params=None, - body=None, - timeout=None, - ignore=(), - headers=None, - ): + method: str, + url: str, + params: Optional[Mapping[str, Any]] = None, + body: Optional[bytes] = None, + timeout: Optional[Union[int, float]] = None, + ignore: Collection[int] = (), + headers: Optional[Mapping[str, str]] = None, + ) -> Any: raise NotImplementedError() def log_request_success( - self, method, full_url, path, body, status_code, response, duration - ): + self, + method: str, + full_url: str, + path: str, + body: Any, + status_code: int, + response: str, + duration: float, + ) -> None: """Log a successful API call.""" # TODO: optionally pass in params instead of full_url and do urlencode only when needed # body has already been serialized to utf-8, deserialize it for logging # TODO: find a better way to avoid (de)encoding the body back and forth - if body: - try: - body = body.decode("utf-8", "ignore") - except AttributeError: - pass + if body and isinstance(body, bytes): + body = body.decode("utf-8", "ignore") logger.info( "%s %s [status:%s request:%.3fs]", method, full_url, status_code, duration @@ -243,15 +256,15 @@ def log_request_success( def log_request_fail( self, - method, - full_url, - path, - body, - duration, - status_code=None, - response=None, - exception=None, - ): + method: str, + full_url: str, + path: str, + body: Any, + duration: float, + status_code: Optional[int] = None, + response: Optional[str] = None, + exception: Optional[Exception] = None, + ) -> None: """Log an unsuccessful API call.""" # do not log 404s on HEAD requests if method == "HEAD" and status_code == 404: @@ -267,11 +280,8 @@ def log_request_fail( # body has already been serialized to utf-8, deserialize it for logging # TODO: find a better way to avoid (de)encoding the body back and forth - if body: - try: - body = body.decode("utf-8", "ignore") - except AttributeError: - pass + if body and isinstance(body, bytes): + body = body.decode("utf-8", "ignore") logger.debug("> %s", body) @@ -280,7 +290,12 @@ def log_request_fail( if response is not None: logger.debug("< %s", response) - def _raise_error(self, status_code, raw_data, content_type=None): + def _raise_error( + self, + status_code: int, + raw_data: Union[str, bytes], + content_type: Optional[str] = None, + ) -> None: """Locate appropriate exception and raise it.""" error_message = raw_data additional_info = None @@ -302,11 +317,11 @@ def _raise_error(self, status_code, raw_data, content_type=None): status_code, error_message, additional_info ) - def _get_default_user_agent(self): + def _get_default_user_agent(self) 
-> str: return "opensearch-py/%s (Python %s)" % (__versionstr__, python_version()) @staticmethod - def default_ca_certs(): + def default_ca_certs() -> Union[str, None]: """ Get the default CA certificate bundle, preferring those configured in the standard OpenSSL environment variables before those provided by @@ -314,12 +329,12 @@ def default_ca_certs(): """ ca_certs = os.environ.get("SSL_CERT_FILE") or os.environ.get("SSL_CERT_DIR") - if ca_certs: - return ca_certs + if not ca_certs: + try: + import certifi - try: - import certifi - except ImportError: - pass - else: - return certifi.where() + ca_certs = certifi.where() + except ImportError: + pass + + return ca_certs diff --git a/opensearchpy/connection/base.pyi b/opensearchpy/connection/base.pyi deleted file mode 100644 index 7e51d20c..00000000 --- a/opensearchpy/connection/base.pyi +++ /dev/null @@ -1,118 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import logging -from typing import ( - Any, - AnyStr, - Collection, - Dict, - List, - Mapping, - NoReturn, - Optional, - Sequence, - Tuple, - Union, -) - -logger: logging.Logger -tracer: logging.Logger - -class Connection(object): - headers: Dict[str, str] - use_ssl: bool - http_compress: bool - scheme: str - hostname: str - port: Optional[int] - host: str - url_prefix: str - timeout: Optional[Union[float, int]] - def __init__( - self, - host: str = ..., - port: Optional[int] = ..., - use_ssl: bool = ..., - url_prefix: str = ..., - timeout: Optional[Union[float, int]] = ..., - headers: Optional[Mapping[str, str]] = ..., - http_compress: Optional[bool] = ..., - opaque_id: Optional[str] = ..., - **kwargs: Any - ) -> None: ... - def __repr__(self) -> str: ... - def __eq__(self, other: object) -> bool: ... - def __hash__(self) -> int: ... - def _gzip_compress(self, body: bytes) -> bytes: ... - def _raise_warnings(self, warning_headers: Sequence[str]) -> None: ... - def _pretty_json(self, data: Any) -> str: ... - def _log_trace( - self, - method: Any, - path: Any, - body: Any, - status_code: Any, - response: Any, - duration: Any, - ) -> None: ... - def perform_request( - self, - method: str, - url: str, - params: Optional[Mapping[str, Any]] = ..., - body: Optional[bytes] = ..., - timeout: Optional[Union[int, float]] = ..., - ignore: Collection[int] = ..., - headers: Optional[Mapping[str, str]] = ..., - ) -> Tuple[int, Mapping[str, str], str]: ... 
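The refactored `default_ca_certs` above prefers the standard OpenSSL environment variables and only falls back to `certifi` when neither is set. A sketch of that precedence (the bundle path is a placeholder):

import os

from opensearchpy.connection.base import Connection

os.environ["SSL_CERT_FILE"] = "/tmp/my-ca-bundle.pem"  # placeholder path
assert Connection.default_ca_certs() == "/tmp/my-ca-bundle.pem"

del os.environ["SSL_CERT_FILE"]
os.environ.pop("SSL_CERT_DIR", None)
print(Connection.default_ca_certs())  # certifi.where() if certifi is installed, else None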
- def log_request_success( - self, - method: str, - full_url: str, - path: str, - body: Optional[bytes], - status_code: int, - response: str, - duration: float, - ) -> None: ... - def log_request_fail( - self, - method: str, - full_url: str, - path: str, - body: Optional[bytes], - duration: float, - status_code: Optional[int] = ..., - response: Optional[str] = ..., - exception: Optional[Exception] = ..., - ) -> None: ... - def _raise_error( - self, status_code: int, raw_data: str, content_type: Optional[str] - ) -> NoReturn: ... - def _get_default_user_agent(self) -> str: ... - @staticmethod - def default_ca_certs() -> Optional[str]: ... diff --git a/opensearchpy/connection/connections.py b/opensearchpy/connection/connections.py index 857cba3a..5b1e9a9c 100644 --- a/opensearchpy/connection/connections.py +++ b/opensearchpy/connection/connections.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -24,6 +25,8 @@ # specific language governing permissions and limitations # under the License. +from typing import Any + from six import string_types import opensearchpy @@ -36,11 +39,11 @@ class Connections(object): singleton in this module. """ - def __init__(self): - self._kwargs = {} - self._conns = {} + def __init__(self) -> None: + self._kwargs: Any = {} + self._conns: Any = {} - def configure(self, **kwargs): + def configure(self, **kwargs: Any) -> None: """ Configure multiple connections at once, useful for passing in config dictionaries obtained from other sources, like Django's settings or a @@ -63,13 +66,13 @@ def configure(self, **kwargs): del self._conns[k] self._kwargs = kwargs - def add_connection(self, alias, conn): + def add_connection(self, alias: str, conn: Any) -> None: """ Add a connection object, it will be passed through as-is. """ self._conns[alias] = conn - def remove_connection(self, alias): + def remove_connection(self, alias: str) -> None: """ Remove connection from the registry. Raises ``KeyError`` if connection wasn't found. @@ -84,7 +87,7 @@ def remove_connection(self, alias): if errors == 2: raise KeyError("There is no connection with alias %r." % alias) - def create_connection(self, alias="default", **kwargs): + def create_connection(self, alias: str = "default", **kwargs: Any) -> Any: """ Construct an instance of ``opensearchpy.OpenSearch`` and register it under given alias. @@ -93,7 +96,7 @@ def create_connection(self, alias="default", **kwargs): conn = self._conns[alias] = opensearchpy.OpenSearch(**kwargs) return conn - def get_connection(self, alias="default"): + def get_connection(self, alias: str = "default") -> Any: """ Retrieve a connection, construct it if necessary (only configuration was passed to us). If a non-string alias has been passed through we diff --git a/opensearchpy/connection/connections.pyi b/opensearchpy/connection/connections.pyi deleted file mode 100644 index 07814ba4..00000000 --- a/opensearchpy/connection/connections.pyi +++ /dev/null @@ -1,29 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. 
Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# THIS FILE IS AUTOMATICALLY GENERATED, DO NOT EDIT. - -class Connections(object): ... diff --git a/opensearchpy/connection/http_async.py b/opensearchpy/connection/http_async.py index 10f5a56a..d6ee57ee 100644 --- a/opensearchpy/connection/http_async.py +++ b/opensearchpy/connection/http_async.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -13,8 +14,9 @@ import os import ssl import warnings +from typing import Any, Collection, Mapping, Optional, Union -from .._async._extra_imports import aiohttp, aiohttp_exceptions +from .._async._extra_imports import aiohttp, aiohttp_exceptions # type: ignore from .._async.compat import get_running_loop from .._async.http_aiohttp import AIOHttpConnection from ..compat import reraise_exceptions, string_types, urlencode @@ -30,27 +32,29 @@ class AsyncHttpConnection(AIOHttpConnection): + session: Optional[aiohttp.ClientSession] + def __init__( self, - host="localhost", - port=None, - http_auth=None, - use_ssl=False, - verify_certs=VERIFY_CERTS_DEFAULT, - ssl_show_warn=SSL_SHOW_WARN_DEFAULT, - ca_certs=None, - client_cert=None, - client_key=None, - ssl_version=None, - ssl_assert_fingerprint=None, - maxsize=10, - headers=None, - ssl_context=None, - http_compress=None, - opaque_id=None, - loop=None, - **kwargs - ): + host: str = "localhost", + port: Optional[int] = None, + http_auth: Any = None, + use_ssl: bool = False, + verify_certs: Any = VERIFY_CERTS_DEFAULT, + ssl_show_warn: Any = SSL_SHOW_WARN_DEFAULT, + ca_certs: Any = None, + client_cert: Any = None, + client_key: Any = None, + ssl_version: Any = None, + ssl_assert_fingerprint: Any = None, + maxsize: Optional[int] = 10, + headers: Optional[Mapping[str, str]] = None, + ssl_context: Any = None, + http_compress: Optional[bool] = None, + opaque_id: Optional[str] = None, + loop: Any = None, + **kwargs: Any + ) -> None: self.headers = {} super().__init__( @@ -67,7 +71,7 @@ def __init__( if isinstance(http_auth, (tuple, list)): http_auth = aiohttp.BasicAuth(login=http_auth[0], password=http_auth[1]) elif isinstance(http_auth, string_types): - login, password = http_auth.split(":", 1) + login, password = http_auth.split(":", 1) # type: ignore http_auth = aiohttp.BasicAuth(login=login, password=password) # if providing an SSL context, raise error if any other SSL related flag is used @@ -145,8 +149,15 @@ def __init__( self._ssl_context = ssl_context async def perform_request( - self, method, url, params=None, body=None, timeout=None, ignore=(), headers=None - ): + self, + method: str, + url: str, + params: Optional[Mapping[str, Any]] = None, + body: Optional[bytes] = None, + timeout: Optional[Union[int, float]] = None, + ignore: Collection[int] = (), + headers: Optional[Mapping[str, str]] = None, + ) -> Any: if self.session is None: await self._create_aiohttp_session() assert self.session is not None @@ -261,14 +272,14 @@ async def 
perform_request( return response.status, response.headers, raw_data - async def close(self): + async def close(self) -> Any: """ Explicitly closes connection """ if self.session: await self.session.close() - async def _create_aiohttp_session(self): + async def _create_aiohttp_session(self) -> Any: """Creates an aiohttp.ClientSession(). This is delayed until the first call to perform_request() so that AsyncTransport has a chance to set AIOHttpConnection.loop @@ -288,9 +299,9 @@ async def _create_aiohttp_session(self): ) -class OpenSearchClientResponse(aiohttp.ClientResponse): - async def text(self, encoding=None, errors="strict"): +class OpenSearchClientResponse(aiohttp.ClientResponse): # type: ignore + async def text(self, encoding: Any = None, errors: str = "strict") -> Any: if self._body is None: await self.read() - return self._body.decode("utf-8", "surrogatepass") + return self._body.decode("utf-8", "surrogatepass") # type: ignore diff --git a/opensearchpy/connection/http_async.pyi b/opensearchpy/connection/http_async.pyi deleted file mode 100644 index adde809b..00000000 --- a/opensearchpy/connection/http_async.pyi +++ /dev/null @@ -1,37 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -from typing import Any, Mapping, Optional - -from .._async._extra_imports import aiohttp # type: ignore -from .._async.http_aiohttp import AIOHttpConnection - -class AsyncHttpConnection(AIOHttpConnection): - session: Optional[aiohttp.ClientSession] - def __init__( - self, - host: str = ..., - port: Optional[int] = ..., - http_auth: Optional[Any] = ..., - use_ssl: bool = ..., - verify_certs: bool = ..., - ssl_show_warn: bool = ..., - ca_certs: Optional[Any] = ..., - client_cert: Optional[Any] = ..., - client_key: Optional[Any] = ..., - ssl_version: Optional[Any] = ..., - ssl_assert_fingerprint: Optional[Any] = ..., - maxsize: Optional[int] = ..., - headers: Optional[Mapping[str, str]] = ..., - ssl_context: Optional[Any] = ..., - http_compress: Optional[bool] = ..., - opaque_id: Optional[str] = ..., - loop: Optional[Any] = ..., - **kwargs: Any - ) -> None: ... 
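A hedged construction example for `AsyncHttpConnection`, which is now imported unconditionally; the host and credentials are placeholders, and the tuple form of `http_auth` is converted to `aiohttp.BasicAuth` in `__init__` above (the async extras, i.e. `aiohttp`, must be installed):

from opensearchpy import AsyncOpenSearch
from opensearchpy.connection import AsyncHttpConnection

client = AsyncOpenSearch(
    hosts=[{"host": "localhost", "port": 9200}],
    connection_class=AsyncHttpConnection,
    http_auth=("admin", "admin"),  # the string "admin:admin" is also accepted
    use_ssl=False,
)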
diff --git a/opensearchpy/connection/http_requests.py b/opensearchpy/connection/http_requests.py index e0b6d143..a966631d 100644 --- a/opensearchpy/connection/http_requests.py +++ b/opensearchpy/connection/http_requests.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -27,6 +28,7 @@ import time import warnings +from typing import Any, Collection, Mapping, Optional, Union try: import requests @@ -72,21 +74,21 @@ class RequestsHttpConnection(Connection): def __init__( self, - host="localhost", - port=None, - http_auth=None, - use_ssl=False, - verify_certs=True, - ssl_show_warn=True, - ca_certs=None, - client_cert=None, - client_key=None, - headers=None, - http_compress=None, - opaque_id=None, - pool_maxsize=None, - **kwargs - ): + host: str = "localhost", + port: Optional[int] = None, + http_auth: Any = None, + use_ssl: bool = False, + verify_certs: bool = True, + ssl_show_warn: bool = True, + ca_certs: Any = None, + client_cert: Any = None, + client_key: Any = None, + headers: Any = None, + http_compress: Any = None, + opaque_id: Any = None, + pool_maxsize: Any = None, + **kwargs: Any + ) -> None: if not REQUESTS_AVAILABLE: raise ImproperlyConfigured( "Please install requests to use RequestsHttpConnection." @@ -115,13 +117,13 @@ def __init__( if not self.http_compress: # Need to set this to 'None' otherwise Requests adds its own. - self.session.headers["accept-encoding"] = None + self.session.headers["accept-encoding"] = None # type: ignore if http_auth is not None: if isinstance(http_auth, (tuple, list)): http_auth = tuple(http_auth) elif isinstance(http_auth, string_types): - http_auth = tuple(http_auth.split(":", 1)) + http_auth = tuple(http_auth.split(":", 1)) # type: ignore self.session.auth = http_auth self.base_url = "%s%s" % ( @@ -146,7 +148,7 @@ def __init__( self.session.verify = ca_certs if not ssl_show_warn: - requests.packages.urllib3.disable_warnings() + requests.packages.urllib3.disable_warnings() # type: ignore if self.use_ssl and not verify_certs and ssl_show_warn: warnings.warn( @@ -154,17 +156,17 @@ def __init__( % self.host ) - def perform_request( + def perform_request( # type: ignore self, - method, - url, - params=None, - body=None, - timeout=None, - allow_redirects=True, - ignore=(), - headers=None, - ): + method: str, + url: str, + params: Optional[Mapping[str, Any]] = None, + body: Optional[bytes] = None, + timeout: Optional[Union[int, float]] = None, + allow_redirects: Optional[bool] = True, + ignore: Collection[int] = (), + headers: Optional[Mapping[str, str]] = None, + ) -> Any: url = self.base_url + url headers = headers or {} if params: @@ -173,7 +175,7 @@ def perform_request( orig_body = body if self.http_compress and body: body = self._gzip_compress(body) - headers["content-encoding"] = "gzip" + headers["content-encoding"] = "gzip" # type: ignore start = time.time() request = requests.Request(method=method, headers=headers, url=url, data=body) @@ -181,7 +183,7 @@ def perform_request( settings = self.session.merge_environment_settings( prepared_request.url, {}, None, None, None ) - send_kwargs = { + send_kwargs: Any = { "timeout": timeout or self.timeout, "allow_redirects": allow_redirects, } @@ -246,10 +248,10 @@ def perform_request( return response.status_code, response.headers, raw_data @property - def headers(self): + def headers(self) -> Any: # type: ignore return self.session.headers - def close(self): + def close(self) -> None: """ Explicitly closes 
connections """ diff --git a/opensearchpy/connection/http_requests.pyi b/opensearchpy/connection/http_requests.pyi deleted file mode 100644 index c9bb5617..00000000 --- a/opensearchpy/connection/http_requests.pyi +++ /dev/null @@ -1,50 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Mapping, Optional - -import requests - -from .base import Connection - -class RequestsHttpConnection(Connection): - session: requests.Session - def __init__( - self, - host: str = ..., - port: Optional[int] = ..., - http_auth: Optional[Any] = ..., - use_ssl: bool = ..., - verify_certs: bool = ..., - ssl_show_warn: bool = ..., - ca_certs: Optional[Any] = ..., - client_cert: Optional[Any] = ..., - client_key: Optional[Any] = ..., - headers: Optional[Mapping[str, str]] = ..., - http_compress: Optional[bool] = ..., - opaque_id: Optional[str] = ..., - **kwargs: Any - ) -> None: ... diff --git a/opensearchpy/connection/http_urllib3.py b/opensearchpy/connection/http_urllib3.py index 6fc09e72..2a5ccd3b 100644 --- a/opensearchpy/connection/http_urllib3.py +++ b/opensearchpy/connection/http_urllib3.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -27,11 +28,12 @@ import ssl import time import warnings +from typing import Any, Callable, Collection, Mapping, Optional, Union -import urllib3 # type: ignore +import urllib3 from urllib3.exceptions import ReadTimeoutError -from urllib3.exceptions import SSLError as UrllibSSLError # type: ignore -from urllib3.util.retry import Retry # type: ignore +from urllib3.exceptions import SSLError as UrllibSSLError +from urllib3.util.retry import Retry from ..compat import reraise_exceptions, urlencode from ..exceptions import ( @@ -49,7 +51,7 @@ SSL_SHOW_WARN_DEFAULT = object() -def create_ssl_context(**kwargs): +def create_ssl_context(**kwargs: Any) -> Any: """ A helper function around creating an SSL context @@ -86,7 +88,7 @@ class Urllib3HttpConnection(Connection): ``ssl`` module for exact options for your environment). :arg ssl_assert_hostname: use hostname verification if not `False` :arg ssl_assert_fingerprint: verify the supplied certificate fingerprint if not `None` - :arg maxsize: the number of connections which will be kept open to this + :arg pool_maxsize: the number of connections which will be kept open to this host. See https://urllib3.readthedocs.io/en/1.4/pools.html#api for more information. 
:arg headers: any custom http headers to be add to requests @@ -97,25 +99,25 @@ class Urllib3HttpConnection(Connection): def __init__( self, - host="localhost", - port=None, - http_auth=None, - use_ssl=False, - verify_certs=VERIFY_CERTS_DEFAULT, - ssl_show_warn=SSL_SHOW_WARN_DEFAULT, - ca_certs=None, - client_cert=None, - client_key=None, - ssl_version=None, - ssl_assert_hostname=None, - ssl_assert_fingerprint=None, - maxsize=10, - headers=None, - ssl_context=None, - http_compress=None, - opaque_id=None, - **kwargs - ): + host: str = "localhost", + port: Optional[int] = None, + http_auth: Any = None, + use_ssl: bool = False, + verify_certs: Any = VERIFY_CERTS_DEFAULT, + ssl_show_warn: Any = SSL_SHOW_WARN_DEFAULT, + ca_certs: Any = None, + client_cert: Any = None, + client_key: Any = None, + ssl_version: Any = None, + ssl_assert_hostname: Any = None, + ssl_assert_fingerprint: Any = None, + pool_maxsize: Any = None, + headers: Any = None, + ssl_context: Any = None, + http_compress: Any = None, + opaque_id: Any = None, + **kwargs: Any + ) -> None: # Initialize headers before calling super().__init__(). self.headers = urllib3.make_headers(keep_alive=True) @@ -128,12 +130,19 @@ def __init__( opaque_id=opaque_id, **kwargs ) - if http_auth is not None: - if isinstance(http_auth, (tuple, list)): - http_auth = ":".join(http_auth) - self.headers.update(urllib3.make_headers(basic_auth=http_auth)) - pool_class = urllib3.HTTPConnectionPool + self.http_auth = http_auth + if self.http_auth is not None: + if isinstance(self.http_auth, Callable): # type: ignore + pass + elif isinstance(self.http_auth, (tuple, list)): + self.headers.update( + urllib3.make_headers(basic_auth=":".join(http_auth)) + ) + else: + self.headers.update(urllib3.make_headers(basic_auth=http_auth)) + + pool_class: Any = urllib3.HTTPConnectionPool kw = {} # if providing an SSL context, raise error if any other SSL related flag is used @@ -203,18 +212,29 @@ def __init__( if not ssl_show_warn: urllib3.disable_warnings() + if pool_maxsize and isinstance(pool_maxsize, int): + kw["maxsize"] = pool_maxsize + self.pool = pool_class( - self.hostname, port=self.port, timeout=self.timeout, maxsize=maxsize, **kw + self.hostname, port=self.port, timeout=self.timeout, **kw ) def perform_request( - self, method, url, params=None, body=None, timeout=None, ignore=(), headers=None - ): + self, + method: str, + url: str, + params: Optional[Mapping[str, Any]] = None, + body: Optional[bytes] = None, + timeout: Optional[Union[int, float]] = None, + ignore: Collection[int] = (), + headers: Optional[Mapping[str, str]] = None, + ) -> Any: url = self.url_prefix + url if params: url = "%s?%s" % (url, urlencode(params)) full_url = self.host + url + start = time.time() orig_body = body try: @@ -237,6 +257,10 @@ def perform_request( body = self._gzip_compress(body) request_headers["content-encoding"] = "gzip" + if self.http_auth is not None: + if isinstance(self.http_auth, Callable): # type: ignore + request_headers.update(self.http_auth(method, full_url, body)) + response = self.pool.urlopen( method, url, body, retries=Retry(False), headers=request_headers, **kw ) @@ -275,10 +299,10 @@ def perform_request( return response.status, response.headers, raw_data - def get_response_headers(self, response): + def get_response_headers(self, response: Any) -> Any: return {header.lower(): value for header, value in response.headers.items()} - def close(self): + def close(self) -> None: """ Explicitly closes connection """ diff --git 
a/opensearchpy/connection/http_urllib3.pyi b/opensearchpy/connection/http_urllib3.pyi deleted file mode 100644 index 83d62117..00000000 --- a/opensearchpy/connection/http_urllib3.pyi +++ /dev/null @@ -1,64 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import ssl -from typing import Any, Mapping, Optional, Union - -import urllib3 - -from .base import Connection - -def create_ssl_context( - cafile: Any = ..., - capath: Any = ..., - cadata: Any = ..., -) -> ssl.SSLContext: ... - -class Urllib3HttpConnection(Connection): - pool: urllib3.HTTPConnectionPool - def __init__( - self, - host: str = ..., - port: Optional[int] = ..., - url_prefix: str = ..., - timeout: Optional[Union[float, int]] = ..., - http_auth: Any = ..., - use_ssl: bool = ..., - verify_certs: bool = ..., - ssl_show_warn: bool = ..., - ca_certs: Optional[Any] = ..., - client_cert: Optional[Any] = ..., - client_key: Optional[Any] = ..., - ssl_version: Optional[Any] = ..., - ssl_assert_hostname: Optional[Any] = ..., - ssl_assert_fingerprint: Optional[Any] = ..., - maxsize: int = ..., - headers: Optional[Mapping[str, str]] = ..., - ssl_context: Optional[Any] = ..., - http_compress: Optional[bool] = ..., - opaque_id: Optional[str] = ..., - **kwargs: Any - ) -> None: ... diff --git a/opensearchpy/connection/pooling.py b/opensearchpy/connection/pooling.py index bd9fe5f9..87bd8c72 100644 --- a/opensearchpy/connection/pooling.py +++ b/opensearchpy/connection/pooling.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,6 +26,8 @@ # under the License. +from typing import Any + from .base import Connection try: @@ -34,6 +37,8 @@ class PoolingConnection(Connection): + _free_connections: queue.Queue[Connection] + """ Base connection class for connections that use libraries without thread safety and no capacity for connection pooling. To use this just implement a @@ -41,23 +46,23 @@ class PoolingConnection(Connection): it. 
""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: self._free_connections = queue.Queue() super(PoolingConnection, self).__init__(*args, **kwargs) - def _make_connection(self): + def _make_connection(self) -> Connection: raise NotImplementedError - def _get_connection(self): + def _get_connection(self) -> Connection: try: return self._free_connections.get_nowait() except queue.Empty: return self._make_connection() - def _release_connection(self, con): + def _release_connection(self, con: Connection) -> None: self._free_connections.put(con) - def close(self): + def close(self) -> None: """ Explicitly close connection """ diff --git a/opensearchpy/connection/pooling.pyi b/opensearchpy/connection/pooling.pyi deleted file mode 100644 index b32fd068..00000000 --- a/opensearchpy/connection/pooling.pyi +++ /dev/null @@ -1,33 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from .base import Connection - -class PoolingConnection(Connection): - def _make_connection(self) -> Connection: ... - def _get_connection(self) -> Connection: ... - def _release_connection(self, con: Connection) -> None: ... - def close(self) -> None: ... diff --git a/opensearchpy/connection_pool.py b/opensearchpy/connection_pool.py index 0416fbec..378b91b3 100644 --- a/opensearchpy/connection_pool.py +++ b/opensearchpy/connection_pool.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -29,15 +30,13 @@ import random import threading import time +from queue import Empty, PriorityQueue +from typing import Any, Dict, Optional, Sequence, Tuple, Type -try: - from Queue import Empty, PriorityQueue -except ImportError: - from queue import PriorityQueue, Empty - +from .connection import Connection from .exceptions import ImproperlyConfigured -logger = logging.getLogger("opensearch") +logger: logging.Logger = logging.getLogger("opensearch") class ConnectionSelector(object): @@ -55,17 +54,17 @@ class ConnectionSelector(object): process it will be the dictionary returned by the `host_info_callback`. Example of where this would be useful is a zone-aware selector that would - only select connections from it's own zones and only fall back to other - connections where there would be none in it's zones. + only select connections from its own zones and only fall back to other + connections where there would be none in its zones. 
""" - def __init__(self, opts): + def __init__(self, opts: Sequence[Tuple[Connection, Any]]) -> None: """ :arg opts: dictionary of connection instances and their options """ self.connection_opts = opts - def select(self, connections): + def select(self, connections: Sequence[Connection]) -> None: """ Select a connection from the given list. @@ -79,7 +78,7 @@ class RandomSelector(ConnectionSelector): Select a connection at random """ - def select(self, connections): + def select(self, connections: Sequence[Connection]) -> Any: return random.choice(connections) @@ -88,11 +87,11 @@ class RoundRobinSelector(ConnectionSelector): Selector using round-robin. """ - def __init__(self, opts): + def __init__(self, opts: Sequence[Tuple[Connection, Any]]) -> None: super(RoundRobinSelector, self).__init__(opts) self.data = threading.local() - def select(self, connections): + def select(self, connections: Sequence[Connection]) -> Any: self.data.rr = getattr(self.data, "rr", -1) + 1 self.data.rr %= len(connections) return connections[self.data.rr] @@ -112,7 +111,7 @@ class ConnectionPool(object): future reference. Upon each request the `Transport` will ask for a `Connection` via the - `get_connection` method. If the connection fails (it's `perform_request` + `get_connection` method. If the connection fails (its `perform_request` raises a `ConnectionError`) it will be marked as dead (via `mark_dead`) and put on a timeout (if it fails N times in a row the timeout is exponentially longer - the formula is `default_timeout * 2 ** (fail_count - 1)`). When @@ -121,18 +120,27 @@ class ConnectionPool(object): succeeds will be marked as live (its fail count will be deleted). """ + connections_opts: Sequence[Tuple[Connection, Any]] + connections: Any + orig_connections: Tuple[Connection, ...] + dead: Any + dead_count: Dict[Any, int] + dead_timeout: float + timeout_cutoff: int + selector: Any + def __init__( self, - connections, - dead_timeout=60, - timeout_cutoff=5, - selector_class=RoundRobinSelector, - randomize_hosts=True, - **kwargs - ): + connections: Any, + dead_timeout: float = 60, + timeout_cutoff: int = 5, + selector_class: Type[ConnectionSelector] = RoundRobinSelector, + randomize_hosts: bool = True, + **kwargs: Any + ) -> None: """ :arg connections: list of tuples containing the - :class:`~opensearchpy.Connection` instance and it's options + :class:`~opensearchpy.Connection` instance and its options :arg dead_timeout: number of seconds a connection should be retired for after a failure, increases on consecutive failures :arg timeout_cutoff: number of consecutive failures after which the @@ -163,9 +171,9 @@ def __init__( self.dead_timeout = dead_timeout self.timeout_cutoff = timeout_cutoff - self.selector = selector_class(dict(connections)) + self.selector = selector_class(dict(connections)) # type: ignore - def mark_dead(self, connection, now=None): + def mark_dead(self, connection: Any, now: Optional[float] = None) -> None: """ Mark the connection as dead (failed). Remove it from the live pool and put it on a timeout. @@ -195,7 +203,7 @@ def mark_dead(self, connection, now=None): timeout, ) - def mark_live(self, connection): + def mark_live(self, connection: Any) -> None: """ Mark connection as healthy after a resurrection. Resets the fail counter for the connection. @@ -208,10 +216,10 @@ def mark_live(self, connection): # race condition, safe to ignore pass - def resurrect(self, force=False): + def resurrect(self, force: bool = False) -> Any: """ Attempt to resurrect a connection from the dead pool. 
It will try to - locate one (not all) eligible (it's timeout is over) connection to + locate one (not all) eligible (its timeout is over) connection to return to the live pool. Any resurrected connection is also returned. :arg force: resurrect a connection even if there is none eligible (used @@ -245,12 +253,12 @@ def resurrect(self, force=False): self.dead.put((timeout, connection)) return - # either we were forced or the connection is elligible to be retried + # either we were forced or the connection is eligible to be retried self.connections.append(connection) logger.info("Resurrecting connection %r (force=%s).", connection, force) return connection - def get_connection(self): + def get_connection(self) -> Any: """ Return a connection from the pool using the `ConnectionSelector` instance. @@ -259,7 +267,7 @@ def get_connection(self): no connections are available and passes the list of live connections to the selector instance to choose from. - Returns a connection instance and it's current fail count. + Returns a connection instance and its current fail count. """ self.resurrect() connections = self.connections[:] @@ -275,38 +283,38 @@ def get_connection(self): # only one connection, no need for a selector return connections[0] - def close(self): + def close(self) -> Any: """ Explicitly closes connections """ for conn in self.connections: conn.close() - def __repr__(self): + def __repr__(self) -> str: return "<%s: %r>" % (type(self).__name__, self.connections) class DummyConnectionPool(ConnectionPool): - def __init__(self, connections, **kwargs): + def __init__(self, connections: Any, **kwargs: Any) -> None: if len(connections) != 1: raise ImproperlyConfigured( "DummyConnectionPool needs exactly one " "connection defined." ) # we need connection opts for sniffing logic self.connection_opts = connections - self.connection = connections[0][0] + self.connection: Any = connections[0][0] self.connections = (self.connection,) - def get_connection(self): + def get_connection(self) -> Any: return self.connection - def close(self): + def close(self) -> None: """ Explicitly closes connections """ self.connection.close() - def _noop(self, *args, **kwargs): + def _noop(self, *args: Any, **kwargs: Any) -> Any: pass mark_dead = mark_live = resurrect = _noop @@ -315,14 +323,14 @@ def _noop(self, *args, **kwargs): class EmptyConnectionPool(ConnectionPool): """A connection pool that is empty. Errors out if used.""" - def __init__(self, *_, **__): + def __init__(self, *_: Any, **__: Any) -> None: self.connections = [] self.connection_opts = [] - def get_connection(self): + def get_connection(self) -> Connection: raise ImproperlyConfigured("No connections were configured") - def _noop(self, *args, **kwargs): + def _noop(self, *args: Any, **kwargs: Any) -> Any: pass close = mark_dead = mark_live = resurrect = _noop diff --git a/opensearchpy/connection_pool.pyi b/opensearchpy/connection_pool.pyi deleted file mode 100644 index 7a528cf4..00000000 --- a/opensearchpy/connection_pool.pyi +++ /dev/null @@ -1,85 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. 
Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -import logging -from typing import Any, Dict, List, Optional, Sequence, Tuple, Type, Union - -from .connection import Connection - -try: - from Queue import PriorityQueue -except ImportError: - from queue import PriorityQueue - -logger: logging.Logger - -class ConnectionSelector(object): - connection_opts: Sequence[Tuple[Connection, Any]] - def __init__(self, opts: Sequence[Tuple[Connection, Any]]) -> None: ... - def select(self, connections: Sequence[Connection]) -> Connection: ... - -class RandomSelector(ConnectionSelector): ... -class RoundRobinSelector(ConnectionSelector): ... - -class ConnectionPool(object): - connections_opts: Sequence[Tuple[Connection, Any]] - connections: Sequence[Connection] - orig_connections: Tuple[Connection, ...] - dead: PriorityQueue - dead_count: Dict[Connection, int] - dead_timeout: float - timeout_cutoff: int - selector: ConnectionSelector - def __init__( - self, - connections: Sequence[Tuple[Connection, Any]], - dead_timeout: float = ..., - timeout_cutoff: int = ..., - selector_class: Type[ConnectionSelector] = ..., - randomize_hosts: bool = ..., - **kwargs: Any - ) -> None: ... - def mark_dead(self, connection: Connection, now: Optional[float] = ...) -> None: ... - def mark_live(self, connection: Connection) -> None: ... - def resurrect(self, force: bool = ...) -> Optional[Connection]: ... - def get_connection(self) -> Connection: ... - def close(self) -> None: ... - def __repr__(self) -> str: ... - -class DummyConnectionPool(ConnectionPool): - def __init__( - self, connections: Sequence[Tuple[Connection, Any]], **kwargs: Any - ) -> None: ... - def get_connection(self) -> Connection: ... - def close(self) -> None: ... - def _noop(self, *args: Any, **kwargs: Any) -> Any: ... - mark_dead = mark_live = resurrect = _noop - -class EmptyConnectionPool(ConnectionPool): - def __init__(self, *_: Any, **__: Any) -> None: ... - def get_connection(self) -> Connection: ... - def _noop(self, *args: Any, **kwargs: Any) -> Any: ... - close = mark_dead = mark_live = resurrect = _noop diff --git a/opensearchpy/exceptions.py b/opensearchpy/exceptions.py index cc35c91f..58d29bdf 100644 --- a/opensearchpy/exceptions.py +++ b/opensearchpy/exceptions.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,6 +26,8 @@ # under the License. +from typing import Any, Dict, Type, Union + __all__ = [ "ImproperlyConfigured", "OpenSearchException", @@ -75,32 +78,33 @@ class TransportError(OpenSearchException): """ @property - def status_code(self): + def status_code(self) -> Union[str, int]: """ The HTTP status code of the response that precipitated the error or ``'N/A'`` if not applicable. 
""" - return self.args[0] + return self.args[0] # type: ignore @property - def error(self): + def error(self) -> str: """A string error message.""" - return self.args[1] + return self.args[1] # type: ignore @property - def info(self): + def info(self) -> Union[Dict[str, Any], Exception, Any]: """ Dict of returned error info from OpenSearch, where available, underlying exception when not. """ return self.args[2] - def __str__(self): + def __str__(self) -> str: cause = "" try: - if self.info and "error" in self.info: - if isinstance(self.info["error"], dict): - root_cause = self.info["error"]["root_cause"][0] + if self.info and isinstance(self.info, dict) and "error" in self.info: + error = self.info["error"] + if isinstance(error, dict): + root_cause = error["root_cause"][0] cause = ", ".join( filter( None, @@ -127,7 +131,7 @@ class ConnectionError(TransportError): implementation is available as ``.info``. """ - def __str__(self): + def __str__(self) -> str: return "ConnectionError(%s) caused by: %s(%s)" % ( self.error, self.info.__class__.__name__, @@ -142,7 +146,7 @@ class SSLError(ConnectionError): class ConnectionTimeout(ConnectionError): """A network timeout. Doesn't cause a node retry by default.""" - def __str__(self): + def __str__(self) -> str: return "ConnectionTimeout caused by - %s(%s)" % ( self.info.__class__.__name__, self.info, @@ -198,7 +202,7 @@ class OpenSearchWarning(Warning): # more generic mappings from status_code to python exceptions -HTTP_EXCEPTIONS = { +HTTP_EXCEPTIONS: Dict[int, Type[OpenSearchException]] = { 400: RequestError, 401: AuthenticationException, 403: AuthorizationException, diff --git a/opensearchpy/exceptions.pyi b/opensearchpy/exceptions.pyi deleted file mode 100644 index 8adafdd8..00000000 --- a/opensearchpy/exceptions.pyi +++ /dev/null @@ -1,63 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Dict, Union - -class ImproperlyConfigured(Exception): ... -class OpenSearchException(Exception): ... -class SerializationError(OpenSearchException): ... - -class TransportError(OpenSearchException): - @property - def status_code(self) -> Union[str, int]: ... - @property - def error(self) -> str: ... - @property - def info(self) -> Union[Dict[str, Any], Exception, Any]: ... - def __str__(self) -> str: ... - -class ConnectionError(TransportError): - def __str__(self) -> str: ... - -class SSLError(ConnectionError): ... - -class ConnectionTimeout(ConnectionError): - def __str__(self) -> str: ... 
- -class NotFoundError(TransportError): ... -class ConflictError(TransportError): ... -class RequestError(TransportError): ... -class AuthenticationException(TransportError): ... -class AuthorizationException(TransportError): ... -class OpenSearchDslException(Exception): ... -class UnknownDslObject(OpenSearchDslException): ... -class ValidationException(ValueError, OpenSearchDslException): ... -class IllegalOperation(OpenSearchDslException): ... -class OpenSearchWarning(Warning): ... - -OpenSearchDeprecationWarning = OpenSearchWarning - -HTTP_EXCEPTIONS: Dict[int, OpenSearchException] diff --git a/opensearchpy/helpers/__init__.py b/opensearchpy/helpers/__init__.py index 72a7d140..7116dc48 100644 --- a/opensearchpy/helpers/__init__.py +++ b/opensearchpy/helpers/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,8 +26,12 @@ # under the License. -import sys - +from .._async.helpers.actions import ( + async_bulk, + async_reindex, + async_scan, + async_streaming_bulk, +) from .actions import ( _chunk_actions, _process_bulk_chunk, @@ -39,7 +44,7 @@ ) from .asyncsigner import AWSV4SignerAsyncAuth from .errors import BulkIndexError, ScanError -from .signer import AWSV4SignerAuth +from .signer import AWSV4SignerAuth, RequestsAWSV4SignerAuth, Urllib3AWSV4SignerAuth __all__ = [ "BulkIndexError", @@ -54,16 +59,10 @@ "_process_bulk_chunk", "AWSV4SignerAuth", "AWSV4SignerAsyncAuth", + "RequestsAWSV4SignerAuth", + "Urllib3AWSV4SignerAuth", + "async_scan", + "async_bulk", + "async_reindex", + "async_streaming_bulk", ] - - -# Asyncio only supported on Python 3.6+ -if sys.version_info >= (3, 6): - from .._async.helpers.actions import ( - async_bulk, - async_reindex, - async_scan, - async_streaming_bulk, - ) - - __all__ += ["async_scan", "async_bulk", "async_reindex", "async_streaming_bulk"] diff --git a/opensearchpy/helpers/__init__.pyi b/opensearchpy/helpers/__init__.pyi deleted file mode 100644 index 59b5cefd..00000000 --- a/opensearchpy/helpers/__init__.pyi +++ /dev/null @@ -1,52 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -import sys - -from .actions import _chunk_actions as _chunk_actions -from .actions import _process_bulk_chunk as _process_bulk_chunk -from .actions import bulk as bulk -from .actions import expand_action as expand_action -from .actions import parallel_bulk as parallel_bulk -from .actions import reindex as reindex -from .actions import scan as scan -from .actions import streaming_bulk as streaming_bulk -from .errors import BulkIndexError as BulkIndexError -from .errors import ScanError as ScanError - -try: - # Asyncio only supported on Python 3.6+ - if sys.version_info < (3, 6): - raise ImportError - - from .._async.helpers.actions import async_bulk as async_bulk - from .._async.helpers.actions import async_reindex as async_reindex - from .._async.helpers.actions import async_scan as async_scan - from .._async.helpers.actions import async_streaming_bulk as async_streaming_bulk - from .asyncsigner import AWSV4SignerAsyncAuth as AWSV4SignerAsyncAuth - from .signer import AWSV4SignerAuth as AWSV4SignerAuth -except (ImportError, SyntaxError): - pass diff --git a/opensearchpy/helpers/actions.py b/opensearchpy/helpers/actions.py index e565256f..7f8ced35 100644 --- a/opensearchpy/helpers/actions.py +++ b/opensearchpy/helpers/actions.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -28,6 +29,7 @@ import logging import time from operator import methodcaller +from typing import Any, Optional from ..compat import Mapping, Queue, map, string_types from ..exceptions import TransportError @@ -36,7 +38,7 @@ logger = logging.getLogger("opensearchpy.helpers") -def expand_action(data): +def expand_action(data: Any) -> Any: """ From one document or action definition passed in by the user extract the action/data lines needed for opensearch's @@ -49,7 +51,7 @@ def expand_action(data): # make sure we don't alter the action data = data.copy() op_type = data.pop("_op_type", "index") - action = {op_type: {}} + action: Any = {op_type: {}} # If '_source' is a dict use it for source # otherwise if op_type == 'update' then @@ -104,17 +106,17 @@ def expand_action(data): class _ActionChunker: - def __init__(self, chunk_size, max_chunk_bytes, serializer): + def __init__(self, chunk_size: int, max_chunk_bytes: int, serializer: Any) -> None: self.chunk_size = chunk_size self.max_chunk_bytes = max_chunk_bytes self.serializer = serializer self.size = 0 self.action_count = 0 - self.bulk_actions = [] - self.bulk_data = [] + self.bulk_actions: Any = [] + self.bulk_data: Any = [] - def feed(self, action, data): + def feed(self, action: Any, data: Any) -> Any: ret = None raw_data, raw_action = data, action action = self.serializer.dumps(action) @@ -145,7 +147,7 @@ def feed(self, action, data): self.action_count += 1 return ret - def flush(self): + def flush(self) -> Any: ret = None if self.bulk_actions: ret = (self.bulk_data, self.bulk_actions) @@ -153,7 +155,9 @@ def flush(self): return ret -def _chunk_actions(actions, chunk_size, max_chunk_bytes, serializer): +def _chunk_actions( + actions: Any, chunk_size: int, max_chunk_bytes: int, serializer: Any +) -> Any: """ Split actions into chunks by number or size, serialize them into strings in the process. 
@@ -170,7 +174,9 @@ def _chunk_actions(actions, chunk_size, max_chunk_bytes, serializer): yield ret -def _process_bulk_chunk_success(resp, bulk_data, ignore_status, raise_on_error=True): +def _process_bulk_chunk_success( + resp: Any, bulk_data: Any, ignore_status: Any = (), raise_on_error: bool = True +) -> Any: # if raise on error is set, we need to collect errors per chunk before raising them errors = [] @@ -197,8 +203,12 @@ def _process_bulk_chunk_success(resp, bulk_data, ignore_status, raise_on_error=T def _process_bulk_chunk_error( - error, bulk_data, ignore_status, raise_on_exception=True, raise_on_error=True -): + error: Any, + bulk_data: Any, + ignore_status: Any = (), + raise_on_exception: bool = True, + raise_on_error: bool = True, +) -> Any: # default behavior - just propagate exception if raise_on_exception and error.status_code not in ignore_status: raise error @@ -227,15 +237,15 @@ def _process_bulk_chunk_error( def _process_bulk_chunk( - client, - bulk_actions, - bulk_data, - raise_on_exception=True, - raise_on_error=True, - ignore_status=(), - *args, - **kwargs -): + client: Any, + bulk_actions: Any, + bulk_data: Any, + raise_on_exception: bool = True, + raise_on_error: bool = True, + ignore_status: Any = (), + *args: Any, + **kwargs: Any +) -> Any: """ Send a bulk request to opensearch and process the output. """ @@ -265,21 +275,21 @@ def _process_bulk_chunk( def streaming_bulk( - client, - actions, - chunk_size=500, - max_chunk_bytes=100 * 1024 * 1024, - raise_on_error=True, - expand_action_callback=expand_action, - raise_on_exception=True, - max_retries=0, - initial_backoff=2, - max_backoff=600, - yield_ok=True, - ignore_status=(), - *args, - **kwargs -): + client: Any, + actions: Any, + chunk_size: int = 500, + max_chunk_bytes: int = 100 * 1024 * 1024, + raise_on_error: bool = True, + expand_action_callback: Any = expand_action, + raise_on_exception: bool = True, + max_retries: int = 0, + initial_backoff: int = 2, + max_backoff: int = 600, + yield_ok: bool = True, + ignore_status: Any = (), + *args: Any, + **kwargs: Any +) -> Any: """ Streaming bulk consumes actions from the iterable passed in and yields results per action. 
For non-streaming usecases use @@ -319,7 +329,8 @@ def streaming_bulk( actions, chunk_size, max_chunk_bytes, client.transport.serializer ): for attempt in range(max_retries + 1): - to_retry, to_retry_data = [], [] + to_retry: Any = [] + to_retry_data: Any = [] if attempt: time.sleep(min(max_backoff, initial_backoff * 2 ** (attempt - 1))) @@ -368,7 +379,14 @@ def streaming_bulk( bulk_actions, bulk_data = to_retry, to_retry_data -def bulk(client, actions, stats_only=False, ignore_status=(), *args, **kwargs): +def bulk( + client: Any, + actions: Any, + stats_only: bool = False, + ignore_status: Any = (), + *args: Any, + **kwargs: Any +) -> Any: """ Helper for the :meth:`~opensearchpy.OpenSearch.bulk` api that provides a more human friendly interface - it consumes an iterator of actions and @@ -404,9 +422,7 @@ def bulk(client, actions, stats_only=False, ignore_status=(), *args, **kwargs): # make streaming_bulk yield successful results so we can count them kwargs["yield_ok"] = True - for ok, item in streaming_bulk( - client, actions, ignore_status=ignore_status, *args, **kwargs - ): + for ok, item in streaming_bulk(client, actions, ignore_status=ignore_status, *args, **kwargs): # type: ignore # go through request-response pairs and detect failures if not ok: if not stats_only: @@ -419,17 +435,17 @@ def bulk(client, actions, stats_only=False, ignore_status=(), *args, **kwargs): def parallel_bulk( - client, - actions, - thread_count=4, - chunk_size=500, - max_chunk_bytes=100 * 1024 * 1024, - queue_size=4, - expand_action_callback=expand_action, - ignore_status=(), - *args, - **kwargs -): + client: Any, + actions: Any, + thread_count: int = 4, + chunk_size: int = 500, + max_chunk_bytes: int = 100 * 1024 * 1024, + queue_size: int = 4, + expand_action_callback: Any = expand_action, + ignore_status: Any = (), + *args: Any, + **kwargs: Any +) -> Any: """ Parallel version of the bulk helper run in multiple threads at once. @@ -456,11 +472,11 @@ def parallel_bulk( actions = map(expand_action_callback, actions) class BlockingPool(ThreadPool): - def _setup_queues(self): + def _setup_queues(self) -> None: super(BlockingPool, self)._setup_queues() # type: ignore # The queue must be at least the size of the number of threads to # prevent hanging when inserting sentinel values during teardown. 
- self._inqueue = Queue(max(queue_size, thread_count)) + self._inqueue: Any = Queue(max(queue_size, thread_count)) self._quick_put = self._inqueue.put pool = BlockingPool(thread_count) @@ -469,12 +485,7 @@ def _setup_queues(self): for result in pool.imap( lambda bulk_chunk: list( _process_bulk_chunk( - client, - bulk_chunk[1], - bulk_chunk[0], - ignore_status=ignore_status, - *args, - **kwargs + client, bulk_chunk[1], bulk_chunk[0], ignore_status, *args, **kwargs ) ), _chunk_actions( @@ -490,17 +501,17 @@ def _setup_queues(self): def scan( - client, - query=None, - scroll="5m", - raise_on_error=True, - preserve_order=False, - size=1000, - request_timeout=None, - clear_scroll=True, - scroll_kwargs=None, - **kwargs -): + client: Any, + query: Any = None, + scroll: Optional[str] = "5m", + raise_on_error: Optional[bool] = True, + preserve_order: Optional[bool] = False, + size: Optional[int] = 1000, + request_timeout: Optional[float] = None, + clear_scroll: Optional[bool] = True, + scroll_kwargs: Any = None, + **kwargs: Any +) -> Any: """ Simple abstraction on top of the :meth:`~opensearchpy.OpenSearch.scroll` api - a simple iterator that @@ -608,16 +619,16 @@ def scan( def reindex( - client, - source_index, - target_index, - query=None, - target_client=None, - chunk_size=500, - scroll="5m", - scan_kwargs={}, - bulk_kwargs={}, -): + client: Any, + source_index: Any, + target_index: Any, + query: Any = None, + target_client: Any = None, + chunk_size: int = 500, + scroll: str = "5m", + scan_kwargs: Any = {}, + bulk_kwargs: Any = {}, +) -> Any: """ Reindex all documents from one index that satisfy a given query to another, potentially (if `target_client` is specified) on a different cluster. @@ -651,7 +662,7 @@ def reindex( target_client = client if target_client is None else target_client docs = scan(client, query=query, index=source_index, scroll=scroll, **scan_kwargs) - def _change_doc_index(hits, index): + def _change_doc_index(hits: Any, index: Any) -> Any: for h in hits: h["_index"] = index if "fields" in h: diff --git a/opensearchpy/helpers/actions.pyi b/opensearchpy/helpers/actions.pyi deleted file mode 100644 index 4fee4bd1..00000000 --- a/opensearchpy/helpers/actions.pyi +++ /dev/null @@ -1,136 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -import logging -import sys -from typing import ( - Any, - AsyncIterable, - Callable, - Collection, - Dict, - Generator, - Iterable, - List, - Mapping, - Optional, - Tuple, - Union, - overload, -) - -if sys.version_info >= (3, 8): - from typing import Literal -else: - from typing_extensions import Literal - -from ..client import OpenSearch -from ..serializer import Serializer - -logger: logging.Logger - -def expand_action(data: Any) -> Tuple[Dict[str, Any], Optional[Any]]: ... -def _chunk_actions( - actions: Any, chunk_size: int, max_chunk_bytes: int, serializer: Serializer -) -> Generator[Any, None, None]: ... -def _process_bulk_chunk( - client: OpenSearch, - bulk_actions: Any, - bulk_data: Any, - raise_on_exception: bool = ..., - raise_on_error: bool = ..., - *args: Any, - **kwargs: Any -) -> Generator[Tuple[bool, Any], None, None]: ... -def streaming_bulk( - client: OpenSearch, - actions: Union[Iterable[Any], AsyncIterable[Any]], - chunk_size: int = ..., - max_chunk_bytes: int = ..., - raise_on_error: bool = ..., - expand_action_callback: Callable[[Any], Tuple[Dict[str, Any], Optional[Any]]] = ..., - raise_on_exception: bool = ..., - max_retries: int = ..., - initial_backoff: Union[float, int] = ..., - max_backoff: Union[float, int] = ..., - yield_ok: bool = ..., - ignore_status: Optional[Union[int, Collection[int]]] = ..., - *args: Any, - **kwargs: Any -) -> Generator[Tuple[bool, Any], None, None]: ... -@overload -def bulk( - client: OpenSearch, - actions: Iterable[Any], - stats_only: Literal[True] = ..., - ignore_status: Optional[Union[int, Collection[int]]] = ..., - *args: Any, - **kwargs: Any -) -> Tuple[int, int]: ... -@overload -def bulk( - client: OpenSearch, - actions: Iterable[Any], - stats_only: Literal[False], - ignore_status: Optional[Union[int, Collection[int]]] = ..., - *args: Any, - **kwargs: Any -) -> Tuple[int, List[Any]]: ... -def parallel_bulk( - client: OpenSearch, - actions: Iterable[Any], - thread_count: int = ..., - chunk_size: int = ..., - max_chunk_bytes: int = ..., - queue_size: int = ..., - expand_action_callback: Callable[[Any], Tuple[Dict[str, Any], Optional[Any]]] = ..., - ignore_status: Optional[Union[int, Collection[int]]] = ..., - *args: Any, - **kwargs: Any -) -> Generator[Tuple[bool, Any], None, None]: ... -def scan( - client: OpenSearch, - query: Optional[Any] = ..., - scroll: str = ..., - raise_on_error: bool = ..., - preserve_order: bool = ..., - size: int = ..., - request_timeout: Optional[Union[float, int]] = ..., - clear_scroll: bool = ..., - scroll_kwargs: Optional[Mapping[str, Any]] = ..., - **kwargs: Any -) -> Generator[Any, None, None]: ... -def reindex( - client: OpenSearch, - source_index: Union[str, Collection[str]], - target_index: str, - query: Any = ..., - target_client: Optional[OpenSearch] = ..., - chunk_size: int = ..., - scroll: str = ..., - scan_kwargs: Optional[Mapping[str, Any]] = ..., - bulk_kwargs: Optional[Mapping[str, Any]] = ..., -) -> Tuple[int, Union[int, List[Any]]]: ... diff --git a/opensearchpy/helpers/aggs.py b/opensearchpy/helpers/aggs.py index 5a7f800c..59795614 100644 --- a/opensearchpy/helpers/aggs.py +++ b/opensearchpy/helpers/aggs.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -24,16 +25,15 @@ # specific language governing permissions and limitations # under the License. 
-try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc + +import collections.abc as collections_abc +from typing import Any, Optional from .response.aggs import AggResponse, BucketData, FieldBucketData, TopHitsData from .utils import DslBase -def A(name_or_agg, filter=None, **params): +def A(name_or_agg: Any, filter: Any = None, **params: Any) -> Any: if filter is not None: if name_or_agg != "filter": raise ValueError( @@ -47,7 +47,7 @@ def A(name_or_agg, filter=None, **params): if params: raise ValueError("A() cannot accept parameters when passing in a dict.") # copy to avoid modifying in-place - agg = name_or_agg.copy() + agg = name_or_agg.copy() # type: ignore # pop out nested aggs aggs = agg.pop("aggs", None) # pop out meta data @@ -80,20 +80,20 @@ def A(name_or_agg, filter=None, **params): class Agg(DslBase): - _type_name = "agg" + _type_name: str = "agg" _type_shortcut = staticmethod(A) - name = None + name: Optional[str] = None - def __contains__(self, key): + def __contains__(self, key: Any) -> bool: return False - def to_dict(self): + def to_dict(self) -> Any: d = super(Agg, self).to_dict() if "meta" in d[self.name]: d["meta"] = d[self.name].pop("meta") return d - def result(self, search, data): + def result(self, search: Any, data: Any) -> Any: return AggResponse(self, search, data) @@ -102,10 +102,10 @@ class AggBase(object): "aggs": {"type": "agg", "hash": True}, } - def __contains__(self, key): + def __contains__(self: Any, key: Any) -> bool: return key in self._params.get("aggs", {}) - def __getitem__(self, agg_name): + def __getitem__(self: Any, agg_name: Any) -> Any: agg = self._params.setdefault("aggs", {})[agg_name] # propagate KeyError # make sure we're not mutating a shared state - whenever accessing a @@ -117,13 +117,15 @@ def __getitem__(self, agg_name): return agg - def __setitem__(self, agg_name, agg): + def __setitem__(self: Any, agg_name: str, agg: Any) -> None: self.aggs[agg_name] = A(agg) - def __iter__(self): + def __iter__(self: Any) -> Any: return iter(self.aggs) - def _agg(self, bucket, name, agg_type, *args, **params): + def _agg( + self: Any, bucket: Any, name: Any, agg_type: Any, *args: Any, **params: Any + ) -> Any: agg = self[name] = A(agg_type, *args, **params) # For chaining - when creating new buckets return them... 
@@ -133,26 +135,26 @@ def _agg(self, bucket, name, agg_type, *args, **params): else: return self._base - def metric(self, name, agg_type, *args, **params): + def metric(self: Any, name: Any, agg_type: Any, *args: Any, **params: Any) -> Any: return self._agg(False, name, agg_type, *args, **params) - def bucket(self, name, agg_type, *args, **params): + def bucket(self: Any, name: Any, agg_type: Any, *args: Any, **params: Any) -> Any: return self._agg(True, name, agg_type, *args, **params) - def pipeline(self, name, agg_type, *args, **params): + def pipeline(self: Any, name: Any, agg_type: Any, *args: Any, **params: Any) -> Any: return self._agg(False, name, agg_type, *args, **params) - def result(self, search, data): + def result(self: Any, search: Any, data: Any) -> Any: return BucketData(self, search, data) class Bucket(AggBase, Agg): - def __init__(self, **params): + def __init__(self, **params: Any) -> None: super(Bucket, self).__init__(**params) # remember self for chaining self._base = self - def to_dict(self): + def to_dict(self) -> Any: d = super(AggBase, self).to_dict() if "aggs" in d[self.name]: d["aggs"] = d[self.name].pop("aggs") @@ -160,18 +162,18 @@ def to_dict(self): class Filter(Bucket): - name = "filter" + name: Optional[str] = "filter" _param_defs = { "filter": {"type": "query"}, "aggs": {"type": "agg", "hash": True}, } - def __init__(self, filter=None, **params): + def __init__(self, filter: Any = None, **params: Any) -> None: if filter is not None: params["filter"] = filter super(Filter, self).__init__(**params) - def to_dict(self): + def to_dict(self) -> Any: d = super(Filter, self).to_dict() d[self.name].update(d[self.name].pop("filter", {})) return d @@ -183,7 +185,7 @@ class Pipeline(Agg): # bucket aggregations class Filters(Bucket): - name = "filters" + name: str = "filters" _param_defs = { "filters": {"type": "query", "hash": True}, "aggs": {"type": "agg", "hash": True}, @@ -201,7 +203,7 @@ class Parent(Bucket): class DateHistogram(Bucket): name = "date_histogram" - def result(self, search, data): + def result(self, search: Any, data: Any) -> Any: return FieldBucketData(self, search, data) @@ -236,7 +238,7 @@ class Global(Bucket): class Histogram(Bucket): name = "histogram" - def result(self, search, data): + def result(self, search: Any, data: Any) -> Any: return FieldBucketData(self, search, data) @@ -259,7 +261,7 @@ class Range(Bucket): class RareTerms(Bucket): name = "rare_terms" - def result(self, search, data): + def result(self, search: Any, data: Any) -> Any: return FieldBucketData(self, search, data) @@ -278,7 +280,7 @@ class SignificantText(Bucket): class Terms(Bucket): name = "terms" - def result(self, search, data): + def result(self, search: Any, data: Any) -> Any: return FieldBucketData(self, search, data) @@ -301,7 +303,7 @@ class Composite(Bucket): class VariableWidthHistogram(Bucket): name = "variable_width_histogram" - def result(self, search, data): + def result(self, search: Any, data: Any) -> Any: return FieldBucketData(self, search, data) @@ -309,7 +311,7 @@ def result(self, search, data): class TopHits(Agg): name = "top_hits" - def result(self, search, data): + def result(self, search: Any, data: Any) -> Any: return TopHitsData(self, search, data) diff --git a/opensearchpy/helpers/aggs.pyi b/opensearchpy/helpers/aggs.pyi deleted file mode 100644 index e3f6e93c..00000000 --- a/opensearchpy/helpers/aggs.pyi +++ /dev/null @@ -1,104 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# 
this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from typing import Any - -from _typeshed import Incomplete - -from ..response.aggs import AggResponse as AggResponse -from ..response.aggs import BucketData as BucketData -from ..response.aggs import FieldBucketData as FieldBucketData -from ..response.aggs import TopHitsData as TopHitsData -from .utils import DslBase -from .utils import DslBase as DslBase - -def A(name_or_agg: Any, filter: Incomplete | None = ..., **params: Any) -> Any: ... - -class Agg(DslBase): ... -class AggBase(object): ... -class Bucket(AggBase, Agg): ... -class Filter(Bucket): ... -class Pipeline(Agg): ... -class Filters(Bucket): ... -class Children(Bucket): ... -class Parent(Bucket): ... -class DateHistogram(Bucket): ... -class AutoDateHistogram(DateHistogram): ... -class DateRange(Bucket): ... -class GeoDistance(Bucket): ... -class GeohashGrid(Bucket): ... -class GeotileGrid(Bucket): ... -class GeoCentroid(Bucket): ... -class Global(Bucket): ... -class Histogram(Bucket): ... -class IPRange(Bucket): ... -class Missing(Bucket): ... -class Nested(Bucket): ... -class Range(Bucket): ... -class RareTerms(Bucket): ... -class ReverseNested(Bucket): ... -class SignificantTerms(Bucket): ... -class SignificantText(Bucket): ... -class Terms(Bucket): ... -class Sampler(Bucket): ... -class DiversifiedSampler(Bucket): ... -class Composite(Bucket): ... -class VariableWidthHistogram(Bucket): ... -class TopHits(Agg): ... -class Avg(Agg): ... -class WeightedAvg(Agg): ... -class Cardinality(Agg): ... -class ExtendedStats(Agg): ... -class Boxplot(Agg): ... -class GeoBounds(Agg): ... -class Max(Agg): ... -class MedianAbsoluteDeviation(Agg): ... -class Min(Agg): ... -class Percentiles(Agg): ... -class PercentileRanks(Agg): ... -class ScriptedMetric(Agg): ... -class Stats(Agg): ... -class Sum(Agg): ... -class TTest(Agg): ... -class ValueCount(Agg): ... -class AvgBucket(Pipeline): ... -class BucketScript(Pipeline): ... -class BucketSelector(Pipeline): ... -class CumulativeSum(Pipeline): ... -class CumulativeCardinality(Pipeline): ... -class Derivative(Pipeline): ... -class ExtendedStatsBucket(Pipeline): ... -class Inference(Pipeline): ... -class MaxBucket(Pipeline): ... -class MinBucket(Pipeline): ... -class MovingFn(Pipeline): ... -class MovingAvg(Pipeline): ... -class MovingPercentiles(Pipeline): ... -class Normalize(Pipeline): ... -class PercentilesBucket(Pipeline): ... -class SerialDiff(Pipeline): ... -class StatsBucket(Pipeline): ... -class SumBucket(Pipeline): ... -class BucketSort(Pipeline): ... 
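For context, a minimal sketch of how the aggregation DSL annotated above is used (illustrative only, not part of the patch; the field names `tags`, `score` and `published` are assumptions). `A()` accepts either an aggregation name plus parameters or an equivalent raw dict, and `AggBase.bucket()`/`metric()` chain sub-aggregations:

    from opensearchpy.helpers.aggs import A

    # A() builds an Agg from a name plus parameters, or from a raw dict.
    tags = A("terms", field="tags")
    assert tags.to_dict() == A({"terms": {"field": "tags"}}).to_dict()

    # metric() returns the enclosing bucket; bucket() returns the new
    # bucket, so nested aggregations can be built fluently.
    tags.metric("max_score", "max", field="score")
    tags.bucket("per_day", "date_histogram", field="published", fixed_interval="1d")

    # Bucket.to_dict() hoists child aggregations into a sibling "aggs" key.
    assert tags.to_dict() == {
        "terms": {"field": "tags"},
        "aggs": {
            "max_score": {"max": {"field": "score"}},
            "per_day": {"date_histogram": {"field": "published", "fixed_interval": "1d"}},
        },
    }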
diff --git a/opensearchpy/helpers/analysis.py b/opensearchpy/helpers/analysis.py index 251b004a..c228acd1 100644 --- a/opensearchpy/helpers/analysis.py +++ b/opensearchpy/helpers/analysis.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -24,17 +25,20 @@ # specific language governing permissions and limitations # under the License. +from typing import Any, Optional + import six from opensearchpy.connection.connections import get_connection -from opensearchpy.helpers.utils import AttrDict, DslBase, merge -__all__ = ["tokenizer", "analyzer", "char_filter", "token_filter", "normalizer"] +from .utils import AttrDict, DslBase, merge class AnalysisBase(object): @classmethod - def _type_shortcut(cls, name_or_instance, type=None, **kwargs): + def _type_shortcut( + cls: Any, name_or_instance: Any, type: Any = None, **kwargs: Any + ) -> Any: if isinstance(name_or_instance, cls): if type or kwargs: raise ValueError("%s() cannot accept parameters." % cls.__name__) @@ -49,29 +53,31 @@ def _type_shortcut(cls, name_or_instance, type=None, **kwargs): class CustomAnalysis(object): - name = "custom" + name: Optional[str] = "custom" - def __init__(self, filter_name, builtin_type="custom", **kwargs): + def __init__( + self, filter_name: str, builtin_type: str = "custom", **kwargs: Any + ) -> None: self._builtin_type = builtin_type self._name = filter_name super(CustomAnalysis, self).__init__(**kwargs) - def to_dict(self): + def to_dict(self) -> Any: # only name to present in lists return self._name - def get_definition(self): - d = super(CustomAnalysis, self).to_dict() + def get_definition(self) -> Any: + d = super(CustomAnalysis, self).to_dict() # type: ignore d = d.pop(self.name) d["type"] = self._builtin_type return d class CustomAnalysisDefinition(CustomAnalysis): - def get_analysis_definition(self): + def get_analysis_definition(self: Any) -> Any: out = {self._type_name: {self._name: self.get_definition()}} - t = getattr(self, "tokenizer", None) + t: Any = getattr(self, "tokenizer", None) if "tokenizer" in self._param_defs and hasattr(t, "get_definition"): out["tokenizer"] = {t._name: t.get_definition()} @@ -102,24 +108,24 @@ def get_analysis_definition(self): class BuiltinAnalysis(object): - name = "builtin" + name: Optional[str] = "builtin" - def __init__(self, name): + def __init__(self, name: Any) -> None: self._name = name super(BuiltinAnalysis, self).__init__() - def to_dict(self): + def to_dict(self) -> Any: # only name to present in lists return self._name class Analyzer(AnalysisBase, DslBase): - _type_name = "analyzer" - name = None + _type_name: str = "analyzer" + name: Optional[str] = None class BuiltinAnalyzer(BuiltinAnalysis, Analyzer): - def get_analysis_definition(self): + def get_analysis_definition(self) -> Any: return {} @@ -130,7 +136,13 @@ class CustomAnalyzer(CustomAnalysisDefinition, Analyzer): "tokenizer": {"type": "tokenizer"}, } - def simulate(self, text, using="default", explain=False, attributes=None): + def simulate( + self, + text: Any, + using: str = "default", + explain: bool = False, + attributes: Any = None, + ) -> Any: """ Use the Analyze API of opensearch to test the outcome of this analyzer. 
@@ -171,12 +183,12 @@ def simulate(self, text, using="default", explain=False, attributes=None): class Normalizer(AnalysisBase, DslBase): - _type_name = "normalizer" - name = None + _type_name: str = "normalizer" + name: Optional[str] = None class BuiltinNormalizer(BuiltinAnalysis, Normalizer): - def get_analysis_definition(self): + def get_analysis_definition(self) -> Any: return {} @@ -188,8 +200,8 @@ class CustomNormalizer(CustomAnalysisDefinition, Normalizer): class Tokenizer(AnalysisBase, DslBase): - _type_name = "tokenizer" - name = None + _type_name: str = "tokenizer" + name: Optional[str] = None class BuiltinTokenizer(BuiltinAnalysis, Tokenizer): @@ -201,8 +213,8 @@ class CustomTokenizer(CustomAnalysis, Tokenizer): class TokenFilter(AnalysisBase, DslBase): - _type_name = "token_filter" - name = None + _type_name: str = "token_filter" + name: Optional[str] = None class BuiltinTokenFilter(BuiltinAnalysis, TokenFilter): @@ -216,7 +228,7 @@ class CustomTokenFilter(CustomAnalysis, TokenFilter): class MultiplexerTokenFilter(CustomTokenFilter): name = "multiplexer" - def get_definition(self): + def get_definition(self) -> Any: d = super(CustomTokenFilter, self).get_definition() if "filters" in d: @@ -229,11 +241,11 @@ def get_definition(self): ] return d - def get_analysis_definition(self): + def get_analysis_definition(self) -> Any: if not hasattr(self, "filters"): return {} - fs = {} + fs: Any = {} d = {"filter": fs} for filters in self.filters: if isinstance(filters, six.string_types): @@ -251,7 +263,7 @@ def get_analysis_definition(self): class ConditionalTokenFilter(CustomTokenFilter): name = "condition" - def get_definition(self): + def get_definition(self) -> Any: d = super(CustomTokenFilter, self).get_definition() if "filter" in d: d["filter"] = [ @@ -259,7 +271,7 @@ def get_definition(self): ] return d - def get_analysis_definition(self): + def get_analysis_definition(self) -> Any: if not hasattr(self, "filter"): return {} @@ -273,8 +285,8 @@ def get_analysis_definition(self): class CharFilter(AnalysisBase, DslBase): - _type_name = "char_filter" - name = None + _type_name: str = "char_filter" + name: Optional[str] = None class BuiltinCharFilter(BuiltinAnalysis, CharFilter): @@ -291,3 +303,5 @@ class CustomCharFilter(CustomAnalysis, CharFilter): token_filter = TokenFilter._type_shortcut char_filter = CharFilter._type_shortcut normalizer = Normalizer._type_shortcut + +__all__ = ["tokenizer", "analyzer", "char_filter", "token_filter", "normalizer"] diff --git a/opensearchpy/helpers/analysis.pyi b/opensearchpy/helpers/analysis.pyi deleted file mode 100644 index b4f37af5..00000000 --- a/opensearchpy/helpers/analysis.pyi +++ /dev/null @@ -1,49 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from .utils import DslBase - -class AnalysisBase(object): ... -class CustomAnalysis(object): ... -class CustomAnalysisDefinition(CustomAnalysis): ... -class BuiltinAnalysis(object): ... -class Analyzer(AnalysisBase, DslBase): ... -class BuiltinAnalyzer(BuiltinAnalysis, Analyzer): ... -class CustomAnalyzer(CustomAnalysisDefinition, Analyzer): ... -class Normalizer(AnalysisBase, DslBase): ... -class BuiltinNormalizer(BuiltinAnalysis, Normalizer): ... -class CustomNormalizer(CustomAnalysisDefinition, Normalizer): ... -class Tokenizer(AnalysisBase, DslBase): ... -class BuiltinTokenizer(BuiltinAnalysis, Tokenizer): ... -class CustomTokenizer(CustomAnalysis, Tokenizer): ... -class TokenFilter(AnalysisBase, DslBase): ... -class BuiltinTokenFilter(BuiltinAnalysis, TokenFilter): ... -class CustomTokenFilter(CustomAnalysis, TokenFilter): ... -class MultiplexerTokenFilter(CustomTokenFilter): ... -class ConditionalTokenFilter(CustomTokenFilter): ... -class CharFilter(AnalysisBase, DslBase): ... -class BuiltinCharFilter(BuiltinAnalysis, CharFilter): ... -class CustomCharFilter(CustomAnalysis, CharFilter): ... diff --git a/opensearchpy/helpers/asyncsigner.py b/opensearchpy/helpers/asyncsigner.py index 08a81748..8dee4fee 100644 --- a/opensearchpy/helpers/asyncsigner.py +++ b/opensearchpy/helpers/asyncsigner.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -7,9 +8,7 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -import sys - -PY3 = sys.version_info[0] == 3 +from typing import Any, Dict, Optional, Union class AWSV4SignerAsyncAuth: @@ -17,7 +16,7 @@ class AWSV4SignerAsyncAuth: AWS V4 Request Signer for Async Requests. """ - def __init__(self, credentials, region, service="es"): # type: ignore + def __init__(self, credentials: Any, region: str, service: str = "es") -> None: if not credentials: raise ValueError("Credentials cannot be empty") self.credentials = credentials @@ -30,10 +29,22 @@ def __init__(self, credentials, region, service="es"): # type: ignore raise ValueError("Service name cannot be empty") self.service = service - def __call__(self, method, url, query_string, body): # type: ignore - return self._sign_request(method, url, query_string, body) # type: ignore + def __call__( + self, + method: str, + url: str, + query_string: Optional[str] = None, + body: Optional[Union[str, bytes]] = None, + ) -> Dict[str, str]: + return self._sign_request(method, url, query_string, body) - def _sign_request(self, method, url, query_string, body): + def _sign_request( + self, + method: str, + url: str, + query_string: Optional[str], + body: Optional[Union[str, bytes]], + ) -> Dict[str, str]: """ This method helps in signing the request by injecting the required headers. 
:param prepared_request: unsigned headers @@ -50,7 +61,21 @@ def _sign_request(self, method, url, query_string, body): data=body, ) - sig_v4_auth = SigV4Auth(self.credentials, self.service, self.region) + # credentials objects expose access_key, secret_key and token attributes + # via @property annotations that call _refresh() on every access, + # creating a race condition if the credentials expire before secret_key + # is called but after access_key; the end result is that the access_key + # doesn't correspond to the secret_key used to sign the request. To avoid + # this, get_frozen_credentials(), which returns non-refreshing credentials, + # is called if it exists. + credentials = ( + self.credentials.get_frozen_credentials() + if hasattr(self.credentials, "get_frozen_credentials") + and callable(self.credentials.get_frozen_credentials) + else self.credentials + ) + + sig_v4_auth = SigV4Auth(credentials, self.service, self.region) sig_v4_auth.add_auth(aws_request) aws_request.headers["X-Amz-Content-SHA256"] = sig_v4_auth.payload(aws_request) diff --git a/opensearchpy/helpers/asyncsigner.pyi b/opensearchpy/helpers/asyncsigner.pyi deleted file mode 100644 index 2c701bb9..00000000 --- a/opensearchpy/helpers/asyncsigner.pyi +++ /dev/null @@ -1,18 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -from typing import Any, Dict, List - -class AWSV4SignerAsyncAuth: - @property - def __init__(self, *args: Any, **kwargs: Any) -> None: ... - @property - def __call__(self, *args: Any, **kwargs: Any) -> Any: ... - @property - def _sign_request(self, *args: Any, **kwargs: Any) -> Dict[str, List[str]]: ... diff --git a/opensearchpy/helpers/document.py b/opensearchpy/helpers/document.py index 7d45275a..f1673ce7 100644 --- a/opensearchpy/helpers/document.py +++ b/opensearchpy/helpers/document.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -24,12 +25,9 @@ # specific language governing permissions and limitations # under the License.
-try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc - +import collections.abc as collections_abc from fnmatch import fnmatch +from typing import Any, Tuple, Type from six import add_metaclass, iteritems @@ -45,12 +43,17 @@ class MetaField(object): - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: self.args, self.kwargs = args, kwargs class DocumentMeta(type): - def __new__(cls, name, bases, attrs): + def __new__( + cls: Any, + name: str, + bases: Tuple[Type[ObjectBase]], + attrs: Any, + ) -> Any: # DocumentMeta filters attrs in place attrs["_doc_type"] = DocumentOptions(name, bases, attrs) return super(DocumentMeta, cls).__new__(cls, name, bases, attrs) @@ -61,7 +64,12 @@ class IndexMeta(DocumentMeta): # class, only user defined subclasses should have an _index attr _document_initialized = False - def __new__(cls, name, bases, attrs): + def __new__( + cls: Any, + name: str, + bases: Tuple[Type[ObjectBase]], + attrs: Any, + ) -> Any: new_cls = super(IndexMeta, cls).__new__(cls, name, bases, attrs) if cls._document_initialized: index_opts = attrs.pop("Index", None) @@ -72,7 +80,7 @@ def __new__(cls, name, bases, attrs): return new_cls @classmethod - def construct_index(cls, opts, bases): + def construct_index(cls, opts: Any, bases: Any) -> Any: if opts is None: for b in bases: if hasattr(b, "_index"): @@ -90,7 +98,12 @@ def construct_index(cls, opts, bases): class DocumentOptions(object): - def __init__(self, name, bases, attrs): + def __init__( + self, + name: str, + bases: Tuple[Type[ObjectBase]], + attrs: Any, + ) -> None: meta = attrs.pop("Meta", None) # create the mapping instance @@ -114,7 +127,7 @@ def __init__(self, name, bases, attrs): self.mapping.update(b._doc_type.mapping, update_only=True) @property - def name(self): + def name(self) -> Any: return self.mapping.properties.name @@ -125,7 +138,7 @@ class InnerDoc(ObjectBase): """ @classmethod - def from_opensearch(cls, data, data_only=False): + def from_opensearch(cls, data: Any, data_only: bool = False) -> Any: if data_only: data = {"_source": data} return super(InnerDoc, cls).from_opensearch(data) @@ -138,25 +151,25 @@ class Document(ObjectBase): """ @classmethod - def _matches(cls, hit): + def _matches(cls: Any, hit: Any) -> Any: if cls._index._name is None: return True return fnmatch(hit.get("_index", ""), cls._index._name) @classmethod - def _get_using(cls, using=None): + def _get_using(cls: Any, using: Any = None) -> Any: return using or cls._index._using @classmethod - def _get_connection(cls, using=None): + def _get_connection(cls, using: Any = None) -> Any: return get_connection(cls._get_using(using)) @classmethod - def _default_index(cls, index=None): + def _default_index(cls: Any, index: Any = None) -> Any: return index or cls._index._name @classmethod - def init(cls, index=None, using=None): + def init(cls: Any, index: Any = None, using: Any = None) -> None: """ Create the index and populate the mappings in opensearch. 
""" @@ -165,7 +178,7 @@ def init(cls, index=None, using=None): i = i.clone(name=index) i.save(using=using) - def _get_index(self, index=None, required=True): + def _get_index(self, index: Any = None, required: bool = True) -> Any: if index is None: index = getattr(self.meta, "index", None) if index is None: @@ -176,7 +189,7 @@ def _get_index(self, index=None, required=True): raise ValidationException("You cannot write to a wildcard index.") return index - def __repr__(self): + def __repr__(self) -> str: return "{}({})".format( self.__class__.__name__, ", ".join( @@ -187,7 +200,7 @@ def __repr__(self): ) @classmethod - def search(cls, using=None, index=None): + def search(cls, using: Any = None, index: Any = None) -> Any: """ Create an :class:`~opensearchpy.Search` instance that will search over this ``Document``. @@ -197,7 +210,7 @@ def search(cls, using=None, index=None): ) @classmethod - def get(cls, id, using=None, index=None, **kwargs): + def get(cls: Any, id: Any, using: Any = None, index: Any = None, **kwargs: Any) -> Any: # type: ignore """ Retrieve a single document from opensearch using its ``id``. @@ -216,7 +229,9 @@ def get(cls, id, using=None, index=None, **kwargs): return cls.from_opensearch(doc) @classmethod - def exists(cls, id, using=None, index=None, **kwargs): + def exists( + cls, id: Any, using: Any = None, index: Any = None, **kwargs: Any + ) -> Any: """ check if exists a single document from opensearch using its ``id``. @@ -233,13 +248,19 @@ def exists(cls, id, using=None, index=None, **kwargs): @classmethod def mget( - cls, docs, using=None, index=None, raise_on_error=True, missing="none", **kwargs - ): - r""" - Retrieve multiple document by their ``id``\s. Returns a list of instances + cls, + docs: Any, + using: Any = None, + index: Any = None, + raise_on_error: bool = True, + missing: str = "none", + **kwargs: Any, + ) -> Any: + """ + Retrieve multiple document by their ``id``'s. Returns a list of instances in the same order as requested. - :arg docs: list of ``id``\s of the documents to be retrieved or a list + :arg docs: list of ``id``'s of the documents to be retrieved or a list of document specifications as per https://opensearch.org/docs/latest/opensearch/rest-api/document-apis/multi-get/ :arg index: opensearch index to use, if the ``Document`` is @@ -263,7 +284,9 @@ def mget( } results = opensearch.mget(body, index=cls._default_index(index), **kwargs) - objs, error_docs, missing_docs = [], [], [] + objs: Any = [] + error_docs: Any = [] + missing_docs: Any = [] for doc in results["docs"]: if doc.get("found"): if error_docs or missing_docs: @@ -296,7 +319,7 @@ def mget( raise NotFoundError(404, message, {"docs": missing_docs}) return objs - def delete(self, using=None, index=None, **kwargs): + def delete(self, using: Any = None, index: Any = None, **kwargs: Any) -> Any: """ Delete the instance in opensearch. @@ -319,7 +342,7 @@ def delete(self, using=None, index=None, **kwargs): doc_meta.update(kwargs) opensearch.delete(index=self._get_index(index), **doc_meta) - def to_dict(self, include_meta=False, skip_empty=True): + def to_dict(self, include_meta: bool = False, skip_empty: bool = True) -> Any: # type: ignore """ Serialize the instance into a dictionary so that it can be saved in opensearch. 
@@ -347,19 +370,19 @@ def to_dict(self, include_meta=False, skip_empty=True): def update( self, - using=None, - index=None, - detect_noop=True, - doc_as_upsert=False, - refresh=False, - retry_on_conflict=None, - script=None, - script_id=None, - scripted_upsert=False, - upsert=None, - return_doc_meta=False, - **fields - ): + using: Any = None, + index: Any = None, + detect_noop: bool = True, + doc_as_upsert: bool = False, + refresh: bool = False, + retry_on_conflict: Any = None, + script: Any = None, + script_id: Any = None, + scripted_upsert: bool = False, + upsert: Any = None, + return_doc_meta: bool = False, + **fields: Any, + ) -> Any: """ Partial update of the document, specify fields you wish to update and both the instance and the document in opensearch will be updated:: @@ -388,7 +411,7 @@ def update( :return operation result noop/updated """ - body = { + body: Any = { "doc_as_upsert": doc_as_upsert, "detect_noop": detect_noop, } @@ -452,13 +475,13 @@ def update( def save( self, - using=None, - index=None, - validate=True, - skip_empty=True, - return_doc_meta=False, - **kwargs - ): + using: Any = None, + index: Any = None, + validate: bool = True, + skip_empty: bool = True, + return_doc_meta: bool = False, + **kwargs: Any, + ) -> Any: """ Save the document into opensearch. If the document doesn't exist it is created, it is overwritten otherwise. Returns ``True`` if this @@ -495,7 +518,7 @@ def save( meta = opensearch.index( index=self._get_index(index), body=self.to_dict(skip_empty=skip_empty), - **doc_meta + **doc_meta, ) # update meta information from OpenSearch for k in META_FIELDS: diff --git a/opensearchpy/helpers/document.pyi b/opensearchpy/helpers/document.pyi deleted file mode 100644 index d740b931..00000000 --- a/opensearchpy/helpers/document.pyi +++ /dev/null @@ -1,36 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# THIS FILE IS AUTOMATICALLY GENERATED, DO NOT EDIT. - -from .utils import ObjectBase - -class MetaField(object): ... -class DocumentMeta(type): ... -class IndexMeta(DocumentMeta): ... -class DocumentOptions(object): ... -class InnerDoc(ObjectBase): ... -class Document(ObjectBase): ... 
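Similarly, a minimal end-to-end sketch of the `Document` API whose signatures are annotated above (illustrative only, not part of the patch; the `Article` class, the `articles` index name and the local cluster address are assumptions):

    from opensearchpy import Document, Keyword, Text
    from opensearchpy.connection.connections import connections

    # Register a default connection for the DSL layer (assumes a reachable cluster).
    connections.create_connection(hosts=["https://localhost:9200"])

    class Article(Document):
        title = Text()
        tags = Keyword()

        class Index:
            name = "articles"

    Article.init()                    # create the index and its mappings
    art = Article(meta={"id": 42}, title="Hello", tags=["news"])
    art.save()                        # index (or overwrite) the document
    same = Article.get(id=42)         # fetch it back by id
    same.update(title="Hello again")  # partial update, as in update() above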
diff --git a/opensearchpy/helpers/errors.py b/opensearchpy/helpers/errors.py index dc9e62da..220b6b31 100644 --- a/opensearchpy/helpers/errors.py +++ b/opensearchpy/helpers/errors.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,17 +26,21 @@ # under the License. +from typing import Any, List + from ..exceptions import OpenSearchException class BulkIndexError(OpenSearchException): @property - def errors(self): + def errors(self) -> List[Any]: """List of errors from execution of the last chunk.""" - return self.args[1] + return self.args[1] # type: ignore class ScanError(OpenSearchException): - def __init__(self, scroll_id, *args, **kwargs): - super(ScanError, self).__init__(*args, **kwargs) # type: ignore + scroll_id: str + + def __init__(self, scroll_id: str, *args: Any, **kwargs: Any) -> None: + super(ScanError, self).__init__(*args, **kwargs) self.scroll_id = scroll_id diff --git a/opensearchpy/helpers/errors.pyi b/opensearchpy/helpers/errors.pyi deleted file mode 100644 index bed92df7..00000000 --- a/opensearchpy/helpers/errors.pyi +++ /dev/null @@ -1,37 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, List - -from ..exceptions import OpenSearchException - -class BulkIndexError(OpenSearchException): - @property - def errors(self) -> List[Any]: ... - -class ScanError(OpenSearchException): - scroll_id: str - def __init__(self, scroll_id: str, *args: Any, **kwargs: Any) -> None: ... diff --git a/opensearchpy/helpers/faceted_search.py b/opensearchpy/helpers/faceted_search.py index 6da84dc2..e9ae14ef 100644 --- a/opensearchpy/helpers/faceted_search.py +++ b/opensearchpy/helpers/faceted_search.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,6 +26,7 @@ # under the License. from datetime import datetime, timedelta +from typing import Any, Optional from six import iteritems, itervalues @@ -52,16 +54,18 @@ class Facet(object): from the result of the aggregation. 
""" - agg_type = None + agg_type: Optional[str] = None - def __init__(self, metric=None, metric_sort="desc", **kwargs): + def __init__( + self, metric: Any = None, metric_sort: str = "desc", **kwargs: Any + ) -> None: self.filter_values = () self._params = kwargs self._metric = metric if metric and metric_sort: self._params["order"] = {"metric": metric_sort} - def get_aggregation(self): + def get_aggregation(self) -> Any: """ Return the aggregation object. """ @@ -70,7 +74,7 @@ def get_aggregation(self): agg.metric("metric", self._metric) return agg - def add_filter(self, filter_values): + def add_filter(self, filter_values: Any) -> Any: """ Construct a filter. """ @@ -82,25 +86,25 @@ def add_filter(self, filter_values): f |= self.get_value_filter(v) return f - def get_value_filter(self, filter_value): + def get_value_filter(self, filter_value: Any) -> Any: """ Construct a filter for an individual value """ pass - def is_filtered(self, key, filter_values): + def is_filtered(self, key: Any, filter_values: Any) -> bool: """ Is a filter active on the given key. """ return key in filter_values - def get_value(self, bucket): + def get_value(self, bucket: Any) -> Any: """ return a value representing a bucket. Its key as default. """ return bucket["key"] - def get_metric(self, bucket): + def get_metric(self, bucket: Any) -> Any: """ Return a metric, by default doc_count for a bucket. """ @@ -108,7 +112,7 @@ def get_metric(self, bucket): return bucket["metric"]["value"] return bucket["doc_count"] - def get_values(self, data, filter_values): + def get_values(self, data: Any, filter_values: Any) -> Any: """ Turn the raw bucket data into a list of tuples containing the key, number of documents and a flag indicating whether this value has been @@ -124,9 +128,9 @@ def get_values(self, data, filter_values): class TermsFacet(Facet): - agg_type = "terms" + agg_type: Optional[str] = "terms" - def add_filter(self, filter_values): + def add_filter(self, filter_values: Any) -> Any: """Create a terms filter instead of bool containing term filters.""" if filter_values: return Terms( @@ -137,7 +141,7 @@ def add_filter(self, filter_values): class RangeFacet(Facet): agg_type = "range" - def _range_to_dict(self, range): + def _range_to_dict(self, range: Any) -> Any: key, range = range out = {"key": key} if range[0] is not None: @@ -146,13 +150,13 @@ def _range_to_dict(self, range): out["to"] = range[1] return out - def __init__(self, ranges, **kwargs): + def __init__(self, ranges: Any, **kwargs: Any) -> None: super(RangeFacet, self).__init__(**kwargs) self._params["ranges"] = list(map(self._range_to_dict, ranges)) self._params["keyed"] = False self._ranges = dict(ranges) - def get_value_filter(self, filter_value): + def get_value_filter(self, filter_value: Any) -> Any: f, t = self._ranges[filter_value] limits = {} if f is not None: @@ -166,7 +170,7 @@ def get_value_filter(self, filter_value): class HistogramFacet(Facet): agg_type = "histogram" - def get_value_filter(self, filter_value): + def get_value_filter(self, filter_value: Any) -> Any: return Range( _expand__to_dot=False, **{ @@ -178,25 +182,25 @@ def get_value_filter(self, filter_value): ) -def _date_interval_year(d): +def _date_interval_year(d: Any) -> Any: return d.replace( year=d.year + 1, day=(28 if d.month == 2 and d.day == 29 else d.day) ) -def _date_interval_month(d): +def _date_interval_month(d: Any) -> Any: return (d + timedelta(days=32)).replace(day=1) -def _date_interval_week(d): +def _date_interval_week(d: Any) -> Any: return d + 
timedelta(days=7) -def _date_interval_day(d): +def _date_interval_day(d: Any) -> Any: return d + timedelta(days=1) -def _date_interval_hour(d): +def _date_interval_hour(d: Any) -> Any: return d + timedelta(hours=1) @@ -216,22 +220,22 @@ class DateHistogramFacet(Facet): "1h": _date_interval_hour, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: kwargs.setdefault("min_doc_count", 0) super(DateHistogramFacet, self).__init__(**kwargs) - def get_value(self, bucket): + def get_value(self, bucket: Any) -> Any: if not isinstance(bucket["key"], datetime): # OpenSearch returns key=None instead of 0 for date 1970-01-01, # so we need to set key to 0 to avoid TypeError exception if bucket["key"] is None: bucket["key"] = 0 # Preserve milliseconds in the datetime - return datetime.utcfromtimestamp(int(bucket["key"]) / 1000.0) + return datetime.utcfromtimestamp(int(bucket["key"]) / 1000.0) # type: ignore else: return bucket["key"] - def get_value_filter(self, filter_value): + def get_value_filter(self, filter_value: Any) -> Any: for interval_type in ("calendar_interval", "fixed_interval"): if interval_type in self._params: break @@ -254,17 +258,17 @@ def get_value_filter(self, filter_value): class NestedFacet(Facet): agg_type = "nested" - def __init__(self, path, nested_facet): + def __init__(self, path: Any, nested_facet: Any) -> None: self._path = path self._inner = nested_facet super(NestedFacet, self).__init__( path=path, aggs={"inner": nested_facet.get_aggregation()} ) - def get_values(self, data, filter_values): + def get_values(self, data: Any, filter_values: Any) -> Any: return self._inner.get_values(data.inner, filter_values) - def add_filter(self, filter_values): + def add_filter(self, filter_values: Any) -> Any: inner_q = self._inner.add_filter(filter_values) if inner_q: return Nested(path=self._path, query=inner_q) @@ -272,11 +276,11 @@ def add_filter(self, filter_values): class FacetedResponse(Response): @property - def query_string(self): + def query_string(self) -> Any: return self._faceted_search._query @property - def facets(self): + def facets(self) -> Any: if not hasattr(self, "_facets"): super(AttrDict, self).__setattr__("_facets", AttrDict({})) for name, facet in iteritems(self._faceted_search.facets): @@ -329,38 +333,38 @@ def search(self): """ - index = None - doc_types = None - fields = None - facets = {} + index: Any = None + doc_types: Any = None + fields: Any = None + facets: Any = {} using = "default" - def __init__(self, query=None, filters={}, sort=()): + def __init__(self, query: Any = None, filters: Any = {}, sort: Any = ()) -> None: """ :arg query: the text to search for :arg filters: facet values to filter :arg sort: sort information to be passed to :class:`~opensearchpy.Search` """ self._query = query - self._filters = {} + self._filters: Any = {} self._sort = sort - self.filter_values = {} + self.filter_values: Any = {} for name, value in iteritems(filters): self.add_filter(name, value) self._s = self.build_search() - def count(self): + def count(self) -> Any: return self._s.count() - def __getitem__(self, k): + def __getitem__(self, k: Any) -> Any: self._s = self._s[k] return self - def __iter__(self): + def __iter__(self) -> Any: return iter(self._s) - def add_filter(self, name, filter_values): + def add_filter(self, name: Any, filter_values: Any) -> Any: """ Add a filter for a facet. 
""" @@ -382,7 +386,7 @@ def add_filter(self, name, filter_values): self._filters[name] = f - def search(self): + def search(self) -> Any: """ Returns the base Search object to which the facets are added. @@ -392,7 +396,7 @@ def search(self): s = Search(doc_type=self.doc_types, index=self.index, using=self.using) return s.response_class(FacetedResponse) - def query(self, search, query): + def query(self, search: Any, query: Any) -> Any: """ Add query part to ``search``. @@ -405,7 +409,7 @@ def query(self, search, query): return search.query("multi_match", query=query) return search - def aggregate(self, search): + def aggregate(self, search: Any) -> Any: """ Add aggregations representing the facets selected, including potential filters. @@ -421,7 +425,7 @@ def aggregate(self, search): f, agg ) - def filter(self, search): + def filter(self, search: Any) -> Any: """ Add a ``post_filter`` to the search request narrowing the results based on the facet filters. @@ -434,7 +438,7 @@ def filter(self, search): post_filter &= f return search.post_filter(post_filter) - def highlight(self, search): + def highlight(self, search: Any) -> Any: """ Add highlighting for all the fields """ @@ -442,7 +446,7 @@ def highlight(self, search): *(f if "^" not in f else f.split("^", 1)[0] for f in self.fields) ) - def sort(self, search): + def sort(self, search: Any) -> Any: """ Add sorting information to the request. """ @@ -450,7 +454,7 @@ def sort(self, search): search = search.sort(*self._sort) return search - def build_search(self): + def build_search(self) -> Any: """ Construct the ``Search`` object. """ @@ -463,7 +467,7 @@ def build_search(self): self.aggregate(s) return s - def execute(self): + def execute(self) -> Any: """ Execute the search and return the response. """ diff --git a/opensearchpy/helpers/faceted_search.pyi b/opensearchpy/helpers/faceted_search.pyi deleted file mode 100644 index 3f1d175b..00000000 --- a/opensearchpy/helpers/faceted_search.pyi +++ /dev/null @@ -1,36 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from opensearchpy.helpers.response import Response - -class Facet(object): ... -class TermsFacet(Facet): ... -class RangeFacet(Facet): ... -class HistogramFacet(Facet): ... -class DateHistogramFacet(Facet): ... -class NestedFacet(Facet): ... -class FacetedResponse(Response): ... -class FacetedSearch(object): ... 
diff --git a/opensearchpy/helpers/field.py b/opensearchpy/helpers/field.py index 756a3a0e..4ffd21d8 100644 --- a/opensearchpy/helpers/field.py +++ b/opensearchpy/helpers/field.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,15 +26,11 @@ # under the License. import base64 +import collections.abc as collections_abc import copy import ipaddress - -try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc - from datetime import date, datetime +from typing import Any, Optional, Type from dateutil import parser, tz from six import integer_types, iteritems, string_types @@ -44,17 +41,17 @@ from .utils import AttrDict, AttrList, DslBase from .wrappers import Range -unicode = type("") +unicode: Type[str] = type("") -def construct_field(name_or_field, **params): +def construct_field(name_or_field: Any, **params: Any) -> Any: # {"type": "text", "analyzer": "snowball"} if isinstance(name_or_field, collections_abc.Mapping): if params: raise ValueError( "construct_field() cannot accept parameters when passing in a dict." ) - params = name_or_field.copy() + params = name_or_field.copy() # type: ignore if "type" not in params: # inner object can be implicitly defined if "properties" in params: @@ -79,14 +76,16 @@ def construct_field(name_or_field, **params): class Field(DslBase): - _type_name = "field" + _type_name: str = "field" _type_shortcut = staticmethod(construct_field) # all fields can be multifields _param_defs = {"fields": {"type": "field", "hash": True}} - name = None - _coerce = False + name: Optional[str] = None + _coerce: bool = False - def __init__(self, multi=False, required=False, *args, **kwargs): + def __init__( + self, multi: bool = False, required: bool = False, *args: Any, **kwargs: Any + ) -> None: """ :arg bool multi: specifies whether field can contain array of values :arg bool required: specifies whether field is required @@ -95,29 +94,29 @@ def __init__(self, multi=False, required=False, *args, **kwargs): self._required = required super(Field, self).__init__(*args, **kwargs) - def __getitem__(self, subfield): + def __getitem__(self, subfield: Any) -> Any: return self._params.get("fields", {})[subfield] - def _serialize(self, data): + def _serialize(self, data: Any) -> Any: return data - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: return data - def _empty(self): + def _empty(self) -> None: return None - def empty(self): + def empty(self) -> Any: if self._multi: return AttrList([]) return self._empty() - def serialize(self, data): + def serialize(self, data: Any) -> Any: if isinstance(data, (list, AttrList, tuple)): return list(map(self._serialize, data)) return self._serialize(data) - def deserialize(self, data): + def deserialize(self, data: Any) -> Any: if isinstance(data, (list, AttrList, tuple)): data = [None if d is None else self._deserialize(d) for d in data] return data @@ -125,14 +124,14 @@ def deserialize(self, data): return None return self._deserialize(data) - def clean(self, data): + def clean(self, data: Any) -> Any: if data is not None: data = self.deserialize(data) if data in (None, [], {}) and self._required: raise ValidationException("Value required for this field.") return data - def to_dict(self): + def to_dict(self) -> Any: d = super(Field, self).to_dict() name, value = d.popitem() value["type"] = name @@ -143,7 +142,7 @@ class CustomField(Field): 
name = "custom" _coerce = True - def to_dict(self): + def to_dict(self) -> Any: if isinstance(self.builtin_type, Field): return self.builtin_type.to_dict() @@ -153,10 +152,16 @@ def to_dict(self): class Object(Field): - name = "object" - _coerce = True - - def __init__(self, doc_class=None, dynamic=None, properties=None, **kwargs): + name: Optional[str] = "object" + _coerce: bool = True + + def __init__( + self, + doc_class: Any = None, + dynamic: Any = None, + properties: Any = None, + **kwargs: Any + ) -> None: """ :arg document.InnerDoc doc_class: base doc class that handles mapping. If no `doc_class` is provided, new instance of `InnerDoc` will be created, @@ -172,7 +177,7 @@ def __init__(self, doc_class=None, dynamic=None, properties=None, **kwargs): "doc_class and properties/dynamic should not be provided together" ) if doc_class: - self._doc_class = doc_class + self._doc_class: Any = doc_class else: # FIXME import from opensearchpy.helpers.document import InnerDoc @@ -180,39 +185,39 @@ def __init__(self, doc_class=None, dynamic=None, properties=None, **kwargs): # no InnerDoc subclass, creating one instead... self._doc_class = type("InnerDoc", (InnerDoc,), {}) for name, field in iteritems(properties or {}): - self._doc_class._doc_type.mapping.field(name, field) + self._doc_class._doc_type.mapping.field(name, field) # type: ignore if dynamic is not None: - self._doc_class._doc_type.mapping.meta("dynamic", dynamic) + self._doc_class._doc_type.mapping.meta("dynamic", dynamic) # type: ignore self._mapping = copy.deepcopy(self._doc_class._doc_type.mapping) super(Object, self).__init__(**kwargs) - def __getitem__(self, name): + def __getitem__(self, name: Any) -> Any: return self._mapping[name] - def __contains__(self, name): + def __contains__(self, name: Any) -> bool: return name in self._mapping - def _empty(self): + def _empty(self) -> Any: return self._wrap({}) - def _wrap(self, data): + def _wrap(self, data: Any) -> Any: return self._doc_class.from_opensearch(data, data_only=True) - def empty(self): + def empty(self) -> Any: if self._multi: return AttrList([], self._wrap) return self._empty() - def to_dict(self): + def to_dict(self) -> Any: d = self._mapping.to_dict() d.update(super(Object, self).to_dict()) return d - def _collect_fields(self): + def _collect_fields(self) -> Any: return self._mapping.properties._collect_fields() - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: # don't wrap already wrapped data if isinstance(data, self._doc_class): return data @@ -222,7 +227,7 @@ def _deserialize(self, data): return self._wrap(data) - def _serialize(self, data): + def _serialize(self, data: Any) -> Any: if data is None: return None @@ -232,7 +237,7 @@ def _serialize(self, data): return data.to_dict() - def clean(self, data): + def clean(self, data: Any) -> Any: data = super(Object, self).clean(data) if data is None: return None @@ -243,7 +248,7 @@ def clean(self, data): data.full_clean() return data - def update(self, other, update_only=False): + def update(self, other: "Object", update_only: bool = False) -> None: if not isinstance(other, Object): # not an inner/nested object, no merge possible return @@ -252,18 +257,18 @@ def update(self, other, update_only=False): class Nested(Object): - name = "nested" + name: Optional[str] = "nested" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: kwargs.setdefault("multi", True) super(Nested, self).__init__(*args, **kwargs) class Date(Field): - name = "date" - _coerce = 
True + name: Optional[str] = "date" + _coerce: bool = True - def __init__(self, default_timezone=None, *args, **kwargs): + def __init__(self, default_timezone: Any = None, *args: Any, **kwargs: Any) -> None: """ :arg default_timezone: timezone that will be automatically used for tz-naive values May be instance of `datetime.tzinfo` or string containing TZ offset @@ -273,7 +278,7 @@ def __init__(self, default_timezone=None, *args, **kwargs): self._default_timezone = tz.gettz(self._default_timezone) super(Date, self).__init__(*args, **kwargs) - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: if isinstance(data, string_types): try: data = parser.parse(data) @@ -302,7 +307,7 @@ class Text(Field): "search_analyzer": {"type": "analyzer"}, "search_quote_analyzer": {"type": "analyzer"}, } - name = "text" + name: Optional[str] = "text" class SearchAsYouType(Field): @@ -311,7 +316,7 @@ class SearchAsYouType(Field): "search_analyzer": {"type": "analyzer"}, "search_quote_analyzer": {"type": "analyzer"}, } - name = "search_as_you_type" + name: Optional[str] = "search_as_you_type" class Keyword(Field): @@ -320,23 +325,23 @@ class Keyword(Field): "search_analyzer": {"type": "analyzer"}, "normalizer": {"type": "normalizer"}, } - name = "keyword" + name: Optional[str] = "keyword" class ConstantKeyword(Keyword): - name = "constant_keyword" + name: Optional[str] = "constant_keyword" class Boolean(Field): - name = "boolean" - _coerce = True + name: Optional[str] = "boolean" + _coerce: bool = True - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: if data == "false": return False return bool(data) - def clean(self, data): + def clean(self, data: Any) -> Any: if data is not None: data = self.deserialize(data) if data is None and self._required: @@ -345,108 +350,108 @@ def clean(self, data): class Float(Field): - name = "float" - _coerce = True + name: Optional[str] = "float" + _coerce: bool = True - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: return float(data) class DenseVector(Float): - name = "dense_vector" + name: Optional[str] = "dense_vector" - def __init__(self, dims, **kwargs): + def __init__(self, dims: Any, **kwargs: Any) -> None: kwargs["multi"] = True super(DenseVector, self).__init__(dims=dims, **kwargs) class SparseVector(Field): - name = "sparse_vector" + name: Optional[str] = "sparse_vector" class HalfFloat(Float): - name = "half_float" + name: Optional[str] = "half_float" class ScaledFloat(Float): - name = "scaled_float" + name: Optional[str] = "scaled_float" - def __init__(self, scaling_factor, *args, **kwargs): + def __init__(self, scaling_factor: Any, *args: Any, **kwargs: Any) -> None: super(ScaledFloat, self).__init__( scaling_factor=scaling_factor, *args, **kwargs ) class Double(Float): - name = "double" + name: Optional[str] = "double" class RankFeature(Float): - name = "rank_feature" + name: Optional[str] = "rank_feature" class RankFeatures(Field): - name = "rank_features" + name: Optional[str] = "rank_features" class Integer(Field): - name = "integer" - _coerce = True + name: Optional[str] = "integer" + _coerce: bool = True - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: return int(data) class Byte(Integer): - name = "byte" + name: Optional[str] = "byte" class Short(Integer): - name = "short" + name: Optional[str] = "short" class Long(Integer): - name = "long" + name: Optional[str] = "long" class Ip(Field): - name = "ip" - _coerce = True + name: Optional[str] = "ip" + _coerce: 
bool = True - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: # the ipaddress library for pypy only accepts unicode. return ipaddress.ip_address(unicode(data)) - def _serialize(self, data): + def _serialize(self, data: Any) -> Any: if data is None: return None return str(data) class Binary(Field): - name = "binary" - _coerce = True + name: Optional[str] = "binary" + _coerce: bool = True - def clean(self, data): + def clean(self, data: Any) -> Any: # Binary fields are opaque, so there's not much cleaning # that can be done. return data - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: return base64.b64decode(data) - def _serialize(self, data): + def _serialize(self, data: Any) -> Any: if data is None: return None return base64.b64encode(data).decode() class GeoPoint(Field): - name = "geo_point" + name: Optional[str] = "geo_point" class GeoShape(Field): - name = "geo_shape" + name: Optional[str] = "geo_shape" class Completion(Field): @@ -458,29 +463,29 @@ class Completion(Field): class Percolator(Field): - name = "percolator" - _coerce = True + name: Optional[str] = "percolator" + _coerce: bool = True - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: return Q(data) - def _serialize(self, data): + def _serialize(self, data: Any) -> Any: if data is None: return None return data.to_dict() class RangeField(Field): - _coerce = True - _core_field = None + _coerce: bool = True + _core_field: Any = None - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: if isinstance(data, Range): return data data = dict((k, self._core_field.deserialize(v)) for k, v in iteritems(data)) return Range(data) - def _serialize(self, data): + def _serialize(self, data: Any) -> Any: if data is None: return None if not isinstance(data, collections_abc.Mapping): @@ -489,42 +494,42 @@ def _serialize(self, data): class IntegerRange(RangeField): - name = "integer_range" - _core_field = Integer() + name: Optional[str] = "integer_range" + _core_field: Any = Integer() class FloatRange(RangeField): - name = "float_range" - _core_field = Float() + name: Optional[str] = "float_range" + _core_field: Any = Float() class LongRange(RangeField): - name = "long_range" - _core_field = Long() + name: Optional[str] = "long_range" + _core_field: Any = Long() class DoubleRange(RangeField): - name = "double_range" - _core_field = Double() + name: Optional[str] = "double_range" + _core_field: Any = Double() class DateRange(RangeField): - name = "date_range" - _core_field = Date() + name: Optional[str] = "date_range" + _core_field: Any = Date() class IpRange(Field): # not a RangeField since ip_range supports CIDR ranges - name = "ip_range" + name: Optional[str] = "ip_range" class Join(Field): - name = "join" + name: Optional[str] = "join" class TokenCount(Field): - name = "token_count" + name: Optional[str] = "token_count" class Murmur3(Field): - name = "murmur3" + name: Optional[str] = "murmur3" diff --git a/opensearchpy/helpers/field.pyi b/opensearchpy/helpers/field.pyi deleted file mode 100644 index 3704aa81..00000000 --- a/opensearchpy/helpers/field.pyi +++ /dev/null @@ -1,70 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. 
under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any - -from .utils import DslBase - -class Field(DslBase): ... -class CustomField(Field): ... -class Object(Field): ... -class Nested(Object): ... -class Date(Field): ... -class Text(Field): ... -class SearchAsYouType(Field): ... -class Keyword(Field): ... -class ConstantKeyword(Keyword): ... -class Boolean(Field): ... -class Float(Field): ... -class DenseVector(Float): ... -class SparseVector(Field): ... -class HalfFloat(Float): ... -class ScaledFloat(Float): ... -class Double(Float): ... -class RankFeature(Float): ... -class RankFeatures(Field): ... -class Integer(Field): ... -class Byte(Integer): ... -class Short(Integer): ... -class Long(Integer): ... -class Ip(Field): ... -class Binary(Field): ... -class GeoPoint(Field): ... -class GeoShape(Field): ... -class Completion(Field): ... -class Percolator(Field): ... -class RangeField(Field): ... -class IntegerRange(RangeField): ... -class FloatRange(RangeField): ... -class LongRange(RangeField): ... -class DoubleRange(RangeField): ... -class DateRange(RangeField): ... -class IpRange(Field): ... -class Join(Field): ... -class TokenCount(Field): ... -class Murmur3(Field): ... - -def construct_field(name_or_field: Any, **params: Any) -> Any: ... diff --git a/opensearchpy/helpers/function.py b/opensearchpy/helpers/function.py index 5b8db7b0..00452f86 100644 --- a/opensearchpy/helpers/function.py +++ b/opensearchpy/helpers/function.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -24,21 +25,19 @@ # specific language governing permissions and limitations # under the License. 
-try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc +import collections.abc as collections_abc +from typing import Any, Optional from .utils import DslBase -def SF(name_or_sf, **params): +def SF(name_or_sf: Any, **params: Any) -> Any: # {"script_score": {"script": "_score"}, "filter": {}} if isinstance(name_or_sf, collections_abc.Mapping): if params: raise ValueError("SF() cannot accept parameters when passing in a dict.") kwargs = {} - sf = name_or_sf.copy() + sf = name_or_sf.copy() # type: ignore for k in ScoreFunction._param_defs: if k in name_or_sf: kwargs[k] = sf.pop(k) @@ -73,16 +72,16 @@ def SF(name_or_sf, **params): class ScoreFunction(DslBase): - _type_name = "score_function" + _type_name: str = "score_function" _type_shortcut = staticmethod(SF) _param_defs = { "query": {"type": "query"}, "filter": {"type": "query"}, "weight": {}, } - name = None + name: Optional[str] = None - def to_dict(self): + def to_dict(self) -> Any: d = super(ScoreFunction, self).to_dict() # filter and query dicts should be at the same level as us for k in self._param_defs: @@ -98,7 +97,7 @@ class ScriptScore(ScoreFunction): class BoostFactor(ScoreFunction): name = "boost_factor" - def to_dict(self): + def to_dict(self) -> Any: d = super(BoostFactor, self).to_dict() if "value" in d[self.name]: d[self.name] = d[self.name].pop("value") diff --git a/opensearchpy/helpers/function.pyi b/opensearchpy/helpers/function.pyi deleted file mode 100644 index 58a00fba..00000000 --- a/opensearchpy/helpers/function.pyi +++ /dev/null @@ -1,40 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any - -from .utils import DslBase - -class ScoreFunction(DslBase): ... -class ScriptScore(ScoreFunction): ... -class BoostFactor(ScoreFunction): ... -class RandomScore(ScoreFunction): ... -class FieldValueFactor(ScoreFunction): ... -class Linear(ScoreFunction): ... -class Gauss(ScoreFunction): ... -class Exp(ScoreFunction): ... - -def SF(name_or_sf: Any, **params: Any) -> Any: ... diff --git a/opensearchpy/helpers/index.py b/opensearchpy/helpers/index.py index d6e08b50..3fbb475a 100644 --- a/opensearchpy/helpers/index.py +++ b/opensearchpy/helpers/index.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -24,6 +25,9 @@ # specific language governing permissions and limitations # under the License. 
+from typing import Any, Optional + +from opensearchpy.client import OpenSearch from opensearchpy.connection.connections import get_connection from opensearchpy.helpers import analysis @@ -35,7 +39,14 @@ class IndexTemplate(object): - def __init__(self, name, template, index=None, order=None, **kwargs): + def __init__( + self, + name: Any, + template: Any, + index: Any = None, + order: Any = None, + **kwargs: Any + ) -> None: if index is None: self._index = Index(template, **kwargs) else: @@ -49,17 +60,17 @@ def __init__(self, name, template, index=None, order=None, **kwargs): self._template_name = name self.order = order - def __getattr__(self, attr_name): + def __getattr__(self, attr_name: Any) -> Any: return getattr(self._index, attr_name) - def to_dict(self): + def to_dict(self) -> Any: d = self._index.to_dict() d["index_patterns"] = [self._index._name] if self.order is not None: d["order"] = self.order return d - def save(self, using=None): + def save(self, using: Any = None) -> Any: opensearch = get_connection(using or self._index._using) return opensearch.indices.put_template( name=self._template_name, body=self.to_dict() @@ -67,25 +78,27 @@ def save(self, using=None): class Index(object): - def __init__(self, name, using="default"): + def __init__(self, name: Any, using: Any = "default") -> None: """ :arg name: name of the index :arg using: connection alias to use, defaults to ``'default'`` """ self._name = name - self._doc_types = [] + self._doc_types: Any = [] self._using = using - self._settings = {} - self._aliases = {} - self._analysis = {} - self._mapping = None + self._settings: Any = {} + self._aliases: Any = {} + self._analysis: Any = {} + self._mapping: Any = None - def get_or_create_mapping(self): + def get_or_create_mapping(self) -> Any: if self._mapping is None: self._mapping = Mapping() return self._mapping - def as_template(self, template_name, pattern=None, order=None): + def as_template( + self, template_name: Any, pattern: Any = None, order: Any = None + ) -> Any: # TODO: should we allow pattern to be a top-level arg? # or maybe have an IndexPattern that allows for it and have # Document._index be that? @@ -93,7 +106,7 @@ def as_template(self, template_name, pattern=None, order=None): template_name, pattern or self._name, index=self, order=order ) - def resolve_nested(self, field_path): + def resolve_nested(self, field_path: Any) -> Any: for doc in self._doc_types: nested, field = doc._doc_type.mapping.resolve_nested(field_path) if field is not None: @@ -102,7 +115,7 @@ def resolve_nested(self, field_path): return self._mapping.resolve_nested(field_path) return (), None - def resolve_field(self, field_path): + def resolve_field(self, field_path: Any) -> Any: for doc in self._doc_types: field = doc._doc_type.mapping.resolve_field(field_path) if field is not None: @@ -111,12 +124,12 @@ def resolve_field(self, field_path): return self._mapping.resolve_field(field_path) return None - def load_mappings(self, using=None): + def load_mappings(self, using: Optional[OpenSearch] = None) -> None: self.get_or_create_mapping().update_from_opensearch( self._name, using=using or self._using ) - def clone(self, name=None, using=None): + def clone(self, name: Any = None, using: Any = None) -> Any: """ Create a copy of the instance with another name or connection alias. 
Useful for creating multiple indices with shared configuration:: @@ -140,14 +153,14 @@ def clone(self, name=None, using=None): i._mapping = self._mapping._clone() return i - def _get_connection(self, using=None): + def _get_connection(self, using: Any = None) -> Any: if self._name is None: raise ValueError("You cannot perform API calls on the default index.") return get_connection(using or self._using) connection = property(_get_connection) - def mapping(self, mapping): + def mapping(self, mapping: Any) -> Any: """ Associate a mapping (an instance of :class:`~opensearchpy.Mapping`) with this index. @@ -156,7 +169,7 @@ def mapping(self, mapping): """ self.get_or_create_mapping().update(mapping) - def document(self, document): + def document(self, document: Any) -> Any: """ Associate a :class:`~opensearchpy.Document` subclass with an index. This means that, when this index is created, it will contain the @@ -187,7 +200,7 @@ class Post(Document): return document - def settings(self, **kwargs): + def settings(self, **kwargs: Any) -> Any: """ Add settings to the index:: @@ -200,7 +213,7 @@ def settings(self, **kwargs): self._settings.update(kwargs) return self - def aliases(self, **kwargs): + def aliases(self, **kwargs: Any) -> Any: """ Add aliases to the index definition:: @@ -210,7 +223,7 @@ def aliases(self, **kwargs): self._aliases.update(kwargs) return self - def analyzer(self, *args, **kwargs): + def analyzer(self, *args: Any, **kwargs: Any) -> Any: """ Explicitly add an analyzer to an index. Note that all custom analyzers defined in mappings will also be created. This is useful for search analyzers. @@ -237,14 +250,14 @@ def analyzer(self, *args, **kwargs): # merge the definition merge(self._analysis, d, True) - def to_dict(self): + def to_dict(self) -> Any: out = {} if self._settings: out["settings"] = self._settings if self._aliases: out["aliases"] = self._aliases - mappings = self._mapping.to_dict() if self._mapping else {} - analysis = self._mapping._collect_analysis() if self._mapping else {} + mappings: Any = self._mapping.to_dict() if self._mapping else {} + analysis: Any = self._mapping._collect_analysis() if self._mapping else {} for d in self._doc_types: mapping = d._doc_type.mapping merge(mappings, mapping.to_dict(), True) @@ -256,7 +269,7 @@ def to_dict(self): out.setdefault("settings", {})["analysis"] = analysis return out - def search(self, using=None): + def search(self, using: Optional[OpenSearch] = None) -> Search: """ Return a :class:`~opensearchpy.Search` object searching over the index (or all the indices belonging to this template) and its @@ -266,7 +279,7 @@ def search(self, using=None): using=using or self._using, index=self._name, doc_type=self._doc_types ) - def updateByQuery(self, using=None): + def updateByQuery(self, using: Optional[OpenSearch] = None) -> UpdateByQuery: """ Return a :class:`~opensearchpy.UpdateByQuery` object searching over the index (or all the indices belonging to this template) and updating Documents that match @@ -280,7 +293,7 @@ def updateByQuery(self, using=None): index=self._name, ) - def create(self, using=None, **kwargs): + def create(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Creates the index in opensearch. 
@@ -291,13 +304,13 @@ def create(self, using=None, **kwargs): index=self._name, body=self.to_dict(), **kwargs ) - def is_closed(self, using=None): + def is_closed(self, using: Optional[OpenSearch] = None) -> Any: state = self._get_connection(using).cluster.state( index=self._name, metric="metadata" ) return state["metadata"]["indices"][self._name]["state"] == "close" - def save(self, using=None): + def save(self, using: Optional[OpenSearch] = None) -> Any: """ Sync the index definition with opensearch, creating the index if it doesn't exist and updating its settings and mappings if it does. @@ -351,7 +364,7 @@ def save(self, using=None): if mappings: self.put_mapping(using=using, body=mappings) - def analyze(self, using=None, **kwargs): + def analyze(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Perform the analysis process on a text and return the tokens breakdown of the text. @@ -361,7 +374,7 @@ def analyze(self, using=None, **kwargs): """ return self._get_connection(using).indices.analyze(index=self._name, **kwargs) - def refresh(self, using=None, **kwargs): + def refresh(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Performs a refresh operation on the index. @@ -370,7 +383,7 @@ def refresh(self, using=None, **kwargs): """ return self._get_connection(using).indices.refresh(index=self._name, **kwargs) - def flush(self, using=None, **kwargs): + def flush(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Performs a flush operation on the index. @@ -379,7 +392,7 @@ def flush(self, using=None, **kwargs): """ return self._get_connection(using).indices.flush(index=self._name, **kwargs) - def get(self, using=None, **kwargs): + def get(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ The get index API allows to retrieve information about the index. @@ -388,7 +401,7 @@ def get(self, using=None, **kwargs): """ return self._get_connection(using).indices.get(index=self._name, **kwargs) - def open(self, using=None, **kwargs): + def open(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Opens the index in opensearch. @@ -397,7 +410,7 @@ def open(self, using=None, **kwargs): """ return self._get_connection(using).indices.open(index=self._name, **kwargs) - def close(self, using=None, **kwargs): + def close(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Closes the index in opensearch. @@ -406,7 +419,7 @@ def close(self, using=None, **kwargs): """ return self._get_connection(using).indices.close(index=self._name, **kwargs) - def delete(self, using=None, **kwargs): + def delete(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Deletes the index in opensearch. @@ -415,7 +428,7 @@ def delete(self, using=None, **kwargs): """ return self._get_connection(using).indices.delete(index=self._name, **kwargs) - def exists(self, using=None, **kwargs): + def exists(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Returns ``True`` if the index already exists in opensearch. @@ -424,7 +437,7 @@ def exists(self, using=None, **kwargs): """ return self._get_connection(using).indices.exists(index=self._name, **kwargs) - def put_mapping(self, using=None, **kwargs): + def put_mapping(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Register specific mapping definition for a specific type. 
@@ -435,7 +448,7 @@ def put_mapping(self, using=None, **kwargs): index=self._name, **kwargs ) - def get_mapping(self, using=None, **kwargs): + def get_mapping(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Retrieve specific mapping definition for a specific type. @@ -446,7 +459,9 @@ def get_mapping(self, using=None, **kwargs): index=self._name, **kwargs ) - def get_field_mapping(self, using=None, **kwargs): + def get_field_mapping( + self, using: Optional[OpenSearch] = None, **kwargs: Any + ) -> Any: """ Retrieve mapping definition of a specific field. @@ -457,7 +472,7 @@ def get_field_mapping(self, using=None, **kwargs): index=self._name, **kwargs ) - def put_alias(self, using=None, **kwargs): + def put_alias(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Create an alias for the index. @@ -466,7 +481,7 @@ def put_alias(self, using=None, **kwargs): """ return self._get_connection(using).indices.put_alias(index=self._name, **kwargs) - def exists_alias(self, using=None, **kwargs): + def exists_alias(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Return a boolean indicating whether given alias exists for this index. @@ -477,7 +492,7 @@ def exists_alias(self, using=None, **kwargs): index=self._name, **kwargs ) - def get_alias(self, using=None, **kwargs): + def get_alias(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Retrieve a specified alias. @@ -486,7 +501,7 @@ def get_alias(self, using=None, **kwargs): """ return self._get_connection(using).indices.get_alias(index=self._name, **kwargs) - def delete_alias(self, using=None, **kwargs): + def delete_alias(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Delete specific alias. @@ -497,7 +512,7 @@ def delete_alias(self, using=None, **kwargs): index=self._name, **kwargs ) - def get_settings(self, using=None, **kwargs): + def get_settings(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Retrieve settings for the index. @@ -508,7 +523,7 @@ def get_settings(self, using=None, **kwargs): index=self._name, **kwargs ) - def put_settings(self, using=None, **kwargs): + def put_settings(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Change specific index level settings in real time. @@ -519,7 +534,7 @@ def put_settings(self, using=None, **kwargs): index=self._name, **kwargs ) - def stats(self, using=None, **kwargs): + def stats(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Retrieve statistics on different operations happening on the index. @@ -528,7 +543,7 @@ def stats(self, using=None, **kwargs): """ return self._get_connection(using).indices.stats(index=self._name, **kwargs) - def segments(self, using=None, **kwargs): + def segments(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Provide low level segments information that a Lucene index (shard level) is built with. @@ -538,7 +553,7 @@ def segments(self, using=None, **kwargs): """ return self._get_connection(using).indices.segments(index=self._name, **kwargs) - def validate_query(self, using=None, **kwargs): + def validate_query(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Validate a potentially expensive query without executing it. 
@@ -549,7 +564,7 @@ def validate_query(self, using=None, **kwargs): index=self._name, **kwargs ) - def clear_cache(self, using=None, **kwargs): + def clear_cache(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Clear all caches or specific cached associated with the index. @@ -560,7 +575,7 @@ def clear_cache(self, using=None, **kwargs): index=self._name, **kwargs ) - def recovery(self, using=None, **kwargs): + def recovery(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ The indices recovery API provides insight into on-going shard recoveries for the index. @@ -570,7 +585,7 @@ def recovery(self, using=None, **kwargs): """ return self._get_connection(using).indices.recovery(index=self._name, **kwargs) - def upgrade(self, using=None, **kwargs): + def upgrade(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Upgrade the index to the latest format. @@ -579,7 +594,7 @@ def upgrade(self, using=None, **kwargs): """ return self._get_connection(using).indices.upgrade(index=self._name, **kwargs) - def get_upgrade(self, using=None, **kwargs): + def get_upgrade(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Monitor how much of the index is upgraded. @@ -590,7 +605,7 @@ def get_upgrade(self, using=None, **kwargs): index=self._name, **kwargs ) - def shard_stores(self, using=None, **kwargs): + def shard_stores(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ Provides store information for shard copies of the index. Store information reports on which nodes shard copies exist, the shard copy @@ -604,7 +619,7 @@ def shard_stores(self, using=None, **kwargs): index=self._name, **kwargs ) - def forcemerge(self, using=None, **kwargs): + def forcemerge(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ The force merge API allows to force merging of the index through an API. The merge relates to the number of segments a Lucene index holds @@ -622,7 +637,7 @@ def forcemerge(self, using=None, **kwargs): index=self._name, **kwargs ) - def shrink(self, using=None, **kwargs): + def shrink(self, using: Optional[OpenSearch] = None, **kwargs: Any) -> Any: """ The shrink index API allows you to shrink an existing index into a new index with fewer primary shards. The number of primary shards in the diff --git a/opensearchpy/helpers/index.pyi b/opensearchpy/helpers/index.pyi deleted file mode 100644 index 2bf5747e..00000000 --- a/opensearchpy/helpers/index.pyi +++ /dev/null @@ -1,28 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. 
See the License for the -# specific language governing permissions and limitations -# under the License. - -class IndexTemplate(object): ... -class Index(object): ... diff --git a/opensearchpy/helpers/mapping.py b/opensearchpy/helpers/mapping.py index 9270da97..eaa13e3f 100644 --- a/opensearchpy/helpers/mapping.py +++ b/opensearchpy/helpers/mapping.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -24,12 +25,9 @@ # specific language governing permissions and limitations # under the License. -try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc - +import collections.abc as collections_abc from itertools import chain +from typing import Any from six import iteritems, itervalues @@ -55,26 +53,26 @@ class Properties(DslBase): name = "properties" _param_defs = {"properties": {"type": "field", "hash": True}} - def __init__(self): + def __init__(self) -> None: super(Properties, self).__init__() - def __repr__(self): + def __repr__(self) -> str: return "Properties()" - def __getitem__(self, name): + def __getitem__(self, name: Any) -> Any: return self.properties[name] - def __contains__(self, name): + def __contains__(self, name: Any) -> bool: return name in self.properties - def to_dict(self): + def to_dict(self) -> Any: return super(Properties, self).to_dict()["properties"] - def field(self, name, *args, **kwargs): + def field(self, name: Any, *args: Any, **kwargs: Any) -> "Properties": self.properties[name] = construct_field(*args, **kwargs) return self - def _collect_fields(self): + def _collect_fields(self) -> Any: """Iterate over all Field objects within, including multi fields.""" for f in itervalues(self.properties.to_dict()): yield f @@ -87,7 +85,7 @@ def _collect_fields(self): for inner_f in f._collect_fields(): yield inner_f - def update(self, other_object): + def update(self, other_object: Any) -> None: if not hasattr(other_object, "properties"): # not an inner/nested object, no merge possible return @@ -102,25 +100,25 @@ def update(self, other_object): class Mapping(object): - def __init__(self): + def __init__(self) -> None: self.properties = Properties() - self._meta = {} + self._meta: Any = {} - def __repr__(self): + def __repr__(self) -> str: return "Mapping()" - def _clone(self): + def _clone(self) -> Any: m = Mapping() m.properties._params = self.properties._params.copy() return m @classmethod - def from_opensearch(cls, index, using="default"): + def from_opensearch(cls, index: Any, using: str = "default") -> Any: m = cls() m.update_from_opensearch(index, using) return m - def resolve_nested(self, field_path): + def resolve_nested(self, field_path: Any) -> Any: field = self nested = [] parts = field_path.split(".") @@ -133,18 +131,18 @@ def resolve_nested(self, field_path): nested.append(".".join(parts[: i + 1])) return nested, field - def resolve_field(self, field_path): + def resolve_field(self, field_path: Any) -> Any: field = self for step in field_path.split("."): try: field = field[step] except KeyError: - return + return None return field - def _collect_analysis(self): - analysis = {} - fields = [] + def _collect_analysis(self) -> Any: + analysis: Any = {} + fields: Any = [] if "_all" in self._meta: fields.append(Text(**self._meta["_all"])) @@ -170,20 +168,20 @@ def _collect_analysis(self): return analysis - def save(self, index, using="default"): + def save(self, index: Any, using: str = 
"default") -> Any: from opensearchpy.helpers.index import Index index = Index(index, using=using) index.mapping(self) return index.save() - def update_from_opensearch(self, index, using="default"): + def update_from_opensearch(self, index: Any, using: str = "default") -> None: opensearch = get_connection(using) raw = opensearch.indices.get_mapping(index=index) _, raw = raw.popitem() self._update_from_dict(raw["mappings"]) - def _update_from_dict(self, raw): + def _update_from_dict(self, raw: Any) -> None: for name, definition in iteritems(raw.get("properties", {})): self.field(name, definition) @@ -195,7 +193,7 @@ def _update_from_dict(self, raw): else: self.meta(name, value) - def update(self, mapping, update_only=False): + def update(self, mapping: Any, update_only: bool = False) -> None: for name in mapping: if update_only and name in self: # nested and inner objects, merge recursively @@ -212,20 +210,20 @@ def update(self, mapping, update_only=False): else: self._meta.update(mapping._meta) - def __contains__(self, name): + def __contains__(self, name: Any) -> Any: return name in self.properties.properties - def __getitem__(self, name): + def __getitem__(self, name: Any) -> Any: return self.properties.properties[name] - def __iter__(self): + def __iter__(self) -> Any: return iter(self.properties.properties) - def field(self, *args, **kwargs): + def field(self, *args: Any, **kwargs: Any) -> "Mapping": self.properties.field(*args, **kwargs) return self - def meta(self, name, params=None, **kwargs): + def meta(self, name: Any, params: Any = None, **kwargs: Any) -> "Mapping": if not name.startswith("_") and name not in META_FIELDS: name = "_" + name @@ -235,7 +233,7 @@ def meta(self, name, params=None, **kwargs): self._meta[name] = kwargs if params is None else params return self - def to_dict(self): + def to_dict(self) -> Any: meta = self._meta # hard coded serialization of analyzers in _all diff --git a/opensearchpy/helpers/mapping.pyi b/opensearchpy/helpers/mapping.pyi deleted file mode 100644 index 8dab731a..00000000 --- a/opensearchpy/helpers/mapping.pyi +++ /dev/null @@ -1,30 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from .utils import DslBase - -class Properties(DslBase): ... -class Mapping(object): ... 
diff --git a/opensearchpy/helpers/query.py b/opensearchpy/helpers/query.py index e132254b..e299f94a 100644 --- a/opensearchpy/helpers/query.py +++ b/opensearchpy/helpers/query.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -24,21 +25,17 @@ # specific language governing permissions and limitations # under the License. -try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc - +import collections.abc as collections_abc from itertools import chain +from typing import Any, Optional # 'SF' looks unused but the test suite assumes it's available # from this module so others are liable to do so as well. -from ..helpers.function import SF # noqa: F401 -from ..helpers.function import ScoreFunction +from ..helpers.function import SF, ScoreFunction from .utils import DslBase -def Q(name_or_query="match_all", **params): +def Q(name_or_query: Any = "match_all", **params: Any) -> Any: # {"match": {"title": "python"}} if isinstance(name_or_query, collections_abc.Mapping): if params: @@ -48,7 +45,7 @@ def Q(name_or_query="match_all", **params): 'Q() can only accept dict with a single query ({"match": {...}}). ' "Instead it got (%r)" % name_or_query ) - name, params = name_or_query.copy().popitem() + name, params = name_or_query.copy().popitem() # type: ignore return Query.get_dsl_class(name)(_expand__to_dot=False, **params) # MatchAll() @@ -68,28 +65,28 @@ def Q(name_or_query="match_all", **params): class Query(DslBase): - _type_name = "query" + _type_name: str = "query" _type_shortcut = staticmethod(Q) - name = None + name: Optional[str] = None - def __add__(self, other): + def __add__(self, other: Any) -> Any: # make sure we give queries that know how to combine themselves # preference if hasattr(other, "__radd__"): return other.__radd__(self) return Bool(must=[self, other]) - def __invert__(self): + def __invert__(self) -> Any: return Bool(must_not=[self]) - def __or__(self, other): + def __or__(self, other: Any) -> Any: # make sure we give queries that know how to combine themselves # preference if hasattr(other, "__ror__"): return other.__ror__(self) return Bool(should=[self, other]) - def __and__(self, other): + def __and__(self, other: Any) -> Any: # make sure we give queries that know how to combine themselves # preference if hasattr(other, "__rand__"): @@ -100,17 +97,17 @@ def __and__(self, other): class MatchAll(Query): name = "match_all" - def __add__(self, other): + def __add__(self, other: Any) -> Any: return other._clone() __and__ = __rand__ = __radd__ = __add__ - def __or__(self, other): + def __or__(self, other: Any) -> "MatchAll": return self __ror__ = __or__ - def __invert__(self): + def __invert__(self) -> Any: return MatchNone() @@ -120,17 +117,17 @@ def __invert__(self): class MatchNone(Query): name = "match_none" - def __add__(self, other): + def __add__(self, other: Any) -> "MatchNone": return self __and__ = __rand__ = __radd__ = __add__ - def __or__(self, other): + def __or__(self, other: Any) -> Any: return other._clone() __ror__ = __or__ - def __invert__(self): + def __invert__(self) -> Any: return MatchAll() @@ -143,7 +140,7 @@ class Bool(Query): "filter": {"type": "query", "multi": True}, } - def __add__(self, other): + def __add__(self, other: "Bool") -> Any: q = self._clone() if isinstance(other, Bool): q.must += other.must @@ -156,7 +153,7 @@ def __add__(self, other): __radd__ = __add__ - def 
__or__(self, other): + def __or__(self, other: "Bool") -> Any: for q in (self, other): if isinstance(q, Bool) and not any( (q.must, q.must_not, q.filter, getattr(q, "minimum_should_match", None)) @@ -181,14 +178,14 @@ def __or__(self, other): __ror__ = __or__ @property - def _min_should_match(self): + def _min_should_match(self) -> Any: return getattr( self, "minimum_should_match", 0 if not self.should or (self.must or self.filter) else 1, ) - def __invert__(self): + def __invert__(self) -> Any: # Because an empty Bool query is treated like # MatchAll the inverse should be MatchNone if not any(chain(self.must, self.filter, self.should, self.must_not)): @@ -208,7 +205,7 @@ def __invert__(self): return negations[0] return Bool(should=negations) - def __and__(self, other): + def __and__(self, other: "Bool") -> Any: q = self._clone() if isinstance(other, Bool): q.must += other.must @@ -255,7 +252,7 @@ class FunctionScore(Query): "functions": {"type": "score_function", "multi": True}, } - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: if "functions" in kwargs: pass else: @@ -523,3 +520,6 @@ class ParentId(Query): class Wrapper(Query): name = "wrapper" + + +__all__ = ["SF"] diff --git a/opensearchpy/helpers/query.pyi b/opensearchpy/helpers/query.pyi deleted file mode 100644 index a963ef05..00000000 --- a/opensearchpy/helpers/query.pyi +++ /dev/null @@ -1,95 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any - -from .utils import DslBase - -class Query(DslBase): ... -class MatchAll(Query): ... -class MatchNone(Query): ... -class Bool(Query): ... -class FunctionScore(Query): ... -class Boosting(Query): ... -class ConstantScore(Query): ... -class DisMax(Query): ... -class Filtered(Query): ... -class Indices(Query): ... -class Percolate(Query): ... -class Nested(Query): ... -class HasChild(Query): ... -class HasParent(Query): ... -class TopChildren(Query): ... -class SpanFirst(Query): ... -class SpanMulti(Query): ... -class SpanNear(Query): ... -class SpanNot(Query): ... -class SpanOr(Query): ... -class FieldMaskingSpan(Query): ... -class SpanContaining(Query): ... -class SpanWithin(Query): ... -class Common(Query): ... -class Fuzzy(Query): ... -class FuzzyLikeThis(Query): ... -class FuzzyLikeThisField(Query): ... -class RankFeature(Query): ... -class DistanceFeature(Query): ... -class GeoBoundingBox(Query): ... -class GeoDistance(Query): ... -class GeoDistanceRange(Query): ... -class GeoPolygon(Query): ... 
-class GeoShape(Query): ... -class GeohashCell(Query): ... -class Ids(Query): ... -class Intervals(Query): ... -class Limit(Query): ... -class Match(Query): ... -class MatchPhrase(Query): ... -class MatchPhrasePrefix(Query): ... -class MatchBoolPrefix(Query): ... -class Exists(Query): ... -class MoreLikeThis(Query): ... -class MoreLikeThisField(Query): ... -class MultiMatch(Query): ... -class Prefix(Query): ... -class QueryString(Query): ... -class Range(Query): ... -class Regexp(Query): ... -class Shape(Query): ... -class SimpleQueryString(Query): ... -class SpanTerm(Query): ... -class Template(Query): ... -class Term(Query): ... -class Terms(Query): ... -class TermsSet(Query): ... -class Wildcard(Query): ... -class Script(Query): ... -class ScriptScore(Query): ... -class Type(Query): ... -class ParentId(Query): ... -class Wrapper(Query): ... - -def Q(name_or_query: Any, **params: Any) -> Any: ... diff --git a/opensearchpy/helpers/response/__init__.py b/opensearchpy/helpers/response/__init__.py index 91e4c044..c6215a6b 100644 --- a/opensearchpy/helpers/response/__init__.py +++ b/opensearchpy/helpers/response/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -24,51 +25,51 @@ # specific language governing permissions and limitations # under the License. +from typing import Any + from ..utils import AttrDict, AttrList, _wrap from .hit import Hit, HitMeta -__all__ = ["Response", "AggResponse", "UpdateByQueryResponse", "Hit", "HitMeta"] - class Response(AttrDict): - def __init__(self, search, response, doc_class=None): + def __init__(self, search: Any, response: Any, doc_class: Any = None) -> None: super(AttrDict, self).__setattr__("_search", search) super(AttrDict, self).__setattr__("_doc_class", doc_class) super(Response, self).__init__(response) - def __iter__(self): + def __iter__(self) -> Any: return iter(self.hits) - def __getitem__(self, key): + def __getitem__(self, key: Any) -> Any: if isinstance(key, (slice, int)): # for slicing etc return self.hits[key] return super(Response, self).__getitem__(key) - def __nonzero__(self): + def __nonzero__(self) -> Any: return bool(self.hits) __bool__ = __nonzero__ - def __repr__(self): + def __repr__(self) -> str: return "<Response: %r>" % (self.hits or self.aggregations) - def __len__(self): + def __len__(self) -> int: return len(self.hits) - def __getstate__(self): + def __getstate__(self) -> Any: return self._d_, self._search, self._doc_class - def __setstate__(self, state): + def __setstate__(self, state: Any) -> None: super(AttrDict, self).__setattr__("_d_", state[0]) super(AttrDict, self).__setattr__("_search", state[1]) super(AttrDict, self).__setattr__("_doc_class", state[2]) - def success(self): + def success(self) -> bool: return self._shards.total == self._shards.successful and not self.timed_out @property - def hits(self): + def hits(self) -> Any: if not hasattr(self, "_hits"): h = self._d_["hits"] @@ -85,11 +86,11 @@ def hits(self): return self._hits @property - def aggregations(self): + def aggregations(self) -> Any: return self.aggs @property - def aggs(self): + def aggs(self) -> Any: if not hasattr(self, "_aggs"): aggs = AggResponse( self._search.aggs, self._search, self._d_.get("aggregations", {}) @@ -101,27 +102,30 @@ def aggs(self): class AggResponse(AttrDict): - def __init__(self, aggs, search, data): + def __init__(self, aggs: Any, search: Any, data: Any) -> None: super(AttrDict, self).__setattr__("_meta", {"search": search, "aggs": 
aggs}) super(AggResponse, self).__init__(data) - def __getitem__(self, attr_name): + def __getitem__(self, attr_name: Any) -> Any: if attr_name in self._meta["aggs"]: # don't do self._meta['aggs'][attr_name] to avoid copying agg = self._meta["aggs"].aggs[attr_name] return agg.result(self._meta["search"], self._d_[attr_name]) return super(AggResponse, self).__getitem__(attr_name) - def __iter__(self): + def __iter__(self) -> Any: for name in self._meta["aggs"]: yield self[name] class UpdateByQueryResponse(AttrDict): - def __init__(self, search, response, doc_class=None): + def __init__(self, search: Any, response: Any, doc_class: Any = None) -> None: super(AttrDict, self).__setattr__("_search", search) super(AttrDict, self).__setattr__("_doc_class", doc_class) super(UpdateByQueryResponse, self).__init__(response) - def success(self): + def success(self) -> bool: return not self.timed_out and not self.failures + + +__all__ = ["Response", "AggResponse", "UpdateByQueryResponse", "Hit", "HitMeta"] diff --git a/opensearchpy/helpers/response/__init__.pyi b/opensearchpy/helpers/response/__init__.pyi deleted file mode 100644 index 3f3af097..00000000 --- a/opensearchpy/helpers/response/__init__.pyi +++ /dev/null @@ -1,31 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from ..utils import AttrDict - -class Response(AttrDict): ... -class AggResponse(AttrDict): ... -class UpdateByQueryResponse(AttrDict): ... diff --git a/opensearchpy/helpers/response/aggs.py b/opensearchpy/helpers/response/aggs.py index a5e2e22d..42015d2d 100644 --- a/opensearchpy/helpers/response/aggs.py +++ b/opensearchpy/helpers/response/aggs.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -24,17 +25,19 @@ # specific language governing permissions and limitations # under the License. +from typing import Any + from ..utils import AttrDict, AttrList from . 
import AggResponse, Response class Bucket(AggResponse): - def __init__(self, aggs, search, data, field=None): + def __init__(self, aggs: Any, search: Any, data: Any, field: Any = None) -> None: super(Bucket, self).__init__(aggs, search, data) class FieldBucket(Bucket): - def __init__(self, aggs, search, data, field=None): + def __init__(self, aggs: Any, search: Any, data: Any, field: Any = None) -> None: if field: data["key"] = field.deserialize(data["key"]) super(FieldBucket, self).__init__(aggs, search, data, field) @@ -43,7 +46,7 @@ def __init__(self, aggs, search, data, field=None): class BucketData(AggResponse): _bucket_class = Bucket - def _wrap_bucket(self, data): + def _wrap_bucket(self, data: Any) -> Any: return self._bucket_class( self._meta["aggs"], self._meta["search"], @@ -51,19 +54,19 @@ def _wrap_bucket(self, data): field=self._meta.get("field"), ) - def __iter__(self): + def __iter__(self) -> Any: return iter(self.buckets) - def __len__(self): + def __len__(self) -> int: return len(self.buckets) - def __getitem__(self, key): + def __getitem__(self, key: Any) -> Any: if isinstance(key, (int, slice)): return self.buckets[key] return super(BucketData, self).__getitem__(key) @property - def buckets(self): + def buckets(self) -> Any: if not hasattr(self, "_buckets"): field = getattr(self._meta["aggs"], "field", None) if field: @@ -82,8 +85,11 @@ class FieldBucketData(BucketData): class TopHitsData(Response): - def __init__(self, agg, search, data): + def __init__(self, agg: Any, search: Any, data: Any) -> None: super(AttrDict, self).__setattr__( "meta", AttrDict({"agg": agg, "search": search}) ) super(TopHitsData, self).__init__(search, data) + + +__all__ = ["AggResponse"] diff --git a/opensearchpy/helpers/response/aggs.pyi b/opensearchpy/helpers/response/aggs.pyi deleted file mode 100644 index ba92e56b..00000000 --- a/opensearchpy/helpers/response/aggs.pyi +++ /dev/null @@ -1,34 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from . import AggResponse as AggResponse -from . import Response as Response - -class Bucket(AggResponse): ... -class FieldBucket(Bucket): ... -class BucketData(AggResponse): ... -class FieldBucketData(BucketData): ... -class TopHitsData(Response): ... 
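The Response/AggResponse wrappers and the bucket classes above are easiest to see end to end. A consumption sketch, where the client URL, the logs-* index pattern, and the level field are placeholder assumptions:

    # Sketch only: placeholders throughout; requires a reachable cluster.
    from opensearchpy import OpenSearch
    from opensearchpy.helpers.search import Search

    client = OpenSearch("https://admin:admin@localhost:9200", verify_certs=False)

    s = Search(using=client, index="logs-*")
    s.aggs.bucket("per_level", "terms", field="level")  # the aggs proxy mutates in place

    response = s.execute()   # a Response wrapping the raw body
    if response.success():   # all shards succeeded and the request did not time out
        for hit in response:                  # __iter__ walks response.hits as Hit objects
            print(hit.meta.id)
        for bucket in response.aggregations.per_level.buckets:
            print(bucket.key, bucket.doc_count)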
diff --git a/opensearchpy/helpers/response/hit.py b/opensearchpy/helpers/response/hit.py index cf70a821..c6e8a4a9 100644 --- a/opensearchpy/helpers/response/hit.py +++ b/opensearchpy/helpers/response/hit.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -24,11 +25,13 @@ # specific language governing permissions and limitations # under the License. +from typing import Any + from ..utils import AttrDict, HitMeta class Hit(AttrDict): - def __init__(self, document): + def __init__(self, document: Any) -> None: data = {} if "_source" in document: data = document["_source"] @@ -39,22 +42,25 @@ def __init__(self, document): # assign meta as attribute and not as key in self._d_ super(AttrDict, self).__setattr__("meta", HitMeta(document)) - def __getstate__(self): + def __getstate__(self) -> Any: # add self.meta since it is not in self.__dict__ return super(Hit, self).__getstate__() + (self.meta,) - def __setstate__(self, state): + def __setstate__(self, state: Any) -> None: super(AttrDict, self).__setattr__("meta", state[-1]) super(Hit, self).__setstate__(state[:-1]) - def __dir__(self): + def __dir__(self) -> Any: # be sure to expose meta in dir(self) return super(Hit, self).__dir__() + ["meta"] - def __repr__(self): + def __repr__(self) -> str: return "<Hit({}): {}>".format( "/".join( getattr(self.meta, key) for key in ("index", "id") if key in self.meta ), super(Hit, self).__repr__(), ) + + +__all__ = ["Hit", "HitMeta"] diff --git a/opensearchpy/helpers/search.py b/opensearchpy/helpers/search.py index 0652b60a..069f4c89 100644 --- a/opensearchpy/helpers/search.py +++ b/opensearchpy/helpers/search.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -24,12 +25,9 @@ # specific language governing permissions and limitations # under the License. +import collections.abc as collections_abc import copy - -try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc +from typing import Any from six import iteritems, string_types @@ -38,8 +36,8 @@ from opensearchpy.helpers import scan from ..exceptions import IllegalOperation -from ..helpers.aggs import A, AggBase from ..helpers.query import Bool, Q +from .aggs import A, AggBase from .response import Hit, Response from .utils import AttrDict, DslBase, recursive_to_dict @@ -51,17 +49,17 @@ class QueryProxy(object): the wrapped query. 
""" - def __init__(self, search, attr_name): + def __init__(self, search: Any, attr_name: Any) -> None: self._search = search - self._proxied = None + self._proxied: Any = None self._attr_name = attr_name - def __nonzero__(self): + def __nonzero__(self) -> bool: return self._proxied is not None __bool__ = __nonzero__ - def __call__(self, *args, **kwargs): + def __call__(self, *args: Any, **kwargs: Any) -> Any: s = self._search._clone() # we cannot use self._proxied since we just cloned self._search and @@ -75,19 +73,19 @@ def __call__(self, *args, **kwargs): # always return search to be chainable return s - def __getattr__(self, attr_name): + def __getattr__(self, attr_name: Any) -> Any: return getattr(self._proxied, attr_name) - def __setattr__(self, attr_name, value): + def __setattr__(self, attr_name: Any, value: Any) -> None: if not attr_name.startswith("_"): self._proxied = Q(self._proxied.to_dict()) setattr(self._proxied, attr_name, value) super(QueryProxy, self).__setattr__(attr_name, value) - def __getstate__(self): + def __getstate__(self) -> Any: return self._search, self._proxied, self._attr_name - def __setstate__(self, state): + def __setstate__(self, state: Any) -> None: self._search, self._proxied, self._attr_name = state @@ -100,13 +98,13 @@ class ProxyDescriptor(object): """ - def __init__(self, name): + def __init__(self, name: str) -> None: self._attr_name = "_%s_proxy" % name - def __get__(self, instance, owner): + def __get__(self, instance: Any, owner: Any) -> Any: return getattr(instance, self._attr_name) - def __set__(self, instance, value): + def __set__(self, instance: Any, value: Any) -> None: proxy = getattr(instance, self._attr_name) proxy._proxied = Q(value) @@ -114,17 +112,26 @@ def __set__(self, instance, value): class AggsProxy(AggBase, DslBase): name = "aggs" - def __init__(self, search): + def __init__(self, search: Any) -> None: self._base = self self._search = search self._params = {"aggs": {}} - def to_dict(self): + def to_dict(self) -> Any: return super(AggsProxy, self).to_dict().get("aggs", {}) class Request(object): - def __init__(self, using="default", index=None, doc_type=None, extra=None): + _doc_type: Any + _doc_type_map: Any + + def __init__( + self, + using: str = "default", + index: Any = None, + doc_type: Any = None, + extra: Any = None, + ) -> None: self._using = using self._index = None @@ -143,22 +150,22 @@ def __init__(self, using="default", index=None, doc_type=None, extra=None): elif doc_type: self._doc_type.append(doc_type) - self._params = {} - self._extra = extra or {} + self._params: Any = {} + self._extra: Any = extra or {} - def __eq__(self, other): + def __eq__(self: Any, other: Any) -> bool: return ( isinstance(other, Request) and other._params == self._params and other._index == self._index and other._doc_type == self._doc_type - and other.to_dict() == self.to_dict() + and other.to_dict() == self.to_dict() # type: ignore ) - def __copy__(self): + def __copy__(self) -> Any: return self._clone() - def params(self, **kwargs): + def params(self, **kwargs: Any) -> Any: """ Specify query params to be used when executing the search. All the keyword arguments will override the current values. @@ -172,7 +179,7 @@ def params(self, **kwargs): s._params.update(kwargs) return s - def index(self, *index): + def index(self, *index: Any) -> Any: """ Set the index for the search. If called empty it will remove all information. 
@@ -200,7 +207,7 @@ def index(self, *index): return s - def _resolve_field(self, path): + def _resolve_field(self, path: Any) -> Any: for dt in self._doc_type: if not hasattr(dt, "_index"): continue @@ -208,10 +215,10 @@ def _resolve_field(self, path): if field is not None: return field - def _resolve_nested(self, hit, parent_class=None): + def _resolve_nested(self, hit: Any, parent_class: Any = None) -> Any: doc_class = Hit - nested_path = [] + nested_path: Any = [] nesting = hit["_nested"] while nesting and "field" in nesting: nested_path.append(nesting["field"]) @@ -228,7 +235,7 @@ def _resolve_nested(self, hit, parent_class=None): return doc_class - def _get_result(self, hit, parent_class=None): + def _get_result(self, hit: Any, parent_class: Any = None) -> Any: doc_class = Hit dt = hit.get("_type") @@ -252,7 +259,7 @@ def _get_result(self, hit, parent_class=None): callback = getattr(doc_class, "from_opensearch", doc_class) return callback(hit) - def doc_type(self, *doc_type, **kwargs): + def doc_type(self, *doc_type: Any, **kwargs: Any) -> Any: """ Set the type to search through. You can supply a single value or multiple. Values can be strings or subclasses of ``Document``. @@ -278,7 +285,7 @@ def doc_type(self, *doc_type, **kwargs): s._doc_type_map.update(kwargs) return s - def using(self, client): + def using(self, client: Any) -> Any: """ Associate the search request with an opensearch client. A fresh copy will be returned with current instance remaining unchanged. @@ -291,7 +298,7 @@ def using(self, client): s._using = client return s - def extra(self, **kwargs): + def extra(self, **kwargs: Any) -> Any: """ Add extra keys to the request body. Mostly here for backwards compatibility. @@ -302,7 +309,7 @@ def extra(self, **kwargs): s._extra.update(kwargs) return s - def _clone(self): + def _clone(self) -> Any: s = self.__class__( using=self._using, index=self._index, doc_type=self._doc_type ) @@ -316,7 +323,7 @@ class Search(Request): query = ProxyDescriptor("query") post_filter = ProxyDescriptor("post_filter") - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ Search request to opensearch. @@ -330,31 +337,31 @@ def __init__(self, **kwargs): super(Search, self).__init__(**kwargs) self.aggs = AggsProxy(self) - self._sort = [] - self._collapse = {} - self._source = None - self._highlight = {} - self._highlight_opts = {} - self._suggest = {} - self._script_fields = {} + self._sort: Any = [] + self._collapse: Any = {} + self._source: Any = None + self._highlight: Any = {} + self._highlight_opts: Any = {} + self._suggest: Any = {} + self._script_fields: Any = {} self._response_class = Response self._query_proxy = QueryProxy(self, "query") self._post_filter_proxy = QueryProxy(self, "post_filter") - def filter(self, *args, **kwargs): + def filter(self, *args: Any, **kwargs: Any) -> Any: return self.query(Bool(filter=[Q(*args, **kwargs)])) - def exclude(self, *args, **kwargs): + def exclude(self, *args: Any, **kwargs: Any) -> Any: return self.query(Bool(filter=[~Q(*args, **kwargs)])) - def __iter__(self): + def __iter__(self) -> Any: """ Iterate over the hits. """ return iter(self.execute()) - def __getitem__(self, n): + def __getitem__(self, n: Any) -> Any: """ Support slicing the `Search` instance for pagination. @@ -389,7 +396,7 @@ def __getitem__(self, n): return s @classmethod - def from_dict(cls, d): + def from_dict(cls, d: Any) -> Any: """ Construct a new `Search` instance from a raw dict containing the search body. 
Useful when migrating from raw dictionaries. @@ -410,7 +417,7 @@ def from_dict(cls, d): s.update_from_dict(d) return s - def _clone(self): + def _clone(self) -> Any: """ Return a clone of the current search request. Performs a shallow copy of all the underlying objects. Used internally by most state modifying @@ -433,7 +440,7 @@ def _clone(self): s.aggs._params = {"aggs": self.aggs._params["aggs"].copy()} return s - def response_class(self, cls): + def response_class(self, cls: Any) -> Any: """ Override the default wrapper used for the response. """ @@ -441,7 +448,7 @@ def response_class(self, cls): s._response_class = cls return s - def update_from_dict(self, d): + def update_from_dict(self, d: Any) -> "Search": """ Apply options from a serialized body to the current instance. Modifies the object in-place. Used mostly by ``from_dict``. @@ -476,7 +483,7 @@ def update_from_dict(self, d): self._extra.update(d) return self - def script_fields(self, **kwargs): + def script_fields(self, **kwargs: Any) -> Any: """ Define script fields to be calculated on hits. @@ -502,7 +509,7 @@ def script_fields(self, **kwargs): s._script_fields.update(kwargs) return s - def source(self, fields=None, **kwargs): + def source(self, fields: Any = None, **kwargs: Any) -> Any: """ Selectively control how the _source field is returned. @@ -547,7 +554,7 @@ def source(self, fields=None, **kwargs): return s - def sort(self, *keys): + def sort(self, *keys: Any) -> Any: """ Add sorting information to the search request. If called without arguments it will remove all sort requirements. Otherwise it will @@ -580,7 +587,12 @@ def sort(self, *keys): s._sort.append(k) return s - def collapse(self, field=None, inner_hits=None, max_concurrent_group_searches=None): + def collapse( + self, + field: Any = None, + inner_hits: Any = None, + max_concurrent_group_searches: Any = None, + ) -> Any: """ Add collapsing information to the search request. @@ -603,7 +615,7 @@ def collapse(self, field=None, inner_hits=None, max_concurrent_group_searches=No s._collapse["max_concurrent_group_searches"] = max_concurrent_group_searches return s - def highlight_options(self, **kwargs): + def highlight_options(self, **kwargs: Any) -> Any: """ Update the global highlighting options used for this request. For example:: @@ -615,7 +627,7 @@ def highlight_options(self, **kwargs): s._highlight_opts.update(kwargs) return s - def highlight(self, *fields, **kwargs): + def highlight(self, *fields: Any, **kwargs: Any) -> Any: """ Request highlighting of some fields. All keyword arguments passed in will be used as parameters for all the fields in the ``fields`` parameter. Example:: @@ -655,7 +667,7 @@ def highlight(self, *fields, **kwargs): s._highlight[f] = kwargs return s - def suggest(self, name, text, **kwargs): + def suggest(self, name: Any, text: Any, **kwargs: Any) -> Any: """ Add a suggestions request to the search. @@ -672,7 +684,7 @@ def suggest(self, name, text, **kwargs): s._suggest[name].update(kwargs) return s - def to_dict(self, count=False, **kwargs): + def to_dict(self, count: bool = False, **kwargs: Any) -> Any: """ Serialize the search into the dictionary that will be sent over as the request's body. @@ -719,7 +731,7 @@ def to_dict(self, count=False, **kwargs): d.update(recursive_to_dict(kwargs)) return d - def count(self): + def count(self) -> Any: """ Return the number of hits matching the query and filters. Note that only the actual number is returned. 
@@ -733,7 +745,7 @@ def count(self): # TODO: failed shards detection return opensearch.count(index=self._index, body=d, **self._params)["count"] - def execute(self, ignore_cache=False): + def execute(self, ignore_cache: bool = False) -> Any: """ Execute the search and return an instance of ``Response`` wrapping all the data. @@ -752,7 +764,7 @@ def execute(self, ignore_cache=False): ) return self._response - def scan(self): + def scan(self) -> Any: """ Turn the search into a scan search and return a generator that will iterate over all the documents matching the query. @@ -768,7 +780,7 @@ def scan(self): ): yield self._get_result(hit) - def delete(self): + def delete(self) -> Any: """ delete() executes the query by delegating to delete_by_query() """ @@ -788,22 +800,22 @@ class MultiSearch(Request): request. """ - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: super(MultiSearch, self).__init__(**kwargs) - self._searches = [] + self._searches: Any = [] - def __getitem__(self, key): + def __getitem__(self, key: Any) -> Any: return self._searches[key] - def __iter__(self): + def __iter__(self) -> Any: return iter(self._searches) - def _clone(self): + def _clone(self) -> Any: ms = super(MultiSearch, self)._clone() ms._searches = self._searches[:] return ms - def add(self, search): + def add(self, search: Any) -> Any: """ Adds a new :class:`~opensearchpy.Search` object to the request:: @@ -815,7 +827,7 @@ def add(self, search): ms._searches.append(search) return ms - def to_dict(self): + def to_dict(self) -> Any: out = [] for s in self._searches: meta = {} @@ -828,7 +840,7 @@ def to_dict(self): return out - def execute(self, ignore_cache=False, raise_on_error=True): + def execute(self, ignore_cache: Any = False, raise_on_error: Any = True) -> Any: """ Execute the multi search request and return a list of search results. """ @@ -852,3 +864,6 @@ def execute(self, ignore_cache=False, raise_on_error=True): self._response = out return self._response + + +__all__ = ["Q"] diff --git a/opensearchpy/helpers/search.pyi b/opensearchpy/helpers/search.pyi deleted file mode 100644 index 92b46243..00000000 --- a/opensearchpy/helpers/search.pyi +++ /dev/null @@ -1,35 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from .aggs import AggBase -from .utils import DslBase - -class QueryProxy(object): ... -class ProxyDescriptor(object): ... -class AggsProxy(AggBase, DslBase): ... -class Request(object): ... -class Search(Request): ... -class MultiSearch(Request): ... 
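With search.py and its deleted stub closing out here, a short sketch of the clone-on-write Search chain that the annotations above describe; the index and field names are illustrative assumptions:

    # Sketch only: every call below returns a clone, so "base" is never mutated.
    from opensearchpy.helpers.search import Search

    base = Search(index="articles")
    s = (
        base.filter("term", published=True)  # wrapped as Bool(filter=[Q(...)])
        .exclude("term", draft=True)         # wrapped as Bool(filter=[~Q(...)])
        .sort("-created_at")
        .source(["title", "created_at"])
    )
    s = s[0:10]  # slicing sets "from" and "size" in the body
    print(s.to_dict())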
diff --git a/opensearchpy/helpers/signer.py b/opensearchpy/helpers/signer.py index 9c330b7b..930b8d25 100644 --- a/opensearchpy/helpers/signer.py +++ b/opensearchpy/helpers/signer.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -7,48 +8,18 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -import sys +from typing import Any, Callable, Dict +from urllib.parse import parse_qs, urlencode, urlparse import requests -PY3 = sys.version_info[0] == 3 -if PY3: - from urllib.parse import parse_qs, urlencode, urlparse - - -def fetch_url(prepared_request): # type: ignore - """ - This is a util method that helps in reconstructing the request url. - :param prepared_request: unsigned request - :return: reconstructed url - """ - url = urlparse(prepared_request.url) - path = url.path or "/" - - # fetch the query string if present in the request - querystring = "" - if url.query: - querystring = "?" + urlencode( - parse_qs(url.query, keep_blank_values=True), doseq=True - ) - - # fetch the host information from headers - headers = dict( - (key.lower(), value) for key, value in prepared_request.headers.items() - ) - location = headers.get("host") or url.netloc - - # construct the url and return - return url.scheme + "://" + location + path + querystring - - -class AWSV4SignerAuth(requests.auth.AuthBase): +class AWSV4Signer: """ - AWS V4 Request Signer for Requests. + Generic AWS V4 Request Signer. """ - def __init__(self, credentials, region, service="es"): # type: ignore + def __init__(self, credentials, region: str, service: str = "es") -> Any: # type: ignore if not credentials: raise ValueError("Credentials cannot be empty") self.credentials = credentials @@ -61,6 +32,53 @@ def __init__(self, credentials, region, service="es"): # type: ignore raise ValueError("Service name cannot be empty") self.service = service + def sign(self, method: str, url: str, body: Any) -> Dict[str, str]: + """ + This method signs the request and returns headers. + :param method: HTTP method + :param url: url + :param body: body + :return: headers + """ + + from botocore.auth import SigV4Auth + from botocore.awsrequest import AWSRequest + + # create an AWS request object and sign it using SigV4Auth + aws_request = AWSRequest(method=method.upper(), url=url, data=body) + + # credentials objects expose access_key, secret_key and token attributes + # via @property annotations that call _refresh() on every access, + # creating a race condition if the credentials expire before secret_key + # is called but after access_key- the end result is the access_key doesn't + # correspond to the secret_key used to sign the request. To avoid this, + # get_frozen_credentials() which returns non-refreshing credentials is + # called if it exists. + credentials = ( + self.credentials.get_frozen_credentials() + if hasattr(self.credentials, "get_frozen_credentials") + and callable(self.credentials.get_frozen_credentials) + else self.credentials + ) + + sig_v4_auth = SigV4Auth(credentials, self.service, self.region) + sig_v4_auth.add_auth(aws_request) + + # copy the headers from AWS request object into the prepared_request + headers = dict(aws_request.headers.items()) + headers["X-Amz-Content-SHA256"] = sig_v4_auth.payload(aws_request) + + return headers + + +class RequestsAWSV4SignerAuth(requests.auth.AuthBase): + """ + AWS V4 Request Signer for Requests. 
+ """ + + def __init__(self, credentials, region, service: str = "es") -> None: # type: ignore + self.signer = AWSV4Signer(credentials, region, service) + def __call__(self, request): # type: ignore return self._sign_request(request) # type: ignore @@ -71,25 +89,50 @@ def _sign_request(self, prepared_request): # type: ignore :return: signed request """ - from botocore.auth import SigV4Auth - from botocore.awsrequest import AWSRequest + prepared_request.headers.update( + self.signer.sign( + prepared_request.method, + self._fetch_url(prepared_request), # type: ignore + prepared_request.body, + ) + ) - url = fetch_url(prepared_request) # type: ignore + return prepared_request - # create an AWS request object and sign it using SigV4Auth - aws_request = AWSRequest( - method=prepared_request.method.upper(), - url=url, - data=prepared_request.body, + def _fetch_url(self, prepared_request): # type: ignore + """ + This is a util method that helps in reconstructing the request url. + :param prepared_request: unsigned request + :return: reconstructed url + """ + url = urlparse(prepared_request.url) + path = url.path or "/" + + # fetch the query string if present in the request + querystring = "" + if url.query: + querystring = "?" + urlencode( + parse_qs(url.query, keep_blank_values=True), doseq=True + ) + + # fetch the host information from headers + headers = dict( + (key.lower(), value) for key, value in prepared_request.headers.items() ) + location = headers.get("host") or url.netloc - sig_v4_auth = SigV4Auth(self.credentials, self.service, self.region) - sig_v4_auth.add_auth(aws_request) + # construct the url and return + return url.scheme + "://" + location + path + querystring - # copy the headers from AWS request object into the prepared_request - prepared_request.headers.update(dict(aws_request.headers.items())) - prepared_request.headers["X-Amz-Content-SHA256"] = sig_v4_auth.payload( - aws_request - ) - return prepared_request +# Deprecated: use RequestsAWSV4SignerAuth +class AWSV4SignerAuth(RequestsAWSV4SignerAuth): + pass + + +class Urllib3AWSV4SignerAuth(Callable): # type: ignore + def __init__(self, credentials, region, service: str = "es") -> None: # type: ignore + self.signer = AWSV4Signer(credentials, region, service) + + def __call__(self, method: str, url: str, body: Any) -> Dict[str, str]: + return self.signer.sign(method, url, body) diff --git a/opensearchpy/helpers/test.py b/opensearchpy/helpers/test.py index 9338636a..bda16b2e 100644 --- a/opensearchpy/helpers/test.py +++ b/opensearchpy/helpers/test.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,22 +26,19 @@ # under the License. 
-# type: ignore - import os import time +from typing import Any from unittest import SkipTest, TestCase +import opensearchpy.client from opensearchpy import OpenSearch from opensearchpy.exceptions import ConnectionError -if "OPENSEARCH_URL" in os.environ: - OPENSEARCH_URL = os.environ["OPENSEARCH_URL"] -else: - OPENSEARCH_URL = "https://admin:admin@localhost:9200" +OPENSEARCH_URL = os.environ.get("OPENSEARCH_URL", "https://admin:admin@localhost:9200") -def get_test_client(nowait=False, **kwargs): +def get_test_client(nowait: bool = False, **kwargs: Any) -> OpenSearch: # construct kwargs from the environment kw = {"timeout": 30} @@ -52,7 +50,7 @@ def get_test_client(nowait=False, **kwargs): ) kw.update(kwargs) - client = OpenSearch(OPENSEARCH_URL, **kw) + client = OpenSearch(OPENSEARCH_URL, **kw) # type: ignore # wait for yellow status for _ in range(1 if nowait else 100): @@ -67,15 +65,17 @@ def get_test_client(nowait=False, **kwargs): class OpenSearchTestCase(TestCase): + client: Any + @staticmethod - def _get_client(): + def _get_client() -> OpenSearch: return get_test_client() @classmethod - def setup_class(cls): + def setup_class(cls) -> None: cls.client = cls._get_client() - def teardown_method(self, _): + def teardown_method(self, _: Any) -> None: # Hidden indices expanded in wildcards in OpenSearch 7.7 expand_wildcards = ["open", "closed"] if self.opensearch_version() >= (1, 0): @@ -86,20 +86,20 @@ def teardown_method(self, _): ) self.client.indices.delete_template(name="*", ignore=404) - def opensearch_version(self): + def opensearch_version(self) -> Any: if not hasattr(self, "_opensearch_version"): self._opensearch_version = opensearch_version(self.client) return self._opensearch_version -def _get_version(version_string): +def _get_version(version_string: str) -> Any: if "." not in version_string: return () version = version_string.strip().split(".") return tuple(int(v) if v.isdigit() else 999 for v in version) -def opensearch_version(client): +def opensearch_version(client: opensearchpy.client.OpenSearch) -> Any: return _get_version(client.info()["version"]["number"]) @@ -111,3 +111,5 @@ def opensearch_version(client): verify_certs=False, ) OPENSEARCH_VERSION = opensearch_version(client) + +__all__ = ["OpenSearchTestCase"] diff --git a/opensearchpy/helpers/test.pyi b/opensearchpy/helpers/test.pyi deleted file mode 100644 index 1363f821..00000000 --- a/opensearchpy/helpers/test.pyi +++ /dev/null @@ -1,43 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -from typing import Any, Tuple -from unittest import TestCase - -from ..client import OpenSearch - -OPENSEARCH_URL: str - -def get_test_client(nowait: bool = ..., **kwargs: Any) -> OpenSearch: ... -def _get_version(version_string: str) -> Tuple[int, ...]: ... - -class OpenSearchTestCase(TestCase): - @staticmethod - def _get_client() -> OpenSearch: ... - @classmethod - def setup_class(cls) -> None: ... - def teardown_method(self, _: Any) -> None: ... - def opensearch_version(self) -> Tuple[int, ...]: ... diff --git a/opensearchpy/helpers/update_by_query.py b/opensearchpy/helpers/update_by_query.py index 3be888bf..7b560216 100644 --- a/opensearchpy/helpers/update_by_query.py +++ b/opensearchpy/helpers/update_by_query.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -24,6 +25,8 @@ # specific language governing permissions and limitations # under the License. +from typing import Any + from opensearchpy.connection.connections import get_connection from ..helpers.query import Bool, Q @@ -35,7 +38,7 @@ class UpdateByQuery(Request): query = ProxyDescriptor("query") - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """ Update by query request to opensearch. @@ -49,17 +52,17 @@ def __init__(self, **kwargs): """ super(UpdateByQuery, self).__init__(**kwargs) self._response_class = UpdateByQueryResponse - self._script = {} + self._script: Any = {} self._query_proxy = QueryProxy(self, "query") - def filter(self, *args, **kwargs): + def filter(self, *args: Any, **kwargs: Any) -> Any: return self.query(Bool(filter=[Q(*args, **kwargs)])) - def exclude(self, *args, **kwargs): + def exclude(self, *args: Any, **kwargs: Any) -> Any: return self.query(Bool(filter=[~Q(*args, **kwargs)])) @classmethod - def from_dict(cls, d): + def from_dict(cls, d: Any) -> Any: """ Construct a new `UpdateByQuery` instance from a raw dict containing the search body. Useful when migrating from raw dictionaries. @@ -80,7 +83,7 @@ def from_dict(cls, d): u.update_from_dict(d) return u - def _clone(self): + def _clone(self) -> Any: """ Return a clone of the current search request. Performs a shallow copy of all the underlying objects. Used internally by most state modifying @@ -93,7 +96,7 @@ def _clone(self): ubq.query._proxied = self.query._proxied return ubq - def response_class(self, cls): + def response_class(self, cls: Any) -> Any: """ Override the default wrapper used for the response. """ @@ -101,7 +104,7 @@ def response_class(self, cls): ubq._response_class = cls return ubq - def update_from_dict(self, d): + def update_from_dict(self, d: Any) -> "UpdateByQuery": """ Apply options from a serialized body to the current instance. Modifies the object in-place. Used mostly by ``from_dict``. @@ -114,7 +117,7 @@ def update_from_dict(self, d): self._extra.update(d) return self - def script(self, **kwargs): + def script(self, **kwargs: Any) -> Any: """ Define update action to take: @@ -135,7 +138,7 @@ def script(self, **kwargs): ubq._script.update(kwargs) return ubq - def to_dict(self, **kwargs): + def to_dict(self, **kwargs: Any) -> Any: """ Serialize the search into the dictionary that will be sent over as the request's body. @@ -153,7 +156,7 @@ def to_dict(self, **kwargs): d.update(recursive_to_dict(kwargs)) return d - def execute(self): + def execute(self) -> Any: """ Execute the search and return an instance of ``Response`` wrapping all the data. 
diff --git a/opensearchpy/helpers/update_by_query.pyi b/opensearchpy/helpers/update_by_query.pyi deleted file mode 100644 index 90597033..00000000 --- a/opensearchpy/helpers/update_by_query.pyi +++ /dev/null @@ -1,29 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from .search import Request - -class UpdateByQuery(Request): ... diff --git a/opensearchpy/helpers/utils.py b/opensearchpy/helpers/utils.py index 3ebea18e..2a9f19da 100644 --- a/opensearchpy/helpers/utils.py +++ b/opensearchpy/helpers/utils.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -26,19 +27,16 @@ from __future__ import unicode_literals -try: - import collections.abc as collections_abc # only works on python 3.3+ -except ImportError: - import collections as collections_abc - +import collections.abc as collections_abc from copy import copy +from typing import Any, Callable, Dict, Optional, Tuple from six import add_metaclass, iteritems from six.moves import map from opensearchpy.exceptions import UnknownDslObject, ValidationException -SKIP_VALUES = ("", None) +SKIP_VALUES: Tuple[str, None] = ("", None) EXPAND__TO_DOT = True DOC_META_FIELDS = frozenset( @@ -61,7 +59,7 @@ ).union(DOC_META_FIELDS) -def _wrap(val, obj_wrapper=None): +def _wrap(val: Any, obj_wrapper: Optional[Callable[..., Any]] = None) -> Any: if isinstance(val, collections_abc.Mapping): return AttrDict(val) if obj_wrapper is None else obj_wrapper(val) if isinstance(val, list): @@ -70,52 +68,54 @@ def _wrap(val, obj_wrapper=None): class AttrList(object): - def __init__(self, p, obj_wrapper=None): + def __init__( + self, p: Any, obj_wrapper: Optional[Callable[..., Any]] = None + ) -> None: # make iterables into lists if not isinstance(p, list): p = list(p) self._l_ = p self._obj_wrapper = obj_wrapper - def __repr__(self): + def __repr__(self) -> str: return repr(self._l_) - def __eq__(self, other): + def __eq__(self, other: Any) -> bool: if isinstance(other, AttrList): - return other._l_ == self._l_ + return bool(other._l_ == self._l_) # make sure we still equal to a dict with the same data - return other == self._l_ + return bool(other == self._l_) - def __ne__(self, other): - return not self == other + def __ne__(self, other: Any) -> bool: + return bool(not self == other) - def __getitem__(self, k): + def __getitem__(self, k: Any) -> Any: p = self._l_[k] if isinstance(k, slice): return AttrList(p, obj_wrapper=self._obj_wrapper) return 
_wrap(p, self._obj_wrapper) - def __setitem__(self, k, value): + def __setitem__(self, k: Any, value: Any) -> None: self._l_[k] = value - def __iter__(self): + def __iter__(self) -> Any: return map(lambda i: _wrap(i, self._obj_wrapper), self._l_) - def __len__(self): + def __len__(self) -> int: return len(self._l_) - def __nonzero__(self): + def __nonzero__(self) -> bool: return bool(self._l_) __bool__ = __nonzero__ - def __getattr__(self, name): + def __getattr__(self, name: Any) -> Any: return getattr(self._l_, name) - def __getstate__(self): + def __getstate__(self) -> Any: return self._l_, self._obj_wrapper - def __setstate__(self, state): + def __setstate__(self, state: Any) -> None: self._l_, self._obj_wrapper = state @@ -126,44 +126,44 @@ class AttrDict(object): nested dsl dicts. """ - def __init__(self, d): + def __init__(self, d: Any) -> None: # assign the inner dict manually to prevent __setattr__ from firing super(AttrDict, self).__setattr__("_d_", d) - def __contains__(self, key): + def __contains__(self, key: Any) -> bool: return key in self._d_ - def __nonzero__(self): + def __nonzero__(self) -> bool: return bool(self._d_) __bool__ = __nonzero__ - def __dir__(self): + def __dir__(self) -> Any: # introspection for auto-complete in IPython etc return list(self._d_.keys()) - def __eq__(self, other): + def __eq__(self, other: Any) -> bool: if isinstance(other, AttrDict): - return other._d_ == self._d_ + return bool(other._d_ == self._d_) # make sure we still equal to a dict with the same data - return other == self._d_ + return bool(other == self._d_) - def __ne__(self, other): - return not self == other + def __ne__(self, other: Any) -> bool: + return bool(not self == other) - def __repr__(self): + def __repr__(self) -> str: r = repr(self._d_) if len(r) > 60: r = r[:60] + "...}" return r - def __getstate__(self): + def __getstate__(self) -> Any: return (self._d_,) - def __setstate__(self, state): + def __setstate__(self, state: Any) -> None: super(AttrDict, self).__setattr__("_d_", state[0]) - def __getattr__(self, attr_name): + def __getattr__(self, attr_name: Any) -> Any: try: return self.__getitem__(attr_name) except KeyError: @@ -173,7 +173,7 @@ def __getattr__(self, attr_name): ) ) - def get(self, key, default=None): + def get(self, key: Any, default: Any = None) -> Any: try: return self.__getattr__(key) except AttributeError: @@ -181,7 +181,7 @@ def get(self, key, default=None): return default raise - def __delattr__(self, attr_name): + def __delattr__(self, attr_name: Any) -> None: try: del self._d_[attr_name] except KeyError: @@ -191,26 +191,26 @@ def __delattr__(self, attr_name): ) ) - def __getitem__(self, key): + def __getitem__(self, key: Any) -> Any: return _wrap(self._d_[key]) - def __setitem__(self, key, value): + def __setitem__(self, key: Any, value: Any) -> None: self._d_[key] = value - def __delitem__(self, key): + def __delitem__(self, key: Any) -> None: del self._d_[key] - def __setattr__(self, name, value): + def __setattr__(self, name: Any, value: Any) -> None: if name in self._d_ or not hasattr(self.__class__, name): self._d_[name] = value else: # there is an attribute on the class (could be property, ..) 
- don't add it as field super(AttrDict, self).__setattr__(name, value) - def __iter__(self): + def __iter__(self) -> Any: return iter(self._d_) - def to_dict(self): + def to_dict(self) -> Any: return self._d_ @@ -222,20 +222,21 @@ class DslMeta(type): It then uses the information from that registry (as well as `name` and `shortcut` attributes from the base class) to construct any subclass based - on it's name. + on its name. For typical use see `QueryMeta` and `Query` in `opensearchpy.query`. """ - _types = {} + _types: Dict[str, Any] = {} - def __init__(cls, name, bases, attrs): + def __init__(cls: Any, name: str, bases: Any, attrs: Any) -> None: + # TODO: why is it calling itself?! super(DslMeta, cls).__init__(name, bases, attrs) # skip for DslBase if not hasattr(cls, "_type_shortcut"): return if cls.name is None: - # abstract base class, register it's shortcut + # abstract base class, register its shortcut cls._types[cls._type_name] = cls._type_shortcut # and create a registry for subclasses if not hasattr(cls, "_classes"): @@ -245,7 +246,7 @@ def __init__(cls, name, bases, attrs): cls._classes[cls.name] = cls @classmethod - def get_dsl_type(cls, name): + def get_dsl_type(cls, name: Any) -> Any: try: return cls._types[name] except KeyError: @@ -264,15 +265,16 @@ class DslBase(object): - to_dict method to serialize into dict (to be sent via opensearch-py) - basic logical operators (&, | and ~) using a Bool(Filter|Query) TODO: move into a class specific for Query/Filter - - respects the definition of the class and (de)serializes it's + - respects the definition of the class and (de)serializes its attributes based on the `_param_defs` definition (for example turning all values in the `must` attribute into Query objects) """ - _param_defs = {} + _param_defs: Dict[str, Any] = {} + _params: Dict[str, Any] @classmethod - def get_dsl_class(cls, name, default=None): + def get_dsl_class(cls: Any, name: Any, default: Optional[bool] = None) -> Any: try: return cls._classes[name] except KeyError: @@ -282,14 +284,14 @@ def get_dsl_class(cls, name, default=None): "DSL class `{}` does not exist in {}.".format(name, cls._type_name) ) - def __init__(self, _expand__to_dot=EXPAND__TO_DOT, **params): + def __init__(self, _expand__to_dot: Any = EXPAND__TO_DOT, **params: Any) -> None: self._params = {} for pname, pvalue in iteritems(params): if "__" in pname and _expand__to_dot: pname = pname.replace("__", ".") self._setattr(pname, pvalue) - def _repr_params(self): + def _repr_params(self) -> str: """Produce a repr of all our parameters to be used in __repr__.""" return ", ".join( "{}={!r}".format(n.replace(".", "__"), v) @@ -298,21 +300,21 @@ def _repr_params(self): if "type" not in self._param_defs.get(n, {}) or v ) - def __repr__(self): + def __repr__(self) -> str: return "{}({})".format(self.__class__.__name__, self._repr_params()) - def __eq__(self, other): + def __eq__(self, other: Any) -> bool: return isinstance(other, self.__class__) and other.to_dict() == self.to_dict() - def __ne__(self, other): + def __ne__(self, other: Any) -> bool: return not self == other - def __setattr__(self, name, value): + def __setattr__(self, name: str, value: Optional[bool]) -> None: if name.startswith("_"): return super(DslBase, self).__setattr__(name, value) return self._setattr(name, value) - def _setattr(self, name, value): + def _setattr(self, name: Any, value: Any) -> None: # if this attribute has special type assigned to it... 
if name in self._param_defs: pinfo = self._param_defs[name] @@ -342,7 +344,7 @@ def _setattr(self, name, value): value = shortcut(value) self._params[name] = value - def __getattr__(self, name): + def __getattr__(self, name: str) -> Any: if name.startswith("_"): raise AttributeError( "{!r} object has no attribute {!r}".format( @@ -374,7 +376,7 @@ def __getattr__(self, name): return AttrDict(value) return value - def to_dict(self): + def to_dict(self) -> Any: """ Serialize the DSL object to plain dict """ @@ -413,7 +415,7 @@ def to_dict(self): d[pname] = value return {self.name: d} - def _clone(self): + def _clone(self) -> Any: c = self.__class__() for attr in self._params: c._params[attr] = copy(self._params[attr]) @@ -421,7 +423,9 @@ def _clone(self): class HitMeta(AttrDict): - def __init__(self, document, exclude=("_source", "_fields")): + def __init__( + self, document: Dict[str, Any], exclude: Any = ("_source", "_fields") + ) -> None: d = { k[1:] if k.startswith("_") else k: v for (k, v) in iteritems(document) @@ -434,7 +438,9 @@ def __init__(self, document, exclude=("_source", "_fields")): class ObjectBase(AttrDict): - def __init__(self, meta=None, **kwargs): + _doc_type: Any + + def __init__(self, meta: Any = None, **kwargs: Any) -> None: meta = meta or {} for k in list(kwargs): if k.startswith("_") and k[1:] in META_FIELDS: @@ -445,7 +451,7 @@ def __init__(self, meta=None, **kwargs): super(ObjectBase, self).__init__(kwargs) @classmethod - def __list_fields(cls): + def __list_fields(cls: Any) -> Any: """ Get all the fields defined for our class, if we have an Index, try looking at the index mappings as well, mark the fields from Index as @@ -466,7 +472,7 @@ def __list_fields(cls): yield name, field, True @classmethod - def __get_field(cls, name): + def __get_field(cls: Any, name: Any) -> Any: try: return cls._doc_type.mapping[name] except KeyError: @@ -478,30 +484,30 @@ def __get_field(cls, name): pass @classmethod - def from_opensearch(cls, hit): + def from_opensearch(cls: Any, hit: Any) -> Any: meta = hit.copy() data = meta.pop("_source", {}) doc = cls(meta=meta) doc._from_dict(data) return doc - def _from_dict(self, data): + def _from_dict(self, data: Any) -> None: for k, v in iteritems(data): f = self.__get_field(k) if f and f._coerce: v = f.deserialize(v) setattr(self, k, v) - def __getstate__(self): + def __getstate__(self) -> Any: return self.to_dict(), self.meta._d_ - def __setstate__(self, state): + def __setstate__(self, state: Any) -> None: data, meta = state super(AttrDict, self).__setattr__("_d_", {}) super(AttrDict, self).__setattr__("meta", HitMeta(meta)) self._from_dict(data) - def __getattr__(self, name): + def __getattr__(self, name: Any) -> Any: try: return super(ObjectBase, self).__getattr__(name) except AttributeError: @@ -514,7 +520,7 @@ def __getattr__(self, name): return value raise - def to_dict(self, skip_empty=True): + def to_dict(self, skip_empty: Optional[bool] = True) -> Any: out = {} for k, v in iteritems(self._d_): # if this is a mapped field, @@ -535,8 +541,8 @@ def to_dict(self, skip_empty=True): out[k] = v return out - def clean_fields(self): - errors = {} + def clean_fields(self) -> None: + errors: Dict[str, Any] = {} for name, field, optional in self.__list_fields(): data = self._d_.get(name, None) if data is None and optional: @@ -553,15 +559,15 @@ def clean_fields(self): if errors: raise ValidationException(errors) - def clean(self): + def clean(self) -> None: pass - def full_clean(self): + def full_clean(self) -> None: self.clean_fields() 
self.clean() -def merge(data, new_data, raise_on_conflict=False): +def merge(data: Any, new_data: Any, raise_on_conflict: bool = False) -> None: if not ( isinstance(data, (AttrDict, collections_abc.Mapping)) and isinstance(new_data, (AttrDict, collections_abc.Mapping)) @@ -572,6 +578,13 @@ def merge(data, new_data, raise_on_conflict=False): ) ) + if not isinstance(new_data, Dict): + raise ValueError( + "You can only merge two dicts! Got {!r} and {!r} instead.".format( + data, new_data + ) + ) + for key, value in iteritems(new_data): if ( key in data @@ -582,10 +595,10 @@ def merge(data, new_data, raise_on_conflict=False): elif key in data and data[key] != value and raise_on_conflict: raise ValueError("Incompatible data for key %r, cannot be merged." % key) else: - data[key] = value + data[key] = value # type: ignore -def recursive_to_dict(data): +def recursive_to_dict(data: Any) -> Any: """Recursively transform objects that potentially have .to_dict() into dictionary literals by traversing AttrList, AttrDict, list, tuple, and Mapping types. diff --git a/opensearchpy/helpers/utils.pyi b/opensearchpy/helpers/utils.pyi deleted file mode 100644 index 74783974..00000000 --- a/opensearchpy/helpers/utils.pyi +++ /dev/null @@ -1,32 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -class AttrList(object): ... -class AttrDict(object): ... -class DslMeta(type): ... -class DslBase(object): ... -class HitMeta(AttrDict): ... -class ObjectBase(AttrDict): ... diff --git a/opensearchpy/helpers/wrappers.py b/opensearchpy/helpers/wrappers.py index 19cf3dec..1583391c 100644 --- a/opensearchpy/helpers/wrappers.py +++ b/opensearchpy/helpers/wrappers.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,13 +26,12 @@ # under the License. import operator +from typing import Any from six import iteritems, string_types from .utils import AttrDict -__all__ = ["Range"] - class Range(AttrDict): OPS = { @@ -41,7 +41,7 @@ class Range(AttrDict): "gte": operator.ge, } - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: if args and (len(args) > 1 or kwargs or not isinstance(args[0], dict)): raise ValueError( "Range accepts a single dictionary or a set of keyword arguments." 
@@ -60,10 +60,10 @@ def __init__(self, *args, **kwargs): super(Range, self).__init__(args[0] if args else kwargs) - def __repr__(self): + def __repr__(self) -> str: return "Range(%s)" % ", ".join("%s=%r" % op for op in iteritems(self._d_)) - def __contains__(self, item): + def __contains__(self, item: Any) -> bool: if isinstance(item, string_types): return super(Range, self).__contains__(item) @@ -73,7 +73,7 @@ def __contains__(self, item): return True @property - def upper(self): + def upper(self) -> Any: if "lt" in self._d_: return self._d_["lt"], False if "lte" in self._d_: @@ -81,9 +81,12 @@ def upper(self): return None, False @property - def lower(self): + def lower(self) -> Any: if "gt" in self._d_: return self._d_["gt"], False if "gte" in self._d_: return self._d_["gte"], True return None, False + + +__all__ = ["Range"] diff --git a/opensearchpy/helpers/wrappers.pyi b/opensearchpy/helpers/wrappers.pyi deleted file mode 100644 index fc79c384..00000000 --- a/opensearchpy/helpers/wrappers.pyi +++ /dev/null @@ -1,29 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from .utils import AttrDict - -class Range(AttrDict): ... diff --git a/opensearchpy/plugins/__init__.py b/opensearchpy/plugins/__init__.py index 2f42da79..b0a5fb09 100644 --- a/opensearchpy/plugins/__init__.py +++ b/opensearchpy/plugins/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/opensearchpy/plugins/__init__.pyi b/opensearchpy/plugins/__init__.pyi deleted file mode 100644 index 6c0097cd..00000000 --- a/opensearchpy/plugins/__init__.pyi +++ /dev/null @@ -1,8 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. diff --git a/opensearchpy/plugins/alerting.py b/opensearchpy/plugins/alerting.py index defbf326..02c6b1a1 100644 --- a/opensearchpy/plugins/alerting.py +++ b/opensearchpy/plugins/alerting.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -8,12 +9,14 @@ # GitHub history for details. 
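For context on the `DslMeta`/`DslBase` machinery annotated above: the registry is what lets a plain name resolve to a query class. A minimal sketch, assuming the top-level `Q` shortcut that opensearch-py re-exports from its DSL layer:

```python
# Illustrative only, not part of this change: Q() looks the name "match" up
# in the registries that DslMeta populates and returns the Match query class.
from opensearchpy import Q

q = Q("match", title="moneyball")
print(q.to_dict())  # {'match': {'title': 'moneyball'}}

# DslBase supplies the logical operators; & combines two queries into a Bool.
print((q & Q("term", year=2011)).to_dict())
```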
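Likewise, a small usage sketch for the `Range` wrapper typed above (illustrative, not part of the diff):

```python
from opensearchpy.helpers.wrappers import Range

r = Range(gte=2000, lt=2010)
print(2005 in r)   # True: every OPS comparison against the bounds passes
print("gte" in r)  # True: string keys fall back to dict-style membership
print(r.lower)     # (2000, True): bound value plus whether it is inclusive
print(r.upper)     # (2010, False)
```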
+from typing import Any + from ..client.utils import NamespacedClient, _make_path, query_params class AlertingClient(NamespacedClient): @query_params() - def search_monitor(self, body, params=None, headers=None): + def search_monitor(self, body: Any, params: Any = None, headers: Any = None) -> Any: """ Returns the search result for a monitor. @@ -28,7 +31,9 @@ def search_monitor(self, body, params=None, headers=None): ) @query_params() - def get_monitor(self, monitor_id, params=None, headers=None): + def get_monitor( + self, monitor_id: Any, params: Any = None, headers: Any = None + ) -> Any: """ Returns the details of a specific monitor. @@ -42,7 +47,9 @@ def get_monitor(self, monitor_id, params=None, headers=None): ) @query_params("dryrun") - def run_monitor(self, monitor_id, params=None, headers=None): + def run_monitor( + self, monitor_id: Any, params: Any = None, headers: Any = None + ) -> Any: """ Runs/Executes a specific monitor. @@ -57,7 +64,9 @@ def run_monitor(self, monitor_id, params=None, headers=None): ) @query_params() - def create_monitor(self, body=None, params=None, headers=None): + def create_monitor( + self, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Creates a monitor with inputs, triggers, and actions. @@ -72,7 +81,9 @@ def create_monitor(self, body=None, params=None, headers=None): ) @query_params() - def update_monitor(self, monitor_id, body=None, params=None, headers=None): + def update_monitor( + self, monitor_id: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Updates a monitor's inputs, triggers, and actions. @@ -88,7 +99,9 @@ def update_monitor(self, monitor_id, body=None, params=None, headers=None): ) @query_params() - def delete_monitor(self, monitor_id, params=None, headers=None): + def delete_monitor( + self, monitor_id: Any, params: Any = None, headers: Any = None + ) -> Any: """ Deletes a specific monitor. @@ -102,7 +115,9 @@ def delete_monitor(self, monitor_id, params=None, headers=None): ) @query_params() - def get_destination(self, destination_id=None, params=None, headers=None): + def get_destination( + self, destination_id: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Returns the details of a specific destination. @@ -118,7 +133,9 @@ def get_destination(self, destination_id=None, params=None, headers=None): ) @query_params() - def create_destination(self, body=None, params=None, headers=None): + def create_destination( + self, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Creates a destination for slack, mail, or custom-webhook. @@ -133,7 +150,13 @@ def create_destination(self, body=None, params=None, headers=None): ) @query_params() - def update_destination(self, destination_id, body=None, params=None, headers=None): + def update_destination( + self, + destination_id: Any, + body: Any = None, + params: Any = None, + headers: Any = None, + ) -> Any: """ Updates a destination's inputs, triggers, and actions. @@ -149,7 +172,9 @@ def update_destination(self, destination_id, body=None, params=None, headers=Non ) @query_params() - def delete_destination(self, destination_id, params=None, headers=None): + def delete_destination( + self, destination_id: Any, params: Any = None, headers: Any = None + ) -> Any: """ Deletes a specific destination. 
@@ -163,7 +188,7 @@ def delete_destination(self, destination_id, params=None, headers=None): ) @query_params() - def get_alerts(self, params=None, headers=None): + def get_alerts(self, params: Any = None, headers: Any = None) -> Any: """ Returns all alerts. @@ -176,7 +201,9 @@ def get_alerts(self, params=None, headers=None): ) @query_params() - def acknowledge_alert(self, monitor_id, body=None, params=None, headers=None): + def acknowledge_alert( + self, monitor_id: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Acknowledges an alert. diff --git a/opensearchpy/plugins/alerting.pyi b/opensearchpy/plugins/alerting.pyi deleted file mode 100644 index d712e762..00000000 --- a/opensearchpy/plugins/alerting.pyi +++ /dev/null @@ -1,72 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -from typing import Any, Union - -from ..client.utils import NamespacedClient as NamespacedClient -from ..client.utils import query_params as query_params - -class AlertingClient(NamespacedClient): - def search_monitor( - self, body: Any, params: Any | None = ..., headers: Any | None = ... - ) -> Union[bool, Any]: ... - def get_monitor( - self, monitor_id: Any, params: Any | None = ..., headers: Any | None = ... - ) -> Union[bool, Any]: ... - def run_monitor( - self, monitor_id: Any, params: Any | None = ..., headers: Any | None = ... - ) -> Union[bool, Any]: ... - def create_monitor( - self, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def update_monitor( - self, - monitor_id: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def delete_monitor( - self, monitor_id: Any, params: Any | None = ..., headers: Any | None = ... - ) -> Union[bool, Any]: ... - def get_destination( - self, - destination_id: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def create_destination( - self, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def update_destination( - self, - destination_id: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def delete_destination( - self, destination_id: Any, params: Any | None = ..., headers: Any | None = ... - ) -> Union[bool, Any]: ... - def get_alerts( - self, params: Any | None = ..., headers: Any | None = ... - ) -> Union[bool, Any]: ... - def acknowledge_alert( - self, - monitor_id: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... diff --git a/opensearchpy/plugins/index_management.py b/opensearchpy/plugins/index_management.py index 435ab8d4..77a31279 100644 --- a/opensearchpy/plugins/index_management.py +++ b/opensearchpy/plugins/index_management.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -8,12 +9,16 @@ # GitHub history for details. 
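The alerting methods above are reached through the plugin namespace on the client. A hedged sketch of typical calls; the monitor name and query body are illustrative:

```python
from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=[{"host": "localhost", "port": 9200}],
    http_auth=("admin", "admin"),
    use_ssl=True,
    verify_certs=False,
)

# find monitors by name, then run each match on demand
result = client.plugins.alerting.search_monitor(
    body={"query": {"match": {"monitor.name": "test-monitor"}}}
)
for hit in result["hits"]["hits"]:
    print(client.plugins.alerting.run_monitor(hit["_id"]))
```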
+from typing import Any + from ..client.utils import SKIP_IN_PATH, NamespacedClient, _make_path, query_params class IndexManagementClient(NamespacedClient): @query_params() - def put_policy(self, policy, body=None, params=None, headers=None): + def put_policy( + self, policy: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Creates, or updates, a policy. @@ -31,7 +36,9 @@ def put_policy(self, policy, body=None, params=None, headers=None): ) @query_params() - def add_policy(self, index, body=None, params=None, headers=None): + def add_policy( + self, index: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Adds a policy to an index. This operation does not change the policy if the index already has one. @@ -49,7 +56,7 @@ def add_policy(self, index, body=None, params=None, headers=None): ) @query_params() - def get_policy(self, policy, params=None, headers=None): + def get_policy(self, policy: Any, params: Any = None, headers: Any = None) -> Any: """ Gets the policy by `policy_id`. @@ -66,7 +73,9 @@ def get_policy(self, policy, params=None, headers=None): ) @query_params() - def remove_policy_from_index(self, index, params=None, headers=None): + def remove_policy_from_index( + self, index: Any, params: Any = None, headers: Any = None + ) -> Any: """ Removes any ISM policy from the index. @@ -83,7 +92,9 @@ def remove_policy_from_index(self, index, params=None, headers=None): ) @query_params() - def change_policy(self, index, body=None, params=None, headers=None): + def change_policy( + self, index: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Updates the managed index policy to a new policy (or to a new version of the policy). @@ -101,7 +112,9 @@ def change_policy(self, index, body=None, params=None, headers=None): ) @query_params() - def retry(self, index, body=None, params=None, headers=None): + def retry( + self, index: Any, body: Any = None, params: Any = None, headers: Any = None + ) -> Any: """ Retries the failed action for an index. @@ -119,7 +132,7 @@ def retry(self, index, body=None, params=None, headers=None): ) @query_params("show_policy") - def explain_index(self, index, params=None, headers=None): + def explain_index(self, index: Any, params: Any = None, headers: Any = None) -> Any: """ Gets the current state of the index. @@ -136,7 +149,9 @@ def explain_index(self, index, params=None, headers=None): ) @query_params() - def delete_policy(self, policy, params=None, headers=None): + def delete_policy( + self, policy: Any, params: Any = None, headers: Any = None + ) -> Any: """ Deletes the policy by `policy_id`. diff --git a/opensearchpy/plugins/index_management.pyi b/opensearchpy/plugins/index_management.pyi deleted file mode 100644 index 24a59dc9..00000000 --- a/opensearchpy/plugins/index_management.pyi +++ /dev/null @@ -1,71 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. - -from typing import Any, Union - -from ..client.utils import NamespacedClient as NamespacedClient -from ..client.utils import query_params as query_params - -class IndexManagementClient(NamespacedClient): - def put_policy( - self, - policy: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... 
- def add_policy( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def get_policy( - self, - policy: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def remove_policy_from_index( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def change_policy( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def retry( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def explain_index( - self, - index: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... - def delete_policy( - self, - policy: Any, - body: Any | None = ..., - params: Any | None = ..., - headers: Any | None = ..., - ) -> Union[bool, Any]: ... diff --git a/opensearchpy/serializer.py b/opensearchpy/serializer.py index 295c4af0..e8c87ba9 100644 --- a/opensearchpy/serializer.py +++ b/opensearchpy/serializer.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,10 +26,12 @@ # under the License. +from typing import Any, Dict, Optional + try: import simplejson as json except ImportError: - import json + import json # type: ignore import uuid from datetime import date, datetime @@ -44,22 +47,22 @@ class Serializer(object): - mimetype = "" + mimetype: str = "" - def loads(self, s): + def loads(self, s: str) -> Any: raise NotImplementedError() - def dumps(self, data): + def dumps(self, data: Any) -> Any: raise NotImplementedError() class TextSerializer(Serializer): - mimetype = "text/plain" + mimetype: str = "text/plain" - def loads(self, s): + def loads(self, s: str) -> Any: return s - def dumps(self, data): + def dumps(self, data: Any) -> Any: if isinstance(data, string_types): return data @@ -67,9 +70,9 @@ def dumps(self, data): class JSONSerializer(Serializer): - mimetype = "application/json" + mimetype: str = "application/json" - def default(self, data): + def default(self, data: Any) -> Any: if isinstance(data, TIME_TYPES): # Little hack to avoid importing pandas but to not # return 'NaT' string for pd.NaT as that's not a valid @@ -141,13 +144,13 @@ def default(self, data): raise TypeError("Unable to serialize %r (type: %s)" % (data, type(data))) - def loads(self, s): + def loads(self, s: str) -> Any: try: return json.loads(s) except (ValueError, TypeError) as e: raise SerializationError(s, e) - def dumps(self, data): + def dumps(self, data: Any) -> Any: # don't serialize strings if isinstance(data, string_types): return data @@ -160,14 +163,18 @@ def dumps(self, data): raise SerializationError(data, e) -DEFAULT_SERIALIZERS = { +DEFAULT_SERIALIZERS: Dict[str, Serializer] = { JSONSerializer.mimetype: JSONSerializer(), TextSerializer.mimetype: TextSerializer(), } class Deserializer(object): - def __init__(self, serializers, default_mimetype="application/json"): + def __init__( + self, + serializers: Dict[str, Serializer], + default_mimetype: str = "application/json", + ) -> None: try: self.default = serializers[default_mimetype] except KeyError: @@ -176,7 +183,7 @@ def __init__(self, serializers, default_mimetype="application/json"): ) self.serializers = serializers - def 
loads(self, s, mimetype=None): + def loads(self, s: str, mimetype: Optional[str] = None) -> Any: if not mimetype: deserializer = self.default else: @@ -198,7 +205,7 @@ def loads(self, s, mimetype=None): class AttrJSONSerializer(JSONSerializer): - def default(self, data): + def default(self, data: Any) -> Any: if isinstance(data, AttrList): return data._l_ if hasattr(data, "to_dict"): diff --git a/opensearchpy/serializer.pyi b/opensearchpy/serializer.pyi deleted file mode 100644 index c68f51ca..00000000 --- a/opensearchpy/serializer.pyi +++ /dev/null @@ -1,55 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Dict, Optional - -class Serializer(object): - mimetype: str - def loads(self, s: str) -> Any: ... - def dumps(self, data: Any) -> str: ... - -class TextSerializer(Serializer): - mimetype: str - def loads(self, s: str) -> Any: ... - def dumps(self, data: Any) -> str: ... - -class JSONSerializer(Serializer): - mimetype: str - def default(self, data: Any) -> Any: ... - def loads(self, s: str) -> Any: ... - def dumps(self, data: Any) -> str: ... - -DEFAULT_SERIALIZERS: Dict[str, Serializer] - -class Deserializer(object): - def __init__( - self, - serializers: Dict[str, Serializer], - default_mimetype: str = ..., - ) -> None: ... - def loads(self, s: str, mimetype: Optional[str] = ...) -> Any: ... - -class AttrJSONSerializer(JSONSerializer): ... 
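Because `DEFAULT_SERIALIZERS` is keyed by mimetype, a custom serializer can be dropped into the mapping that `Deserializer` consults. A minimal sketch; the ndjson class and mimetype are illustrative, not part of the library:

```python
from typing import Any

from opensearchpy.serializer import DEFAULT_SERIALIZERS, Deserializer, JSONSerializer


class NDJSONSerializer(JSONSerializer):
    mimetype: str = "application/x-ndjson"

    def dumps(self, data: Any) -> Any:
        # newline-delimited JSON: serialize each list item onto its own line
        if isinstance(data, list):
            return "\n".join(JSONSerializer.dumps(self, item) for item in data)
        return JSONSerializer.dumps(self, data)


serializers = dict(DEFAULT_SERIALIZERS)
serializers[NDJSONSerializer.mimetype] = NDJSONSerializer()

deserializer = Deserializer(serializers)  # default remains application/json
print(deserializer.loads('{"title": "Moneyball"}', "application/json"))
```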
diff --git a/opensearchpy/transport.py b/opensearchpy/transport.py index c1d69d2c..44962542 100644 --- a/opensearchpy/transport.py +++ b/opensearchpy/transport.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -27,8 +28,9 @@ import time from itertools import chain +from typing import Any, Callable, Collection, Dict, List, Mapping, Optional, Type, Union -from .connection import Urllib3HttpConnection +from .connection import Connection, Urllib3HttpConnection from .connection_pool import ConnectionPool, DummyConnectionPool, EmptyConnectionPool from .exceptions import ( ConnectionError, @@ -36,10 +38,12 @@ SerializationError, TransportError, ) -from .serializer import DEFAULT_SERIALIZERS, Deserializer, JSONSerializer +from .serializer import DEFAULT_SERIALIZERS, Deserializer, JSONSerializer, Serializer -def get_host_info(node_info, host): +def get_host_info( + node_info: Dict[str, Any], host: Optional[Dict[str, Any]] +) -> Optional[Dict[str, Any]]: """ Simple callback that takes the node info from `/_cluster/nodes` and a parsed connection information and return the connection information. If @@ -67,27 +71,50 @@ class Transport(object): Main interface is the `perform_request` method. """ - DEFAULT_CONNECTION_CLASS = Urllib3HttpConnection + DEFAULT_CONNECTION_CLASS: Type[Connection] = Urllib3HttpConnection + + connection_pool: Any + deserializer: Deserializer + + max_retries: int + retry_on_timeout: bool + retry_on_status: Collection[int] + send_get_body_as: str + serializer: Serializer + connection_pool_class: Any + connection_class: Type[Connection] + kwargs: Any + hosts: Any + seed_connections: List[Connection] + sniffer_timeout: Optional[float] + sniff_on_start: bool + sniff_on_connection_fail: bool + last_sniff: float + sniff_timeout: Optional[float] + host_info_callback: Any def __init__( self, - hosts, - connection_class=None, - connection_pool_class=ConnectionPool, - host_info_callback=get_host_info, - sniff_on_start=False, - sniffer_timeout=None, - sniff_timeout=0.1, - sniff_on_connection_fail=False, - serializer=JSONSerializer(), - serializers=None, - default_mimetype="application/json", - max_retries=3, - retry_on_status=(502, 503, 504), - retry_on_timeout=False, - send_get_body_as="GET", - **kwargs - ): + hosts: Any, + connection_class: Optional[Type[Connection]] = None, + connection_pool_class: Type[ConnectionPool] = ConnectionPool, + host_info_callback: Callable[ + [Dict[str, Any], Optional[Dict[str, Any]]], Optional[Dict[str, Any]] + ] = get_host_info, + sniff_on_start: bool = False, + sniffer_timeout: Optional[float] = None, + sniff_timeout: float = 0.1, + sniff_on_connection_fail: bool = False, + serializer: Serializer = JSONSerializer(), + serializers: Optional[Mapping[str, Serializer]] = None, + default_mimetype: str = "application/json", + max_retries: int = 3, + pool_maxsize: Optional[int] = None, + retry_on_status: Collection[int] = (502, 503, 504), + retry_on_timeout: bool = False, + send_get_body_as: str = "GET", + **kwargs: Any + ) -> None: """ :arg hosts: list of dictionaries, each containing keyword arguments to create a `connection_class` instance @@ -120,6 +147,8 @@ def __init__( don't support passing bodies with GET requests. If you set this to 'POST' a POST method will be used instead, if to 'source' then the body will be serialized and passed as a query parameter `source`. 
+        :arg pool_maxsize: maximum connection pool size used by the pool
+            manager, enabling custom connection pooling for the session

         Any extra keyword arguments will be passed to the `connection_class`
         when creating and instance unless overridden by that connection's
@@ -139,6 +168,7 @@ def __init__(
         self.deserializer = Deserializer(_serializers, default_mimetype)

         self.max_retries = max_retries
+        self.pool_maxsize = pool_maxsize
         self.retry_on_timeout = retry_on_timeout
         self.retry_on_status = retry_on_status
         self.send_get_body_as = send_get_body_as
@@ -180,7 +210,7 @@ def __init__(
         if sniff_on_start:
             self.sniff_hosts(True)

-    def add_connection(self, host):
+    def add_connection(self, host: Any) -> None:
         """
         Create a new :class:`~opensearchpy.Connection` instance and add it to the
         pool.
@@ -189,7 +219,7 @@ def add_connection(self, host):
         self.hosts.append(host)
         self.set_connections(self.hosts)

-    def set_connections(self, hosts):
+    def set_connections(self, hosts: Any) -> None:
         """
         Instantiate all the connections and create new connection pool to hold
         them. Tries to identify unchanged hosts and re-use existing
@@ -199,7 +229,7 @@ def set_connections(self, hosts):
         """

         # construct the connections
-        def _create_connection(host):
+        def _create_connection(host: Any) -> Any:
            # if this is not the initial setup look at the existing connection
            # options and identify connections that haven't changed and can be
            # kept around.
@@ -211,11 +241,11 @@ def _create_connection(host):
             # previously unseen params, create new connection
             kwargs = self.kwargs.copy()
             kwargs.update(host)
+            if self.pool_maxsize and isinstance(self.pool_maxsize, int):
+                kwargs["pool_maxsize"] = self.pool_maxsize
             return self.connection_class(**kwargs)

-        connections = map(_create_connection, hosts)
-
-        connections = list(zip(connections, hosts))
+        connections = list(zip(map(_create_connection, hosts), hosts))
         if len(connections) == 1:
             self.connection_pool = DummyConnectionPool(connections)
         else:
@@ -224,7 +254,7 @@ def _create_connection(host):
                 connections, **self.kwargs
             )

-    def get_connection(self):
+    def get_connection(self) -> Any:
         """
         Retrieve a :class:`~opensearchpy.Connection` instance from the
         :class:`~opensearchpy.ConnectionPool` instance.
@@ -234,7 +264,7 @@ def get_connection(self):
             self.sniff_hosts()
         return self.connection_pool.get_connection()

-    def _get_sniff_data(self, initial=False):
+    def _get_sniff_data(self, initial: bool = False) -> Any:
         """
         Perform the request to get sniffing information. Returns a list of
         dictionaries (one per node) containing all the information from the
@@ -282,7 +312,7 @@ def _get_sniff_data(self, initial=False):

         return list(node_info["nodes"].values())

-    def _get_host_info(self, host_info):
+    def _get_host_info(self, host_info: Any) -> Any:
         host = {}
         address = host_info.get("http", {}).get("publish_address")
@@ -303,7 +333,7 @@ def _get_host_info(self, host_info):

         return self.host_info_callback(host_info, host)

-    def sniff_hosts(self, initial=False):
+    def sniff_hosts(self, initial: bool = False) -> Any:
         """
         Obtain a list of nodes from the cluster and create a new connection
         pool using the information retrieved.
@@ -315,7 +345,7 @@ def sniff_hosts(self, initial=False):
         """
         node_info = self._get_sniff_data(initial)

-        hosts = list(filter(None, (self._get_host_info(n) for n in node_info)))
+        hosts: Any = list(filter(None, (self._get_host_info(n) for n in node_info)))

         # we weren't able to get any nodes or host_info_callback blocked all -
         # raise error.
@@ -326,7 +356,7 @@ def sniff_hosts(self, initial=False): self.set_connections(hosts) - def mark_dead(self, connection): + def mark_dead(self, connection: Connection) -> None: """ Mark a connection as dead (failed) in the connection pool. If sniffing on failure is enabled this will initiate the sniffing process. @@ -338,17 +368,26 @@ def mark_dead(self, connection): if self.sniff_on_connection_fail: self.sniff_hosts() - def perform_request(self, method, url, headers=None, params=None, body=None): + def perform_request( + self, + method: str, + url: str, + params: Optional[Mapping[str, Any]] = None, + body: Any = None, + timeout: Optional[Union[int, float]] = None, + ignore: Collection[int] = (), + headers: Optional[Mapping[str, str]] = None, + ) -> Any: """ Perform the actual request. Retrieve a connection from the connection - pool, pass all the information to it's perform_request method and + pool, pass all the information to its perform_request method and return the data. If an exception was raised, mark the connection as failed and retry (up to `max_retries` times). If the operation was successful and the connection used was previously - marked as dead, mark it as live, resetting it's failure count. + marked as dead, mark it as live, resetting its failure count. :arg method: HTTP method to use :arg url: absolute url (without host) to target @@ -409,7 +448,7 @@ def perform_request(self, method, url, headers=None, params=None, body=None): raise e else: - # connection didn't fail, confirm it's live status + # connection didn't fail, confirm its live status self.connection_pool.mark_live(connection) if method == "HEAD": @@ -421,13 +460,13 @@ def perform_request(self, method, url, headers=None, params=None, body=None): ) return data - def close(self): + def close(self) -> Any: """ Explicitly closes connections """ - self.connection_pool.close() + return self.connection_pool.close() - def _resolve_request_args(self, method, params, body): + def _resolve_request_args(self, method: str, params: Any, body: Any) -> Any: """Resolves parameters for .perform_request()""" if body is not None: body = self.serializer.dumps(body) @@ -463,3 +502,6 @@ def _resolve_request_args(self, method, params, body): ignore = (ignore,) return method, params, body, ignore, timeout + + +__all__ = ["TransportError"] diff --git a/opensearchpy/transport.pyi b/opensearchpy/transport.pyi deleted file mode 100644 index dfdcedb8..00000000 --- a/opensearchpy/transport.pyi +++ /dev/null @@ -1,94 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. 
See the License for the -# specific language governing permissions and limitations -# under the License. - -from typing import Any, Callable, Collection, Dict, List, Mapping, Optional, Type, Union - -from .connection import Connection -from .connection_pool import ConnectionPool -from .serializer import Deserializer, Serializer - -def get_host_info( - node_info: Dict[str, Any], host: Optional[Dict[str, Any]] -) -> Optional[Dict[str, Any]]: ... - -class Transport(object): - DEFAULT_CONNECTION_CLASS: Type[Connection] - connection_pool: ConnectionPool - deserializer: Deserializer - - max_retries: int - retry_on_timeout: bool - retry_on_status: Collection[int] - send_get_body_as: str - serializer: Serializer - connection_pool_class: Type[ConnectionPool] - connection_class: Type[Connection] - kwargs: Any - hosts: Optional[List[Dict[str, Any]]] - seed_connections: List[Connection] - sniffer_timeout: Optional[float] - sniff_on_start: bool - sniff_on_connection_fail: bool - last_sniff: float - sniff_timeout: Optional[float] - host_info_callback: Callable[ - [Dict[str, Any], Optional[Dict[str, Any]]], Optional[Dict[str, Any]] - ] - def __init__( - self, - hosts: Any, - connection_class: Optional[Type[Any]] = ..., - connection_pool_class: Type[ConnectionPool] = ..., - host_info_callback: Callable[ - [Dict[str, Any], Optional[Dict[str, Any]]], Optional[Dict[str, Any]] - ] = ..., - sniff_on_start: bool = ..., - sniffer_timeout: Optional[float] = ..., - sniff_timeout: float = ..., - sniff_on_connection_fail: bool = ..., - serializer: Serializer = ..., - serializers: Optional[Mapping[str, Serializer]] = ..., - default_mimetype: str = ..., - max_retries: int = ..., - retry_on_status: Collection[int] = ..., - retry_on_timeout: bool = ..., - send_get_body_as: str = ..., - **kwargs: Any - ) -> None: ... - def add_connection(self, host: Any) -> None: ... - def set_connections(self, hosts: Collection[Any]) -> None: ... - def get_connection(self) -> Connection: ... - def sniff_hosts(self, initial: bool = ...) -> None: ... - def mark_dead(self, connection: Connection) -> None: ... - def perform_request( - self, - method: str, - url: str, - headers: Optional[Mapping[str, str]] = ..., - params: Optional[Mapping[str, Any]] = ..., - body: Optional[Any] = ..., - ) -> Union[bool, Any]: ... - def close(self) -> None: ... diff --git a/samples/README.md b/samples/README.md index ad431cd8..b6e72f2c 100644 --- a/samples/README.md +++ b/samples/README.md @@ -15,5 +15,5 @@ Install [poetry](https://python-poetry.org/docs/). ``` poetry install -poetry run hello/hello.py +poetry run python hello/hello.py ``` diff --git a/samples/advanced_index_actions/advanced_index_actions_sample.py b/samples/advanced_index_actions/advanced_index_actions_sample.py new file mode 100644 index 00000000..562f82e2 --- /dev/null +++ b/samples/advanced_index_actions/advanced_index_actions_sample.py @@ -0,0 +1,104 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
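With `pool_maxsize` now stored on `Transport` and threaded into the connection kwargs in `_create_connection` above, it can be passed straight to the client. A brief sketch against a local cluster:

```python
from opensearchpy import OpenSearch, Urllib3HttpConnection

# pool_maxsize caps the size of the underlying urllib3 pool per host
client = OpenSearch(
    hosts=[{"host": "localhost", "port": 9200}],
    connection_class=Urllib3HttpConnection,
    pool_maxsize=20,
    http_auth=("admin", "admin"),
    use_ssl=True,
    verify_certs=False,
    ssl_show_warn=False,
)
print(client.info())
```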
+import time
+
+from opensearchpy import OpenSearch
+
+# For cleaner output, comment in the two lines below to disable warnings and informational messages
+# import urllib3
+# urllib3.disable_warnings()
+
+
+def test_opensearch_examples() -> None:
+    # Set up
+    client = OpenSearch(
+        hosts=["https://localhost:9200"],
+        use_ssl=True,
+        verify_certs=False,
+        http_auth=("admin", "admin"),
+    )
+    client.indices.create(index="movies")
+    print("'movies' index created!")
+
+    # Test Clear Index Cache
+    client.indices.clear_cache(index="movies")
+    print("Cache for 'movies' index cleared!")
+    client.indices.clear_cache(index="movies", query=True)
+    print("Query cache for 'movies' index cleared!")
+    client.indices.clear_cache(index="movies", fielddata=True, request=True)
+    print("Field data and request cache for 'movies' index cleared!")
+
+    # Test Flush Index
+    client.indices.flush(index="movies")
+    print("'movies' index flushed!")
+
+    # Test Refresh Index
+    client.indices.refresh(index="movies")
+    print("'movies' index refreshed!")
+
+    # Test Close or Open Index
+    client.indices.close(index="movies")
+    print("'movies' index closed!")
+    time.sleep(2)  # add sleep to ensure the index has time to close
+    client.indices.open(index="movies")
+    print("'movies' index opened!")
+
+    # Test Force Merge Index
+    client.indices.forcemerge(index="movies")
+    print("'movies' index force merged!")
+
+    # Test Clone
+    client.indices.put_settings(
+        index="movies", body={"index": {"blocks": {"write": True}}}
+    )
+    print("Write operations blocked for 'movies' index!")
+    time.sleep(2)
+    client.indices.clone(index="movies", target="movies_clone")
+    print("'movies' index cloned to 'movies_clone'!")
+    client.indices.put_settings(
+        index="movies", body={"index": {"blocks": {"write": False}}}
+    )
+    print("Write operations enabled for 'movies' index!")
+
+    # Test Split
+    client.indices.create(
+        index="books",
+        body={
+            "settings": {
+                "index": {
+                    "number_of_shards": 5,
+                    "number_of_routing_shards": 30,
+                    "blocks": {"write": True},
+                }
+            }
+        },
+    )
+    print("'books' index created!")
+    time.sleep(2)  # add sleep to ensure the index has time to become read-only
+    client.indices.split(
+        index="books",
+        target="bigger_books",
+        body={"settings": {"index": {"number_of_shards": 10}}},
+    )
+    print("'books' index split into 'bigger_books'!")
+    client.indices.put_settings(
+        index="books", body={"index": {"blocks": {"write": False}}}
+    )
+    print("Write operations enabled for 'books' index!")
+
+    # Cleanup
+    client.indices.delete(index=["movies", "books", "movies_clone", "bigger_books"])
+    print("All indices deleted!")
+
+
+if __name__ == "__main__":
+    test_opensearch_examples()
diff --git a/samples/aws/README.md b/samples/aws/README.md
new file mode 100644
index 00000000..17ad4ee0
--- /dev/null
+++ b/samples/aws/README.md
@@ -0,0 +1,22 @@
+## AWS SigV4 Samples
+
+Create an OpenSearch domain in AWS that supports IAM-based AuthN/AuthZ.
+
+```
+export AWS_ACCESS_KEY_ID=
+export AWS_SECRET_ACCESS_KEY=
+export AWS_SESSION_TOKEN=
+export AWS_REGION=us-west-2
+
+export SERVICE=es # use "aoss" for OpenSearch Serverless.
+export ENDPOINT=https://....us-west-2.es.amazonaws.com
+
+poetry run python aws/search-urllib3.py
+```
+
+This will output the version of OpenSearch and a search result.
+ +``` +opensearch: 2.3.0 +{'director': 'Bennett Miller', 'title': 'Moneyball', 'year': 2011} +``` diff --git a/samples/aws/search-requests.py b/samples/aws/search-requests.py new file mode 100644 index 00000000..0af366f0 --- /dev/null +++ b/samples/aws/search-requests.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + +import logging +from os import environ +from time import sleep +from urllib.parse import urlparse + +from boto3 import Session + +from opensearchpy import OpenSearch, RequestsAWSV4SignerAuth, RequestsHttpConnection + +# verbose logging +logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.INFO) + +# cluster endpoint, for example: my-test-domain.us-east-1.es.amazonaws.com +url = urlparse(environ["ENDPOINT"]) +region = environ.get("AWS_REGION", "us-east-1") +service = environ.get("SERVICE", "es") + +credentials = Session().get_credentials() + +auth = RequestsAWSV4SignerAuth(credentials, region, service) + +client = OpenSearch( + hosts=[{"host": url.netloc, "port": url.port or 443}], + http_auth=auth, + use_ssl=True, + verify_certs=True, + connection_class=RequestsHttpConnection, + timeout=30, +) + +# TODO: remove when OpenSearch Serverless adds support for / +if service == "es": + info = client.info() + print(f"{info['version']['distribution']}: {info['version']['number']}") + +# create an index +index = "movies" +client.indices.create(index=index) + +try: + # index data + document = {"director": "Bennett Miller", "title": "Moneyball", "year": 2011} + client.index(index=index, body=document, id="1") + + # wait for the document to index + sleep(1) + + # search for the document + results = client.search(body={"query": {"match": {"director": "miller"}}}) + for hit in results["hits"]["hits"]: + print(hit["_source"]) + + # delete the document + client.delete(index=index, id="1") +finally: + # delete the index + client.indices.delete(index=index) diff --git a/samples/aws/search-urllib3.py b/samples/aws/search-urllib3.py new file mode 100644 index 00000000..534caf40 --- /dev/null +++ b/samples/aws/search-urllib3.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
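The requests-based sample above also has an async counterpart; a hedged sketch using the `AWSV4SignerAsyncAuth` helper named in the changelog, reading the same environment variables:

```python
import asyncio
from os import environ
from urllib.parse import urlparse

from boto3 import Session

from opensearchpy import AsyncHttpConnection, AsyncOpenSearch, AWSV4SignerAsyncAuth


async def main() -> None:
    url = urlparse(environ["ENDPOINT"])
    auth = AWSV4SignerAsyncAuth(
        Session().get_credentials(),
        environ.get("AWS_REGION", "us-east-1"),
        environ.get("SERVICE", "es"),
    )
    client = AsyncOpenSearch(
        hosts=[{"host": url.netloc, "port": url.port or 443}],
        http_auth=auth,
        use_ssl=True,
        verify_certs=True,
        connection_class=AsyncHttpConnection,
    )
    try:
        info = await client.info()
        print(f"{info['version']['distribution']}: {info['version']['number']}")
    finally:
        await client.close()


asyncio.run(main())
```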
+ +import logging +from os import environ +from time import sleep +from urllib.parse import urlparse + +from boto3 import Session + +from opensearchpy import OpenSearch, Urllib3AWSV4SignerAuth, Urllib3HttpConnection + +# verbose logging +logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.INFO) + +# cluster endpoint, for example: my-test-domain.us-east-1.es.amazonaws.com +url = urlparse(environ["ENDPOINT"]) +region = environ.get("AWS_REGION", "us-east-1") +service = environ.get("SERVICE", "es") + +credentials = Session().get_credentials() + +auth = Urllib3AWSV4SignerAuth(credentials, region, service) + +client = OpenSearch( + hosts=[{"host": url.netloc, "port": url.port or 443}], + http_auth=auth, + use_ssl=True, + verify_certs=True, + connection_class=Urllib3HttpConnection, + timeout=30, +) + +# TODO: remove when OpenSearch Serverless adds support for / +if service == "es": + info = client.info() + print(f"{info['version']['distribution']}: {info['version']['number']}") + +# create an index +index = "movies" +client.indices.create(index=index) + +try: + # index data + document = {"director": "Bennett Miller", "title": "Moneyball", "year": 2011} + client.index(index=index, body=document, id="1") + + # wait for the document to index + sleep(1) + + # search for the document + results = client.search(body={"query": {"match": {"director": "miller"}}}) + for hit in results["hits"]["hits"]: + print(hit["_source"]) + + # delete the document + client.delete(index=index, id="1") +finally: + # delete the index + client.indices.delete(index=index) diff --git a/samples/bulk/bulk-array.py b/samples/bulk/bulk-array.py index 8df6fa63..5191a291 100755 --- a/samples/bulk/bulk-array.py +++ b/samples/bulk/bulk-array.py @@ -1,59 +1,59 @@ #!/usr/bin/env python +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
+ import os -import json +from typing import Any from opensearchpy import OpenSearch # connect to an instance of OpenSearch -host = os.getenv('HOST', default='localhost') -port = int(os.getenv('PORT', 9200)) -auth = ( - os.getenv('USERNAME', 'admin'), - os.getenv('PASSWORD', 'admin') -) +host = os.getenv("HOST", default="localhost") +port = int(os.getenv("PORT", 9200)) +auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) client = OpenSearch( - hosts = [{'host': host, 'port': port}], - http_auth = auth, - use_ssl = True, - verify_certs = False, - ssl_show_warn = False + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, ) # check whether an index exists index_name = "my-index" if not client.indices.exists(index_name): - - client.indices.create(index_name, + client.indices.create( + index_name, body={ - "mappings":{ + "mappings": { "properties": { - "value": { - "type": "float" - }, + "value": {"type": "float"}, } } - } + }, ) # index data -data = [] +data: Any = [] for i in range(100): - data.append({ "index": {"_index": index_name, "_id": i }}) - data.append({ "value": i }) + data.append({"index": {"_index": index_name, "_id": i}}) + data.append({"value": i}) rc = client.bulk(data) if rc["errors"]: - print(f"There were errors:") + print("There were errors:") for item in rc["items"]: print(f"{item['index']['status']}: {item['index']['error']['type']}") else: @@ -61,4 +61,3 @@ # delete index client.indices.delete(index=index_name) - diff --git a/samples/bulk/bulk-helpers.py b/samples/bulk/bulk-helpers.py index 1210ee86..3dc165c8 100755 --- a/samples/bulk/bulk-helpers.py +++ b/samples/bulk/bulk-helpers.py @@ -1,58 +1,56 @@ #!/usr/bin/env python +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
+ import os -import json from opensearchpy import OpenSearch, helpers # connect to an instance of OpenSearch -host = os.getenv('HOST', default='localhost') -port = int(os.getenv('PORT', 9200)) -auth = ( - os.getenv('USERNAME', 'admin'), - os.getenv('PASSWORD', 'admin') -) +host = os.getenv("HOST", default="localhost") +port = int(os.getenv("PORT", 9200)) +auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) client = OpenSearch( - hosts = [{'host': host, 'port': port}], - http_auth = auth, - use_ssl = True, - verify_certs = False, - ssl_show_warn = False + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, ) # check whether an index exists index_name = "my-index" if not client.indices.exists(index_name): - - client.indices.create(index_name, + client.indices.create( + index_name, body={ - "mappings":{ + "mappings": { "properties": { - "value": { - "type": "float" - }, + "value": {"type": "float"}, } } - } + }, ) # index data data = [] for i in range(100): - data.append({ "_index": index_name, "_id": i, "value": i }) + data.append({"_index": index_name, "_id": i, "value": i}) rc = helpers.bulk(client, data) print(f"Bulk-inserted {rc[0]} items.") # delete index client.indices.delete(index=index_name) - diff --git a/samples/bulk/bulk-ld.py b/samples/bulk/bulk-ld.py index 5487c68f..fff0ae98 100755 --- a/samples/bulk/bulk-ld.py +++ b/samples/bulk/bulk-ld.py @@ -1,59 +1,59 @@ #!/usr/bin/env python +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
+ -import os import json +import os from opensearchpy import OpenSearch # connect to an instance of OpenSearch -host = os.getenv('HOST', default='localhost') -port = int(os.getenv('PORT', 9200)) -auth = ( - os.getenv('USERNAME', 'admin'), - os.getenv('PASSWORD', 'admin') -) +host = os.getenv("HOST", default="localhost") +port = int(os.getenv("PORT", 9200)) +auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) client = OpenSearch( - hosts = [{'host': host, 'port': port}], - http_auth = auth, - use_ssl = True, - verify_certs = False, - ssl_show_warn = False + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, ) # check whether an index exists index_name = "my-index" if not client.indices.exists(index_name): - - client.indices.create(index_name, + client.indices.create( + index_name, body={ - "mappings":{ + "mappings": { "properties": { - "value": { - "type": "float" - }, + "value": {"type": "float"}, } } - } + }, ) # index data -data = '' +data = "" for i in range(100): - data += json.dumps({ "index": {"_index": index_name, "_id": i }}) + "\n" - data += json.dumps({ "value": i }) + "\n" + data += json.dumps({"index": {"_index": index_name, "_id": i}}) + "\n" + data += json.dumps({"value": i}) + "\n" rc = client.bulk(data) if rc["errors"]: - print(f"There were errors:") + print("There were errors:") for item in rc["items"]: print(f"{item['index']['status']}: {item['index']['error']['type']}") else: @@ -61,4 +61,3 @@ # delete index client.indices.delete(index=index_name) - diff --git a/samples/document_lifecycle/document_lifecycle_sample.py b/samples/document_lifecycle/document_lifecycle_sample.py new file mode 100644 index 00000000..1d338da7 --- /dev/null +++ b/samples/document_lifecycle/document_lifecycle_sample.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
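The three bulk samples above (action array, `helpers.bulk`, newline-delimited JSON) all build the full payload up front. For larger volumes the helpers module also offers a streaming variant; a sketch under the same index assumptions:

```python
from typing import Any, Iterator

from opensearchpy import OpenSearch, helpers

client = OpenSearch(
    hosts=[{"host": "localhost", "port": 9200}],
    http_auth=("admin", "admin"),
    use_ssl=True,
    verify_certs=False,
    ssl_show_warn=False,
)


def generate_docs() -> Iterator[Any]:
    # documents are produced lazily and sent to the cluster in chunks
    for i in range(100):
        yield {"_index": "my-index", "_id": i, "value": i}


for ok, item in helpers.streaming_bulk(client, generate_docs()):
    if not ok:
        print(item)
```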
+ +from opensearchpy import OpenSearch + +# For cleaner output, comment in the two lines below to disable warnings and informational messages +# import urllib3 +# urllib3.disable_warnings() + + +# Connect to OpenSearch +client = OpenSearch( + hosts=["https://localhost:9200"], + use_ssl=True, + verify_certs=False, + http_auth=("admin", "admin"), +) + +# Create an index +index = "movies" +if not client.indices.exists(index=index): + client.indices.create(index=index) + +# Create documents +client.index(index=index, id=1, body={"title": "Beauty and the Beast", "year": 1991}) +client.index( + index=index, + id=2, + body={"title": "Beauty and the Beast - Live Action", "year": 2017}, +) + +# Index a document +client.index(index=index, id=2, body={"title": "The Lion King", "year": 1994}) + +# Create a document with auto-generated ID +result = client.index(index=index, body={"title": "The Lion King 2", "year": 1998}) +print(result) + +# Get a document +result = client.get(index=index, id=1)["_source"] +print(result) + +# Get a document with _source includes +result = client.get(index=index, id=1, _source_includes=["title"])["_source"] +print(result) + +# Get a document with _source excludes +result = client.get(index=index, id=1, _source_excludes=["title"])["_source"] +print(result) + +# Get multiple documents +result = client.mget(index=index, body={"docs": [{"_id": 1}, {"_id": 2}]})["docs"] +print(result) + +# Check if a document exists +result = client.exists(index=index, id=1) +print(result) + +# Update a document +client.update(index=index, id=1, body={"doc": {"year": 1995}}) + +# Update a document using script +client.update(index=index, id=1, body={"script": {"source": "ctx._source.year += 5"}}) + +# Update multiple documents by query +client.update_by_query( + index=index, + body={ + "script": {"source": "ctx._source.year -= 1"}, + "query": {"range": {"year": {"gt": 2023}}}, + }, +) + +# Delete a document +client.delete(index=index, id=1) + +# Delete a document with ignore 404 +client.delete(index=index, id=1, ignore=404) + +# Delete multiple documents by query +client.delete_by_query(index=index, body={"query": {"range": {"year": {"gt": 2023}}}}) + +# Delete the index +client.indices.delete(index=index) +print("Deleted index!") diff --git a/samples/hello/hello-async.py b/samples/hello/hello-async.py index 572ef91c..8606a17d 100755 --- a/samples/hello/hello-async.py +++ b/samples/hello/hello-async.py @@ -1,107 +1,96 @@ #!/usr/bin/env python +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + import asyncio from opensearchpy import AsyncOpenSearch -async def main(): + +async def main() -> None: # connect to OpenSearch - host = 'localhost' + host = "localhost" port = 9200 - auth = ('admin', 'admin') # For testing only. Don't store credentials in code. + auth = ("admin", "admin") # For testing only. Don't store credentials in code. 
client = AsyncOpenSearch( - hosts = [{'host': host, 'port': port}], - http_auth = auth, - use_ssl = True, - verify_certs = False, - ssl_show_warn = False + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, ) try: - info = await client.info() - print(f"Welcome to {info['version']['distribution']} {info['version']['number']}!") + info = await client.info() + print( + f"Welcome to {info['version']['distribution']} {info['version']['number']}!" + ) + + # create an index + + index_name = "test-index" + + index_body = {"settings": {"index": {"number_of_shards": 4}}} + + if not await client.indices.exists(index=index_name): + await client.indices.create(index_name, body=index_body) + + # add some documents to the index, asynchronously + await asyncio.gather( + *[ + client.index( + index=index_name, + body={ + "title": f"Moneyball {i}", + "director": "Bennett Miller", + "year": "2011", + }, + id=i, + ) + for i in range(10) + ] + ) - # create an index + # refresh the index + await client.indices.refresh(index=index_name) - index_name = 'test-index' + # search for a document + q = "miller" - index_body = { - 'settings': { - 'index': { - 'number_of_shards': 4 - } + query = { + "size": 5, + "query": {"multi_match": {"query": q, "fields": ["title^2", "director"]}}, } - } - if not await client.indices.exists(index=index_name): - await client.indices.create( - index_name, - body=index_body + results = await client.search(body=query, index=index_name) + + for hit in results["hits"]["hits"]: + print(hit) + + # delete the documents + await asyncio.gather( + *[client.delete(index=index_name, id=i) for i in range(10)] ) - # add some documents to the index, asynchronously - await asyncio.gather(*[ - client.index( - index = index_name, - body = { - 'title': f"Moneyball {i}", - 'director': 'Bennett Miller', - 'year': '2011' - }, - id = i - ) for i in range(10) - ]) - - # refresh the index - await client.indices.refresh(index=index_name) - - # search for a document - q = 'miller' - - query = { - 'size': 5, - 'query': { - 'multi_match': { - 'query': q, - 'fields': ['title^2', 'director'] - } - } - } - - results = await client.search( - body = query, - index = index_name - ) - - for hit in results["hits"]["hits"]: - print(hit) - - # delete the documents - await asyncio.gather(*[ - client.delete( - index = index_name, - id = i - ) for i in range(10) - ]) - - # delete the index - await client.indices.delete( - index = index_name - ) - - finally: - await client.close() + # delete the index + await client.indices.delete(index=index_name) + + finally: + await client.close() + if __name__ == "__main__": loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) loop.run_until_complete(main()) loop.close() - diff --git a/samples/hello/hello.py b/samples/hello/hello.py index d72c2ab7..0b589c9d 100755 --- a/samples/hello/hello.py +++ b/samples/hello/hello.py @@ -1,25 +1,30 @@ #!/usr/bin/env python +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + from opensearchpy import OpenSearch # connect to OpenSearch -host = 'localhost' +host = "localhost" port = 9200 -auth = ('admin', 'admin') # For testing only. Don't store credentials in code. +auth = ("admin", "admin") # For testing only. 
Don't store credentials in code. client = OpenSearch( - hosts = [{'host': host, 'port': port}], - http_auth = auth, - use_ssl = True, - verify_certs = False, - ssl_show_warn = False + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, ) info = client.info() @@ -27,76 +32,45 @@ # create an index -index_name = 'test-index' +index_name = "test-index" -index_body = { - 'settings': { - 'index': { - 'number_of_shards': 4 - } - } -} +index_body = {"settings": {"index": {"number_of_shards": 4}}} -response = client.indices.create( - index_name, - body=index_body -) +response = client.indices.create(index_name, body=index_body) print(response) # add a document to the index -document = { - 'title': 'Moneyball', - 'director': 'Bennett Miller', - 'year': '2011' -} +document = {"title": "Moneyball", "director": "Bennett Miller", "year": "2011"} -id = '1' +id = "1" -response = client.index( - index = index_name, - body = document, - id = id, - refresh = True -) +response = client.index(index=index_name, body=document, id=id, refresh=True) print(response) # search for a document -q = 'miller' +q = "miller" query = { - 'size': 5, - 'query': { - 'multi_match': { - 'query': q, - 'fields': ['title^2', 'director'] - } - } + "size": 5, + "query": {"multi_match": {"query": q, "fields": ["title^2", "director"]}}, } -response = client.search( - body = query, - index = index_name -) +response = client.search(body=query, index=index_name) print(response) # delete the document -response = client.delete( - index = index_name, - id = id -) +response = client.delete(index=index_name, id=id) print(response) # delete the index -response = client.indices.delete( - index = index_name -) +response = client.indices.delete(index=index_name) print(response) diff --git a/samples/index_template/index_template_sample.py b/samples/index_template/index_template_sample.py new file mode 100644 index 00000000..4fe580ac --- /dev/null +++ b/samples/index_template/index_template_sample.py @@ -0,0 +1,129 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. +from opensearchpy import OpenSearch + +# Create a client instance +client = OpenSearch( + hosts=["https://localhost:9200"], + use_ssl=True, + verify_certs=False, + http_auth=("admin", "admin"), +) + +# You can create an index template to define default settings and mappings for indices of certain patterns. The following example creates an index template named `books` with default settings and mappings for indices of the `books-*` pattern: +client.indices.put_index_template( + name="books", + body={ + "index_patterns": ["books-*"], + "priority": 1, + "template": { + "settings": {"index": {"number_of_shards": 3, "number_of_replicas": 0}}, + "mappings": { + "properties": { + "title": {"type": "text"}, + "author": {"type": "text"}, + "published_on": {"type": "date"}, + "pages": {"type": "integer"}, + } + }, + }, + }, +) + +# Now, when you create an index that matches the `books-*` pattern, OpenSearch will automatically apply the template's settings and mappings to the index. 
Let's create an index named books-nonfiction and verify that its settings and mappings match those of the template:
+client.indices.create(index="books-nonfiction")
+print(client.indices.get(index="books-nonfiction"))
+
+# If multiple index templates match the index's name, OpenSearch will apply the template with the highest `priority`. The following example creates two index templates, `books` (pattern `books-*`) and `books-fiction` (pattern `books-fiction-*`), with different settings:
+client.indices.put_index_template(
+    name="books",
+    body={
+        "index_patterns": ["books-*"],
+        "priority": 1,
+        "template": {
+            "settings": {"index": {"number_of_shards": 3, "number_of_replicas": 0}}
+        },
+    },
+)
+
+client.indices.put_index_template(
+    name="books-fiction",
+    body={
+        "index_patterns": ["books-fiction-*"],
+        "priority": 2,
+        "template": {
+            "settings": {"index": {"number_of_shards": 1, "number_of_replicas": 1}}
+        },
+    },
+)
+
+# Test multiple index templates
+client.indices.create(index="books-fiction-romance")
+print(client.indices.get(index="books-fiction-romance"))
+
+
+# Composable index templates are a new type of index template that allows you to define multiple component templates and compose them into a final template. The following example creates a component template named `books_mappings` with default mappings for indices of the `books-*` and `books-fiction-*` patterns:
+client.cluster.put_component_template(
+    name="books_mappings",
+    body={
+        "template": {
+            "mappings": {
+                "properties": {
+                    "title": {"type": "text"},
+                    "author": {"type": "text"},
+                    "published_on": {"type": "date"},
+                    "pages": {"type": "integer"},
+                }
+            }
+        }
+    },
+)
+
+client.indices.put_index_template(
+    name="books",
+    body={
+        "index_patterns": ["books-*"],
+        "composed_of": ["books_mappings"],
+        "priority": 4,
+        "template": {
+            "settings": {"index": {"number_of_shards": 3, "number_of_replicas": 0}}
+        },
+    },
+)
+
+client.indices.put_index_template(
+    name="books-fiction",
+    body={
+        "index_patterns": ["books-fiction-*"],
+        "composed_of": ["books_mappings"],
+        "priority": 5,
+        "template": {
+            "settings": {"index": {"number_of_shards": 1, "number_of_replicas": 1}}
+        },
+    },
+)
+
+
+# Test composable index templates
+client.indices.create(index="books-fiction-horror")
+print(client.indices.get(index="books-fiction-horror"))
+
+# Get an index template
+print(client.indices.get_index_template(name="books"))
+
+# Delete an index template
+client.indices.delete_index_template(name="books")
+
+# Cleanup
+client.indices.delete(index="books-*")
+client.indices.delete_index_template(name="books-fiction")
+client.cluster.delete_component_template(name="books_mappings")
diff --git a/samples/json/json-hello-async.py b/samples/json/json-hello-async.py
new file mode 100755
index 00000000..fbadece6
--- /dev/null
+++ b/samples/json/json-hello-async.py
@@ -0,0 +1,96 @@
+#!/usr/bin/env python
+
+# -*- coding: utf-8 -*-
+# SPDX-License-Identifier: Apache-2.0
+#
+# The OpenSearch Contributors require contributions made to
+# this file be licensed under the Apache-2.0 license or a
+# compatible open source license.
+#
+# Modifications Copyright OpenSearch Contributors. See
+# GitHub history for details.
+
+
+import asyncio
+
+from opensearchpy import AsyncOpenSearch
+
+
+async def main() -> None:
+    # connect to OpenSearch
+    host = "localhost"
+    port = 9200
+    auth = ("admin", "admin")  # For testing only. Don't store credentials in code.
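+    # A hedged note on what follows: transport.perform_request() sends the
+    # HTTP verb and path verbatim, so query parameters are appended to the
+    # URL by hand (see "?refresh=true" below). A hypothetical health check
+    # in the same raw style would be:
+    #
+    #   await client.transport.perform_request("GET", "/_cluster/health")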
+ + client = AsyncOpenSearch( + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, + ) + + try: + info = await client.transport.perform_request("GET", "/") + print( + f"Welcome to {info['version']['distribution']} {info['version']['number']}!" + ) + + # create an index + + index_name = "movies" + + index_body = {"settings": {"index": {"number_of_shards": 4}}} + + print( + await client.transport.perform_request( + "PUT", f"/{index_name}", body=index_body + ) + ) + + # add a document to the index + + document = {"title": "Moneyball", "director": "Bennett Miller", "year": "2011"} + + id = "1" + + print( + await client.transport.perform_request( + "PUT", f"/{index_name}/_doc/{id}?refresh=true", body=document + ) + ) + + # search for a document + + q = "miller" + + query = { + "size": 5, + "query": {"multi_match": {"query": q, "fields": ["title^2", "director"]}}, + } + + print( + await client.transport.perform_request( + "POST", f"/{index_name}/_search", body=query + ) + ) + + # delete the document + + print( + await client.transport.perform_request("DELETE", f"/{index_name}/_doc/{id}") + ) + + # delete the index + + print(await client.transport.perform_request("DELETE", f"/{index_name}")) + + finally: + await client.close() + + +if __name__ == "__main__": + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + loop.run_until_complete(main()) + loop.close() diff --git a/samples/json/json-hello.py b/samples/json/json-hello.py new file mode 100755 index 00000000..5df36f5f --- /dev/null +++ b/samples/json/json-hello.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python + +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + + +from opensearchpy import OpenSearch + +# connect to OpenSearch + +host = "localhost" +port = 9200 +auth = ("admin", "admin") # For testing only. Don't store credentials in code. 
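+# A hedged aside: raw requests surface errors through the same opensearchpy
+# exception types as the typed client methods, so a sketch like the following
+# (hypothetical index name) would catch a missing index:
+#
+#   from opensearchpy.exceptions import NotFoundError
+#   try:
+#       client.transport.perform_request("GET", "/no-such-index")
+#   except NotFoundError:
+#       print("index does not exist")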
+ +client = OpenSearch( + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, +) + +info = client.transport.perform_request("GET", "/") +print(f"Welcome to {info['version']['distribution']} {info['version']['number']}!") + +# create an index + +index_name = "movies" + +index_body = {"settings": {"index": {"number_of_shards": 4}}} + +print(client.transport.perform_request("PUT", f"/{index_name}", body=index_body)) + +# add a document to the index + +document = {"title": "Moneyball", "director": "Bennett Miller", "year": "2011"} + +id = "1" + +print( + client.transport.perform_request( + "PUT", f"/{index_name}/_doc/{id}?refresh=true", body=document + ) +) + +# search for a document + +q = "miller" + +query = { + "size": 5, + "query": {"multi_match": {"query": q, "fields": ["title^2", "director"]}}, +} + +print(client.transport.perform_request("POST", f"/{index_name}/_search", body=query)) + +# delete the document + +print(client.transport.perform_request("DELETE", f"/{index_name}/_doc/{id}")) + +# delete the index + +print(client.transport.perform_request("DELETE", f"/{index_name}")) diff --git a/samples/knn/knn-async-basics.py b/samples/knn/knn-async-basics.py index c237aa46..aa0acf6e 100755 --- a/samples/knn/knn-async-basics.py +++ b/samples/knn/knn-async-basics.py @@ -1,33 +1,36 @@ #!/usr/bin/env python +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + import asyncio import os import random -from opensearchpy import AsyncOpenSearch, AsyncHttpConnection, helpers +from opensearchpy import AsyncHttpConnection, AsyncOpenSearch, helpers + -async def main(): +async def main() -> None: # connect to an instance of OpenSearch - host = os.getenv('HOST', default='localhost') - port = int(os.getenv('PORT', 9200)) - auth = ( - os.getenv('USERNAME', 'admin'), - os.getenv('PASSWORD', 'admin') - ) + host = os.getenv("HOST", default="localhost") + port = int(os.getenv("PORT", 9200)) + auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) client = AsyncOpenSearch( - hosts = [{'host': host, 'port': port}], - http_auth = auth, - use_ssl = True, - verify_certs = False, + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, connection_class=AsyncHttpConnection, - ssl_show_warn = False + ssl_show_warn=False, ) # check whether an index exists @@ -35,34 +38,32 @@ async def main(): dimensions = 5 if not await client.indices.exists(index_name): - await client.indices.create(index_name, + await client.indices.create( + index_name, body={ - "settings":{ - "index.knn": True - }, - "mappings":{ + "settings": {"index.knn": True}, + "mappings": { "properties": { - "values": { - "type": "knn_vector", - "dimension": dimensions - }, + "values": {"type": "knn_vector", "dimension": dimensions}, } - } - } + }, + }, ) # index data vectors = [] for i in range(10): vec = [] - for j in range(dimensions): - vec.append(round(random.uniform(0, 1), 2)) - - vectors.append({ - "_index": index_name, - "_id": i, - "values": vec, - }) + for j in range(dimensions): + vec.append(round(random.uniform(0, 1), 2)) + + vectors.append( + { + "_index": index_name, + "_id": i, + "values": vec, + } + ) # bulk index await helpers.async_bulk(client, vectors) @@ -71,8 +72,8 @@ 
async def main(): # search vec = [] - for j in range(dimensions): - vec.append(round(random.uniform(0, 1), 2)) + for j in range(dimensions): + vec.append(round(random.uniform(0, 1), 2)) print(f"Searching for {vec} ...") search_query = {"query": {"knn": {"values": {"vector": vec, "k": 3}}}} @@ -85,9 +86,9 @@ async def main(): await client.close() + if __name__ == "__main__": loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) loop.run_until_complete(main()) loop.close() - diff --git a/samples/knn/knn-basics.py b/samples/knn/knn-basics.py index 7868df7e..96efb028 100755 --- a/samples/knn/knn-basics.py +++ b/samples/knn/knn-basics.py @@ -1,10 +1,15 @@ #!/usr/bin/env python +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + import os import random @@ -13,19 +18,16 @@ # connect to an instance of OpenSearch -host = os.getenv('HOST', default='localhost') -port = int(os.getenv('PORT', 9200)) -auth = ( - os.getenv('USERNAME', 'admin'), - os.getenv('PASSWORD', 'admin') -) +host = os.getenv("HOST", default="localhost") +port = int(os.getenv("PORT", 9200)) +auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) client = OpenSearch( - hosts = [{'host': host, 'port': port}], - http_auth = auth, - use_ssl = True, - verify_certs = False, - ssl_show_warn = False + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, ) # check whether an index exists @@ -33,34 +35,32 @@ dimensions = 5 if not client.indices.exists(index_name): - client.indices.create(index_name, + client.indices.create( + index_name, body={ - "settings":{ - "index.knn": True - }, - "mappings":{ + "settings": {"index.knn": True}, + "mappings": { "properties": { - "values": { - "type": "knn_vector", - "dimension": dimensions - }, + "values": {"type": "knn_vector", "dimension": dimensions}, } - } - } + }, + }, ) # index data vectors = [] for i in range(10): vec = [] - for j in range(dimensions): - vec.append(round(random.uniform(0, 1), 2)) - - vectors.append({ - "_index": index_name, - "_id": i, - "values": vec, - }) + for j in range(dimensions): + vec.append(round(random.uniform(0, 1), 2)) + + vectors.append( + { + "_index": index_name, + "_id": i, + "values": vec, + } + ) # bulk index helpers.bulk(client, vectors) @@ -69,8 +69,8 @@ # search vec = [] -for j in range(dimensions): - vec.append(round(random.uniform(0, 1), 2)) +for j in range(dimensions): + vec.append(round(random.uniform(0, 1), 2)) print(f"Searching for {vec} ...") search_query = {"query": {"knn": {"values": {"vector": vec, "k": 3}}}} @@ -80,4 +80,3 @@ # delete index client.indices.delete(index=index_name) - diff --git a/samples/knn/knn-boolean-filter.py b/samples/knn/knn-boolean-filter.py index a99b1683..5ae7704c 100755 --- a/samples/knn/knn-boolean-filter.py +++ b/samples/knn/knn-boolean-filter.py @@ -1,10 +1,15 @@ #!/usr/bin/env python +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
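+#
+# A hedged sketch of the query shape this sample builds below: the knn clause
+# sits under "must" while a standard boolean "filter" narrows candidates by
+# the document's metadata, e.g.:
+#
+#   {"query": {"bool": {
+#       "filter": {"bool": {"must": [{"term": {"metadata.genre": "fiction"}}]}},
+#       "must": {"knn": {"values": {"vector": [...], "k": 5}}}}}}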
+ import os import random @@ -13,19 +18,16 @@ # connect to an instance of OpenSearch -host = os.getenv('HOST', default='localhost') -port = int(os.getenv('PORT', 9200)) -auth = ( - os.getenv('USERNAME', 'admin'), - os.getenv('PASSWORD', 'admin') -) +host = os.getenv("HOST", default="localhost") +port = int(os.getenv("PORT", 9200)) +auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) client = OpenSearch( - hosts = [{'host': host, 'port': port}], - http_auth = auth, - use_ssl = True, - verify_certs = False, - ssl_show_warn = False + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, ) # check whether an index exists @@ -33,38 +35,34 @@ dimensions = 5 if not client.indices.exists(index_name): - client.indices.create(index_name, + client.indices.create( + index_name, body={ - "settings":{ - "index.knn": True - }, - "mappings":{ + "settings": {"index.knn": True}, + "mappings": { "properties": { - "values": { - "type": "knn_vector", - "dimension": dimensions - }, + "values": {"type": "knn_vector", "dimension": dimensions}, } - } - } + }, + }, ) # index data vectors = [] -genres = ['fiction', 'drama', 'romance'] +genres = ["fiction", "drama", "romance"] for i in range(3000): vec = [] - for j in range(dimensions): - vec.append(round(random.uniform(0, 1), 2)) - - vectors.append({ - "_index": index_name, - "_id": i, - "values": vec, - "metadata": { - "genre": random.choice(genres) + for j in range(dimensions): + vec.append(round(random.uniform(0, 1), 2)) + + vectors.append( + { + "_index": index_name, + "_id": i, + "values": vec, + "metadata": {"genre": random.choice(genres)}, } - }) + ) # bulk index helpers.bulk(client, vectors) @@ -74,30 +72,15 @@ # search genre = random.choice(genres) vec = [] -for j in range(dimensions): - vec.append(round(random.uniform(0, 1), 2)) +for j in range(dimensions): + vec.append(round(random.uniform(0, 1), 2)) print(f"Searching for {vec} with the '{genre}' genre ...") search_query = { "query": { "bool": { - "filter": { - "bool": { - "must": [{ - "term": { - "metadata.genre": genre - } - }] - } - }, - "must": { - "knn": { - "values": { - "vector": vec, - "k": 5 - } - } - } + "filter": {"bool": {"must": [{"term": {"metadata.genre": genre}}]}}, + "must": {"knn": {"values": {"vector": vec, "k": 5}}}, } } } diff --git a/samples/knn/knn-efficient-filter.py b/samples/knn/knn-efficient-filter.py index 357eeb6a..cbfd41ad 100755 --- a/samples/knn/knn-efficient-filter.py +++ b/samples/knn/knn-efficient-filter.py @@ -1,79 +1,150 @@ #!/usr/bin/env python +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
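+#
+# A hedged note: in contrast to the boolean-filter sample, the filter below is
+# embedded *inside* the knn clause, so the lucene engine can apply it during
+# the approximate search rather than as a separate boolean step, e.g.:
+#
+#   {"query": {"knn": {"location": {"vector": [5, 4], "k": 3,
+#       "filter": {"bool": {"must": [...]}}}}}}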
+ import os -import random from opensearchpy import OpenSearch, helpers # connect to an instance of OpenSearch -host = os.getenv('HOST', default='localhost') -port = int(os.getenv('PORT', 9200)) -auth = ( - os.getenv('USERNAME', 'admin'), - os.getenv('PASSWORD', 'admin') -) +host = os.getenv("HOST", default="localhost") +port = int(os.getenv("PORT", 9200)) +auth = (os.getenv("USERNAME", "admin"), os.getenv("PASSWORD", "admin")) client = OpenSearch( - hosts = [{'host': host, 'port': port}], - http_auth = auth, - use_ssl = True, - verify_certs = False, - ssl_show_warn = False + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, ) # check whether an index exists index_name = "hotels-index" if not client.indices.exists(index_name): - client.indices.create(index_name, + client.indices.create( + index_name, body={ - "settings":{ + "settings": { "index.knn": True, "knn.algo_param.ef_search": 100, "number_of_shards": 1, - "number_of_replicas": 0 + "number_of_replicas": 0, }, - "mappings":{ + "mappings": { "properties": { "location": { - "type": "knn_vector", + "type": "knn_vector", "dimension": 2, "method": { "name": "hnsw", "space_type": "l2", "engine": "lucene", - "parameters": { - "ef_construction": 100, - "m": 16 - } - } + "parameters": {"ef_construction": 100, "m": 16}, + }, }, } - } - } + }, + }, ) # index data vectors = [ - { "_index": "hotels-index", "_id": "1", "location": [5.2, 4.4], "parking" : "true", "rating" : 5 }, - { "_index": "hotels-index", "_id": "2", "location": [5.2, 3.9], "parking" : "false", "rating" : 4 }, - { "_index": "hotels-index", "_id": "3", "location": [4.9, 3.4], "parking" : "true", "rating" : 9 }, - { "_index": "hotels-index", "_id": "4", "location": [4.2, 4.6], "parking" : "false", "rating" : 6}, - { "_index": "hotels-index", "_id": "5", "location": [3.3, 4.5], "parking" : "true", "rating" : 8 }, - { "_index": "hotels-index", "_id": "6", "location": [6.4, 3.4], "parking" : "true", "rating" : 9 }, - { "_index": "hotels-index", "_id": "7", "location": [4.2, 6.2], "parking" : "true", "rating" : 5 }, - { "_index": "hotels-index", "_id": "8", "location": [2.4, 4.0], "parking" : "true", "rating" : 8 }, - { "_index": "hotels-index", "_id": "9", "location": [1.4, 3.2], "parking" : "false", "rating" : 5 }, - { "_index": "hotels-index", "_id": "10", "location": [7.0, 9.9], "parking" : "true", "rating" : 9 }, - { "_index": "hotels-index", "_id": "11", "location": [3.0, 2.3], "parking" : "false", "rating" : 6 }, - { "_index": "hotels-index", "_id": "12", "location": [5.0, 1.0], "parking" : "true", "rating" : 3 }, + { + "_index": "hotels-index", + "_id": "1", + "location": [5.2, 4.4], + "parking": "true", + "rating": 5, + }, + { + "_index": "hotels-index", + "_id": "2", + "location": [5.2, 3.9], + "parking": "false", + "rating": 4, + }, + { + "_index": "hotels-index", + "_id": "3", + "location": [4.9, 3.4], + "parking": "true", + "rating": 9, + }, + { + "_index": "hotels-index", + "_id": "4", + "location": [4.2, 4.6], + "parking": "false", + "rating": 6, + }, + { + "_index": "hotels-index", + "_id": "5", + "location": [3.3, 4.5], + "parking": "true", + "rating": 8, + }, + { + "_index": "hotels-index", + "_id": "6", + "location": [6.4, 3.4], + "parking": "true", + "rating": 9, + }, + { + "_index": "hotels-index", + "_id": "7", + "location": [4.2, 6.2], + "parking": "true", + "rating": 5, + }, + { + "_index": "hotels-index", + "_id": "8", + "location": [2.4, 4.0], + "parking": "true", + "rating": 8, + }, + { + 
"_index": "hotels-index", + "_id": "9", + "location": [1.4, 3.2], + "parking": "false", + "rating": 5, + }, + { + "_index": "hotels-index", + "_id": "10", + "location": [7.0, 9.9], + "parking": "true", + "rating": 9, + }, + { + "_index": "hotels-index", + "_id": "11", + "location": [3.0, 2.3], + "parking": "false", + "rating": 6, + }, + { + "_index": "hotels-index", + "_id": "12", + "location": [5.0, 1.0], + "parking": "true", + "rating": 3, + }, ] helpers.bulk(client, vectors) @@ -86,30 +157,19 @@ "query": { "knn": { "location": { - "vector": [5, 4], - "k": 3, - "filter": { - "bool": { - "must": [ - { - "range": { - "rating": { - "gte": 8, - "lte": 10 - } - } - }, - { - "term": { - "parking": "true" - } - } + "vector": [5, 4], + "k": 3, + "filter": { + "bool": { + "must": [ + {"range": {"rating": {"gte": 8, "lte": 10}}}, + {"term": {"parking": "true"}}, ] } - } + }, } } - } + }, } results = client.search(index=index_name, body=search_query) diff --git a/samples/poetry.lock b/samples/poetry.lock index e8e8b7cc..55fb558d 100644 --- a/samples/poetry.lock +++ b/samples/poetry.lock @@ -1,174 +1,45 @@ # This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] -name = "aiohttp" -version = "3.8.5" -description = "Async http client/server framework (asyncio)" +name = "boto3" +version = "1.28.67" +description = "The AWS SDK for Python" optional = false -python-versions = ">=3.6" +python-versions = ">= 3.7" files = [ - {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a94159871304770da4dd371f4291b20cac04e8c94f11bdea1c3478e557fbe0d8"}, - {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:13bf85afc99ce6f9ee3567b04501f18f9f8dbbb2ea11ed1a2e079670403a7c84"}, - {file = "aiohttp-3.8.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ce2ac5708501afc4847221a521f7e4b245abf5178cf5ddae9d5b3856ddb2f3a"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96943e5dcc37a6529d18766597c491798b7eb7a61d48878611298afc1fca946c"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ad5c3c4590bb3cc28b4382f031f3783f25ec223557124c68754a2231d989e2b"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c413c633d0512df4dc7fd2373ec06cc6a815b7b6d6c2f208ada7e9e93a5061d"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df72ac063b97837a80d80dec8d54c241af059cc9bb42c4de68bd5b61ceb37caa"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c48c5c0271149cfe467c0ff8eb941279fd6e3f65c9a388c984e0e6cf57538e14"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:368a42363c4d70ab52c2c6420a57f190ed3dfaca6a1b19afda8165ee16416a82"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7607ec3ce4993464368505888af5beb446845a014bc676d349efec0e05085905"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0d21c684808288a98914e5aaf2a7c6a3179d4df11d249799c32d1808e79503b5"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:312fcfbacc7880a8da0ae8b6abc6cc7d752e9caa0051a53d217a650b25e9a691"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ad093e823df03bb3fd37e7dec9d4670c34f9e24aeace76808fc20a507cace825"}, - {file = 
"aiohttp-3.8.5-cp310-cp310-win32.whl", hash = "sha256:33279701c04351a2914e1100b62b2a7fdb9a25995c4a104259f9a5ead7ed4802"}, - {file = "aiohttp-3.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:6e4a280e4b975a2e7745573e3fc9c9ba0d1194a3738ce1cbaa80626cc9b4f4df"}, - {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae871a964e1987a943d83d6709d20ec6103ca1eaf52f7e0d36ee1b5bebb8b9b9"}, - {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:461908b2578955045efde733719d62f2b649c404189a09a632d245b445c9c975"}, - {file = "aiohttp-3.8.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:72a860c215e26192379f57cae5ab12b168b75db8271f111019509a1196dfc780"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc14be025665dba6202b6a71cfcdb53210cc498e50068bc088076624471f8bb9"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af740fc2711ad85f1a5c034a435782fbd5b5f8314c9a3ef071424a8158d7f6b"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:841cd8233cbd2111a0ef0a522ce016357c5e3aff8a8ce92bcfa14cef890d698f"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed1c46fb119f1b59304b5ec89f834f07124cd23ae5b74288e364477641060ff"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84f8ae3e09a34f35c18fa57f015cc394bd1389bce02503fb30c394d04ee6b938"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62360cb771707cb70a6fd114b9871d20d7dd2163a0feafe43fd115cfe4fe845e"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:23fb25a9f0a1ca1f24c0a371523546366bb642397c94ab45ad3aedf2941cec6a"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0ba0d15164eae3d878260d4c4df859bbdc6466e9e6689c344a13334f988bb53"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5d20003b635fc6ae3f96d7260281dfaf1894fc3aa24d1888a9b2628e97c241e5"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0175d745d9e85c40dcc51c8f88c74bfbaef9e7afeeeb9d03c37977270303064c"}, - {file = "aiohttp-3.8.5-cp311-cp311-win32.whl", hash = "sha256:2e1b1e51b0774408f091d268648e3d57f7260c1682e7d3a63cb00d22d71bb945"}, - {file = "aiohttp-3.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:043d2299f6dfdc92f0ac5e995dfc56668e1587cea7f9aa9d8a78a1b6554e5755"}, - {file = "aiohttp-3.8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cae533195e8122584ec87531d6df000ad07737eaa3c81209e85c928854d2195c"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f21e83f355643c345177a5d1d8079f9f28b5133bcd154193b799d380331d5d3"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a75ef35f2df54ad55dbf4b73fe1da96f370e51b10c91f08b19603c64004acc"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e2e9839e14dd5308ee773c97115f1e0a1cb1d75cbeeee9f33824fa5144c7634"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44e65da1de4403d0576473e2344828ef9c4c6244d65cf4b75549bb46d40b8dd"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:78d847e4cde6ecc19125ccbc9bfac4a7ab37c234dd88fbb3c5c524e8e14da543"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:c7a815258e5895d8900aec4454f38dca9aed71085f227537208057853f9d13f2"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:8b929b9bd7cd7c3939f8bcfffa92fae7480bd1aa425279d51a89327d600c704d"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:5db3a5b833764280ed7618393832e0853e40f3d3e9aa128ac0ba0f8278d08649"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:a0215ce6041d501f3155dc219712bc41252d0ab76474615b9700d63d4d9292af"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:fd1ed388ea7fbed22c4968dd64bab0198de60750a25fe8c0c9d4bef5abe13824"}, - {file = "aiohttp-3.8.5-cp36-cp36m-win32.whl", hash = "sha256:6e6783bcc45f397fdebc118d772103d751b54cddf5b60fbcc958382d7dd64f3e"}, - {file = "aiohttp-3.8.5-cp36-cp36m-win_amd64.whl", hash = "sha256:b5411d82cddd212644cf9360879eb5080f0d5f7d809d03262c50dad02f01421a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:01d4c0c874aa4ddfb8098e85d10b5e875a70adc63db91f1ae65a4b04d3344cda"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5980a746d547a6ba173fd5ee85ce9077e72d118758db05d229044b469d9029a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a482e6da906d5e6e653be079b29bc173a48e381600161c9932d89dfae5942ef"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80bd372b8d0715c66c974cf57fe363621a02f359f1ec81cba97366948c7fc873"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1161b345c0a444ebcf46bf0a740ba5dcf50612fd3d0528883fdc0eff578006a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd56db019015b6acfaaf92e1ac40eb8434847d9bf88b4be4efe5bfd260aee692"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:153c2549f6c004d2754cc60603d4668899c9895b8a89397444a9c4efa282aaf4"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4a01951fabc4ce26ab791da5f3f24dca6d9a6f24121746eb19756416ff2d881b"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bfb9162dcf01f615462b995a516ba03e769de0789de1cadc0f916265c257e5d8"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7dde0009408969a43b04c16cbbe252c4f5ef4574ac226bc8815cd7342d2028b6"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4149d34c32f9638f38f544b3977a4c24052042affa895352d3636fa8bffd030a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-win32.whl", hash = "sha256:68c5a82c8779bdfc6367c967a4a1b2aa52cd3595388bf5961a62158ee8a59e22"}, - {file = "aiohttp-3.8.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2cf57fb50be5f52bda004b8893e63b48530ed9f0d6c96c84620dc92fe3cd9b9d"}, - {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:eca4bf3734c541dc4f374ad6010a68ff6c6748f00451707f39857f429ca36ced"}, - {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1274477e4c71ce8cfe6c1ec2f806d57c015ebf84d83373676036e256bc55d690"}, - {file = "aiohttp-3.8.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28c543e54710d6158fc6f439296c7865b29e0b616629767e685a7185fab4a6b9"}, - {file = 
"aiohttp-3.8.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:910bec0c49637d213f5d9877105d26e0c4a4de2f8b1b29405ff37e9fc0ad52b8"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5443910d662db951b2e58eb70b0fbe6b6e2ae613477129a5805d0b66c54b6cb7"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e460be6978fc24e3df83193dc0cc4de46c9909ed92dd47d349a452ef49325b7"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb1558def481d84f03b45888473fc5a1f35747b5f334ef4e7a571bc0dfcb11f8"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34dd0c107799dcbbf7d48b53be761a013c0adf5571bf50c4ecad5643fe9cfcd0"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aa1990247f02a54185dc0dff92a6904521172a22664c863a03ff64c42f9b5410"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0e584a10f204a617d71d359fe383406305a4b595b333721fa50b867b4a0a1548"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a3cf433f127efa43fee6b90ea4c6edf6c4a17109d1d037d1a52abec84d8f2e42"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c11f5b099adafb18e65c2c997d57108b5bbeaa9eeee64a84302c0978b1ec948b"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:84de26ddf621d7ac4c975dbea4c945860e08cccde492269db4e1538a6a6f3c35"}, - {file = "aiohttp-3.8.5-cp38-cp38-win32.whl", hash = "sha256:ab88bafedc57dd0aab55fa728ea10c1911f7e4d8b43e1d838a1739f33712921c"}, - {file = "aiohttp-3.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:5798a9aad1879f626589f3df0f8b79b3608a92e9beab10e5fda02c8a2c60db2e"}, - {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a6ce61195c6a19c785df04e71a4537e29eaa2c50fe745b732aa937c0c77169f3"}, - {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:773dd01706d4db536335fcfae6ea2440a70ceb03dd3e7378f3e815b03c97ab51"}, - {file = "aiohttp-3.8.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f83a552443a526ea38d064588613aca983d0ee0038801bc93c0c916428310c28"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f7372f7341fcc16f57b2caded43e81ddd18df53320b6f9f042acad41f8e049a"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea353162f249c8097ea63c2169dd1aa55de1e8fecbe63412a9bc50816e87b761"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d47ae48db0b2dcf70bc8a3bc72b3de86e2a590fc299fdbbb15af320d2659de"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d827176898a2b0b09694fbd1088c7a31836d1a505c243811c87ae53a3f6273c1"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3562b06567c06439d8b447037bb655ef69786c590b1de86c7ab81efe1c9c15d8"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4e874cbf8caf8959d2adf572a78bba17cb0e9d7e51bb83d86a3697b686a0ab4d"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6809a00deaf3810e38c628e9a33271892f815b853605a936e2e9e5129762356c"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:33776e945d89b29251b33a7e7d006ce86447b2cfd66db5e5ded4e5cd0340585c"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eaeed7abfb5d64c539e2db173f63631455f1196c37d9d8d873fc316470dfbacd"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e91d635961bec2d8f19dfeb41a539eb94bd073f075ca6dae6c8dc0ee89ad6f91"}, - {file = "aiohttp-3.8.5-cp39-cp39-win32.whl", hash = "sha256:00ad4b6f185ec67f3e6562e8a1d2b69660be43070bd0ef6fcec5211154c7df67"}, - {file = "aiohttp-3.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:c0a9034379a37ae42dea7ac1e048352d96286626251862e448933c0f59cbd79c"}, - {file = "aiohttp-3.8.5.tar.gz", hash = "sha256:b9552ec52cc147dbf1944ac7ac98af7602e51ea2dcd076ed194ca3c0d1c7d0bc"}, + {file = "boto3-1.28.67-py3-none-any.whl", hash = "sha256:7d17f987a8b4f804e5ae509a30589736a72c6db7b0e2fb1338997128fdc9a3ec"}, + {file = "boto3-1.28.67.tar.gz", hash = "sha256:8db91c0648c9dcde1cf7fb4c15cd50da1fdef573595a9b9c769a303c7531b9a6"}, ] [package.dependencies] -aiosignal = ">=1.1.2" -async-timeout = ">=4.0.0a3,<5.0" -asynctest = {version = "0.13.0", markers = "python_version < \"3.8\""} -attrs = ">=17.3.0" -charset-normalizer = ">=2.0,<4.0" -frozenlist = ">=1.1.1" -multidict = ">=4.5,<7.0" -typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} -yarl = ">=1.0,<2.0" +botocore = ">=1.31.67,<1.32.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.7.0,<0.8.0" [package.extras] -speedups = ["Brotli", "aiodns", "cchardet"] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] -name = "aiosignal" -version = "1.3.1" -description = "aiosignal: a list of registered asynchronous callbacks" +name = "botocore" +version = "1.31.67" +description = "Low-level, data-driven core of boto 3." optional = false -python-versions = ">=3.7" +python-versions = ">= 3.7" files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, + {file = "botocore-1.31.67-py3-none-any.whl", hash = "sha256:487fb6ee4a6612613da370599b1a1aca0e159dd9e94b2e8aaa8e6ad9cc546ded"}, + {file = "botocore-1.31.67.tar.gz", hash = "sha256:ab3b73a2e03efa1c534a94f8db4a5cf45629a53e5478d2d154b0a3e2ffb05249"}, ] [package.dependencies] -frozenlist = ">=1.1.0" - -[[package]] -name = "async-timeout" -version = "4.0.2" -description = "Timeout context manager for asyncio programs" -optional = false -python-versions = ">=3.6" -files = [ - {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, - {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, -] - -[package.dependencies] -typing-extensions = {version = ">=3.6.5", markers = "python_version < \"3.8\""} - -[[package]] -name = "asynctest" -version = "0.13.0" -description = "Enhance the standard unittest package with features for testing asyncio libraries" -optional = false -python-versions = ">=3.5" -files = [ - {file = "asynctest-0.13.0-py3-none-any.whl", hash = "sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676"}, - {file = "asynctest-0.13.0.tar.gz", hash = "sha256:c27862842d15d83e6a34eb0b2866c323880eb3a75e4485b079ea11748fd77fac"}, -] - -[[package]] -name = "attrs" -version = "23.1.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - 
{file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, - {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = [ + {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""}, + {version = ">=1.25.4,<2.1", markers = "python_version >= \"3.10\""}, ] -[package.dependencies] -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} - [package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +crt = ["awscrt (==0.16.26)"] [[package]] name = "certifi" @@ -183,169 +54,101 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.2.0" +version = "3.3.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, - {file = 
"charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, - 
{file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, - {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, -] - -[[package]] -name = "frozenlist" -version = "1.3.3" -description = "A list-like structure which implements collections.abc.MutableSequence" -optional = false -python-versions = ">=3.7" -files = [ - {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4"}, - {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0"}, - {file = 
"frozenlist-1.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b1c63e8d377d039ac769cd0926558bb7068a1f7abb0f003e3717ee003ad85530"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fdfc24dcfce5b48109867c13b4cb15e4660e7bd7661741a391f821f23dfdca7"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c926450857408e42f0bbc295e84395722ce74bae69a3b2aa2a65fe22cb14b99"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1841e200fdafc3d51f974d9d377c079a0694a8f06de2e67b48150328d66d5483"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f470c92737afa7d4c3aacc001e335062d582053d4dbe73cda126f2d7031068dd"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:783263a4eaad7c49983fe4b2e7b53fa9770c136c270d2d4bbb6d2192bf4d9caf"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:924620eef691990dfb56dc4709f280f40baee568c794b5c1885800c3ecc69816"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae4dc05c465a08a866b7a1baf360747078b362e6a6dbeb0c57f234db0ef88ae0"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bed331fe18f58d844d39ceb398b77d6ac0b010d571cba8267c2e7165806b00ce"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:02c9ac843e3390826a265e331105efeab489ffaf4dd86384595ee8ce6d35ae7f"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9545a33965d0d377b0bc823dcabf26980e77f1b6a7caa368a365a9497fb09420"}, - {file = "frozenlist-1.3.3-cp310-cp310-win32.whl", hash = "sha256:d5cd3ab21acbdb414bb6c31958d7b06b85eeb40f66463c264a9b343a4e238642"}, - {file = "frozenlist-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:b756072364347cb6aa5b60f9bc18e94b2f79632de3b0190253ad770c5df17db1"}, - {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b4395e2f8d83fbe0c627b2b696acce67868793d7d9750e90e39592b3626691b7"}, - {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14143ae966a6229350021384870458e4777d1eae4c28d1a7aa47f24d030e6678"}, - {file = "frozenlist-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d8860749e813a6f65bad8285a0520607c9500caa23fea6ee407e63debcdbef6"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23d16d9f477bb55b6154654e0e74557040575d9d19fe78a161bd33d7d76808e8"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb82dbba47a8318e75f679690190c10a5e1f447fbf9df41cbc4c3afd726d88cb"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9309869032abb23d196cb4e4db574232abe8b8be1339026f489eeb34a4acfd91"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a97b4fe50b5890d36300820abd305694cb865ddb7885049587a5678215782a6b"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c188512b43542b1e91cadc3c6c915a82a5eb95929134faf7fd109f14f9892ce4"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:303e04d422e9b911a09ad499b0368dc551e8c3cd15293c99160c7f1f07b59a48"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0771aed7f596c7d73444c847a1c16288937ef988dc04fb9f7be4b2aa91db609d"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:66080ec69883597e4d026f2f71a231a1ee9887835902dbe6b6467d5a89216cf6"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:41fe21dc74ad3a779c3d73a2786bdf622ea81234bdd4faf90b8b03cad0c2c0b4"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f20380df709d91525e4bee04746ba612a4df0972c1b8f8e1e8af997e678c7b81"}, - {file = "frozenlist-1.3.3-cp311-cp311-win32.whl", hash = "sha256:f30f1928162e189091cf4d9da2eac617bfe78ef907a761614ff577ef4edfb3c8"}, - {file = "frozenlist-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a6394d7dadd3cfe3f4b3b186e54d5d8504d44f2d58dcc89d693698e8b7132b32"}, - {file = "frozenlist-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8df3de3a9ab8325f94f646609a66cbeeede263910c5c0de0101079ad541af332"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0693c609e9742c66ba4870bcee1ad5ff35462d5ffec18710b4ac89337ff16e27"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd4210baef299717db0a600d7a3cac81d46ef0e007f88c9335db79f8979c0d3d"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:394c9c242113bfb4b9aa36e2b80a05ffa163a30691c7b5a29eba82e937895d5e"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6327eb8e419f7d9c38f333cde41b9ae348bec26d840927332f17e887a8dcb70d"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e24900aa13212e75e5b366cb9065e78bbf3893d4baab6052d1aca10d46d944c"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3843f84a6c465a36559161e6c59dce2f2ac10943040c2fd021cfb70d58c4ad56"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:84610c1502b2461255b4c9b7d5e9c48052601a8957cd0aea6ec7a7a1e1fb9420"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c21b9aa40e08e4f63a2f92ff3748e6b6c84d717d033c7b3438dd3123ee18f70e"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:efce6ae830831ab6a22b9b4091d411698145cb9b8fc869e1397ccf4b4b6455cb"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40de71985e9042ca00b7953c4f41eabc3dc514a2d1ff534027f091bc74416401"}, - {file = "frozenlist-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:180c00c66bde6146a860cbb81b54ee0df350d2daf13ca85b275123bbf85de18a"}, - {file = "frozenlist-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9bbbcedd75acdfecf2159663b87f1bb5cfc80e7cd99f7ddd9d66eb98b14a8411"}, - {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:034a5c08d36649591be1cbb10e09da9f531034acfe29275fc5454a3b101ce41a"}, - {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba64dc2b3b7b158c6660d49cdb1d872d1d0bf4e42043ad8d5006099479a194e5"}, - {file = "frozenlist-1.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47df36a9fe24054b950bbc2db630d508cca3aa27ed0566c0baf661225e52c18e"}, - {file = 
"frozenlist-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:008a054b75d77c995ea26629ab3a0c0d7281341f2fa7e1e85fa6153ae29ae99c"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:841ea19b43d438a80b4de62ac6ab21cfe6827bb8a9dc62b896acc88eaf9cecba"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e235688f42b36be2b6b06fc37ac2126a73b75fb8d6bc66dd632aa35286238703"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca713d4af15bae6e5d79b15c10c8522859a9a89d3b361a50b817c98c2fb402a2"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ac5995f2b408017b0be26d4a1d7c61bce106ff3d9e3324374d66b5964325448"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4ae8135b11652b08a8baf07631d3ebfe65a4c87909dbef5fa0cdde440444ee4"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ea42116ceb6bb16dbb7d526e242cb6747b08b7710d9782aa3d6732bd8d27649"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:810860bb4bdce7557bc0febb84bbd88198b9dbc2022d8eebe5b3590b2ad6c842"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ee78feb9d293c323b59a6f2dd441b63339a30edf35abcb51187d2fc26e696d13"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0af2e7c87d35b38732e810befb9d797a99279cbb85374d42ea61c1e9d23094b3"}, - {file = "frozenlist-1.3.3-cp38-cp38-win32.whl", hash = "sha256:899c5e1928eec13fd6f6d8dc51be23f0d09c5281e40d9cf4273d188d9feeaf9b"}, - {file = "frozenlist-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:7f44e24fa70f6fbc74aeec3e971f60a14dde85da364aa87f15d1be94ae75aeef"}, - {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2b07ae0c1edaa0a36339ec6cce700f51b14a3fc6545fdd32930d2c83917332cf"}, - {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ebb86518203e12e96af765ee89034a1dbb0c3c65052d1b0c19bbbd6af8a145e1"}, - {file = "frozenlist-1.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5cf820485f1b4c91e0417ea0afd41ce5cf5965011b3c22c400f6d144296ccbc0"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c11e43016b9024240212d2a65043b70ed8dfd3b52678a1271972702d990ac6d"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8fa3c6e3305aa1146b59a09b32b2e04074945ffcfb2f0931836d103a2c38f936"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:352bd4c8c72d508778cf05ab491f6ef36149f4d0cb3c56b1b4302852255d05d5"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65a5e4d3aa679610ac6e3569e865425b23b372277f89b5ef06cf2cdaf1ebf22b"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e2c1185858d7e10ff045c496bbf90ae752c28b365fef2c09cf0fa309291669"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f163d2fd041c630fed01bc48d28c3ed4a3b003c00acd396900e11ee5316b56bb"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:05cdb16d09a0832eedf770cb7bd1fe57d8cf4eaf5aced29c4e41e3f20b30a784"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8bae29d60768bfa8fb92244b74502b18fae55a80eac13c88eb0b496d4268fd2d"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eedab4c310c0299961ac285591acd53dc6723a1ebd90a57207c71f6e0c2153ab"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3bbdf44855ed8f0fbcd102ef05ec3012d6a4fd7c7562403f76ce6a52aeffb2b1"}, - {file = "frozenlist-1.3.3-cp39-cp39-win32.whl", hash = "sha256:efa568b885bca461f7c7b9e032655c0c143d305bf01c30caf6db2854a4532b38"}, - {file = "frozenlist-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfe33efc9cb900a4c46f91a5ceba26d6df370ffddd9ca386eb1d4f0ad97b9ea9"}, - {file = "frozenlist-1.3.3.tar.gz", hash = "sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a"}, + {file = "charset-normalizer-3.3.0.tar.gz", hash = "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-win32.whl", hash = "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d"}, + {file = 
"charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-win32.whl", hash = "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908"}, + {file = 
"charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-win32.whl", hash = "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-win_amd64.whl", hash = 
"sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-win32.whl", hash = "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479"}, + {file = 
"charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-win32.whl", hash = "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884"}, + {file = "charset_normalizer-3.3.0-py3-none-any.whl", hash = "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2"}, ] [[package]] @@ -360,134 +163,39 @@ files = [ ] [[package]] -name = "importlib-metadata" -version = "6.7.0" -description = "Read metadata from Python packages" +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" optional = false python-versions = ">=3.7" files = [ - {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"}, - {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"}, -] - -[package.dependencies] -typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} -zipp = ">=0.5" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] - -[[package]] -name = "multidict" -version = "6.0.4" -description = "multidict implementation" -optional = false -python-versions = ">=3.7" -files = [ - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash 
= "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, - {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, - {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = 
"sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, - {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, - {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, - {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, - {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, - {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, - {file = 
"multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, - {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, - {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, - {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, - {file = 
"multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, - {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, ] [[package]] name = "opensearch-py" -version = "2.2.0" +version = "2.3.2" description = "Python client for OpenSearch" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4" -files = [] -develop = false +files = [ + {file = "opensearch-py-2.3.2.tar.gz", hash = "sha256:96e470b55107fd5bfd873722dc9808c333360eacfa174341f5cc2d021aa30448"}, + {file = "opensearch_py-2.3.2-py2.py3-none-any.whl", hash = "sha256:b1d6607380c8f19d90c142470939d051f0bac96069ce0ac25970b3c39c431f8b"}, +] [package.dependencies] -aiohttp = {version = ">=3,<4", optional = true, markers = "extra == \"async\""} certifi = ">=2022.12.07" python-dateutil = "*" requests = ">=2.4.0,<3.0.0" six = "*" -urllib3 = ">=1.21.1,<2" +urllib3 = ">=1.26.9" [package.extras] async = ["aiohttp (>=3,<4)"] -develop = ["black", "botocore", "coverage (<7.0.0)", "jinja2", "mock", "myst_parser", "pytest (>=3.0.0)", "pytest-cov", "pytest-mock (<4.0.0)", "pytz", "pyyaml", "requests (>=2.0.0,<3.0.0)", "sphinx", "sphinx_copybutton", "sphinx_rtd_theme"] -docs = ["myst_parser", "sphinx", "sphinx_copybutton", "sphinx_rtd_theme"] -kerberos = ["requests_kerberos"] - -[package.source] -type = "directory" -url = ".." +develop = ["black", "botocore", "coverage (<7.0.0)", "jinja2", "mock", "myst-parser", "pytest (>=3.0.0)", "pytest-cov", "pytest-mock (<4.0.0)", "pytz", "pyyaml", "requests (>=2.0.0,<3.0.0)", "sphinx", "sphinx-copybutton", "sphinx-rtd-theme"] +docs = ["myst-parser", "sphinx", "sphinx-copybutton", "sphinx-rtd-theme"] +kerberos = ["requests-kerberos"] [[package]] name = "python-dateutil" @@ -524,6 +232,23 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "s3transfer" +version = "0.7.0" +description = "An Amazon S3 Transfer Manager" +optional = false +python-versions = ">= 3.7" +files = [ + {file = "s3transfer-0.7.0-py3-none-any.whl", hash = "sha256:10d6923c6359175f264811ef4bf6161a3156ce8e350e705396a7557d6293c33a"}, + {file = "s3transfer-0.7.0.tar.gz", hash = "sha256:fd3889a66f5fe17299fe75b82eae6cf722554edca744ca5d5fe308b104883d2e"}, +] + +[package.dependencies] +botocore = ">=1.12.36,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] + [[package]] name = "six" version = "1.16.0" @@ -535,137 +260,40 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] -[[package]] -name = "typing-extensions" -version = "4.7.1" -description = "Backported and Experimental Type Hints for Python 3.7+" -optional = false -python-versions = ">=3.7" -files = [ - {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, - {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, -] - [[package]] name = "urllib3" -version = "1.26.16" +version = "1.26.18" description = "HTTP library with thread-safe connection pooling, file post, and more." 
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
 files = [
- {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"},
- {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"},
+ {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"},
+ {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"},
 ]
 
 [package.extras]
-brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
+brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
 secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
 socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
 
 [[package]]
-name = "yarl"
-version = "1.9.2"
-description = "Yet another URL library"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"},
- {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"},
- {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"},
- {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"},
- {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"},
- {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"},
- {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"},
- {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"},
- {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"},
- {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"},
- {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"},
- {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"},
- {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"},
- {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"},
- {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"},
- {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"},
- {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"},
- {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"},
- {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"},
- {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"},
- {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"},
- {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"},
- {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"},
- {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"},
- {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"},
- {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"},
- {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"},
- {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"},
- {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"},
- {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"},
- {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"},
- {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"},
- {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"},
- {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"},
- {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"},
- {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"},
- {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"},
- {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"},
- {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"},
- {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"},
- {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"},
- {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"},
- {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"},
- {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"},
- {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"},
- {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"},
- {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"},
- {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"},
- {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"},
- {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"},
- {file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"},
- {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"},
- {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"},
- {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"},
- {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"},
- {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"},
- {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"},
- {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"},
- {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"},
- {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"},
- {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"},
- {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"},
- {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"},
- {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"},
- {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"},
- {file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"},
- {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"},
- {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"},
- {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"},
- {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"},
- {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"},
- {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"},
- {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"},
- {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"},
-]
-
-[package.dependencies]
-idna = ">=2.0"
-multidict = ">=4.0"
-typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""}
-
-[[package]]
-name = "zipp"
-version = "3.15.0"
-description = "Backport of pathlib-compatible object wrapper for zip files"
+name = "urllib3"
+version = "2.0.7"
+description = "HTTP library with thread-safe connection pooling, file post, and more."
 optional = false
 python-versions = ">=3.7"
 files = [
- {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"},
- {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"},
+ {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"},
+ {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"},
 ]
 
 [package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
+brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
+secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"]
+socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
+zstd = ["zstandard (>=0.18.0)"]
 
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.7"
-content-hash = "daae1667da61b85f9bbc2e5c484721c424594da7cb08fe9cf85c08d6731bcc52"
+content-hash = "1309989011bed3cb46e36fc451b65f040ef9fe9cecbe3f3706be240d4ea6d52e"
diff --git a/samples/pyproject.toml b/samples/pyproject.toml
index 380e2c9a..8a89367f 100644
--- a/samples/pyproject.toml
+++ b/samples/pyproject.toml
@@ -8,7 +8,8 @@ readme = "README.md"
 
 [tool.poetry.dependencies]
 python = "^3.7"
-opensearch-py = { path = "../", extras=["async"] }
+opensearch-py = { path = "../", extras=["async"], develop = true }
+boto3 = "^1.28"
 
 [build-system]
 requires = ["poetry-core"]
diff --git a/samples/security/roles.py b/samples/security/roles.py
index a77d6eb0..8a2d1ef5 100644
--- a/samples/security/roles.py
+++ b/samples/security/roles.py
@@ -1,10 +1,14 @@
 #!/usr/bin/env python
+# -*- coding: utf-8 -*-
 
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
 # this file be licensed under the Apache-2.0 license or a
 # compatible open source license.
+#
+# Modifications Copyright OpenSearch Contributors. See
+# GitHub history for details.
 
 # A basic OpenSearch sample that create and manage roles.
 
@@ -13,16 +17,16 @@
 
 # connect to OpenSearch
 
-host = 'localhost'
+host = "localhost"
 port = 9200
-auth = ('admin', 'admin') # For testing only. Don't store credentials in code.
+auth = ("admin", "admin")  # For testing only. Don't store credentials in code.
client = OpenSearch( - hosts = [{'host': host, 'port': port}], - http_auth = auth, - use_ssl = True, - verify_certs = False, - ssl_show_warn = False + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, ) # Create a Role @@ -30,16 +34,16 @@ role_name = "test-role" role_content = { - "cluster_permissions": ["cluster_monitor"], - "index_permissions": [ - { - "index_patterns": ["index", "test-*"], - "allowed_actions": [ - "data_access", - "indices_monitor", - ], - } - ], + "cluster_permissions": ["cluster_monitor"], + "index_permissions": [ + { + "index_patterns": ["index", "test-*"], + "allowed_actions": [ + "data_access", + "indices_monitor", + ], + } + ], } response = client.security.create_role(role_name, body=role_content) diff --git a/samples/security/users.py b/samples/security/users.py index b4bb8e3b..0a778b8d 100644 --- a/samples/security/users.py +++ b/samples/security/users.py @@ -1,10 +1,14 @@ #!/usr/bin/env python +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. # A basic OpenSearch sample that create and manage users. @@ -13,16 +17,16 @@ # connect to OpenSearch -host = 'localhost' +host = "localhost" port = 9200 -auth = ('admin', 'admin') # For testing only. Don't store credentials in code. +auth = ("admin", "admin") # For testing only. Don't store credentials in code. client = OpenSearch( - hosts = [{'host': host, 'port': port}], - http_auth = auth, - use_ssl = True, - verify_certs = False, - ssl_show_warn = False + hosts=[{"host": host, "port": port}], + http_auth=auth, + use_ssl=True, + verify_certs=False, + ssl_show_warn=False, ) # Create a User diff --git a/setup.py b/setup.py index 3bce64d1..b608990e 100644 --- a/setup.py +++ b/setup.py @@ -27,18 +27,18 @@ import re -import sys from os.path import abspath, dirname, join from setuptools import find_packages, setup package_name = "opensearch-py" +package_version = "" base_dir = abspath(dirname(__file__)) with open(join(base_dir, package_name.replace("-", ""), "_version.py")) as f: - package_version = re.search( - r"__versionstr__\s+=\s+[\"\']([^\"\']+)[\"\']", f.read() - ).group(1) + m = re.search(r"__versionstr__\s+=\s+[\"\']([^\"\']+)[\"\']", f.read()) + if m: + package_version = m.group(1) with open(join(base_dir, "README.md")) as f: long_description = f.read().strip() @@ -50,12 +50,11 @@ if package == module_dir or package.startswith(module_dir + ".") ] install_requires = [ - "urllib3>=1.21.1", + "urllib3>=1.26.17", "requests>=2.4.0, <3.0.0", "six", "python-dateutil", - # ipaddress is included in stdlib since python 3.3 - 'ipaddress; python_version<"3.3"', + "certifi>=2022.12.07", ] tests_require = [ "requests>=2.0.0, <3.0.0", @@ -65,11 +64,9 @@ "pytest>=3.0.0", "pytest-cov", "pytz", - "botocore;python_version>='3.6'", + "botocore", + "pytest-mock<4.0.0", ] -if sys.version_info >= (3, 6): - tests_require.append("pytest-mock<4.0.0") - install_requires.append("certifi>=2022.12.07") async_require = ["aiohttp>=3,<4"] @@ -94,7 +91,7 @@ "Issue Tracker": "https://github.com/opensearch-project/opensearch-py/issues", }, packages=packages, - package_data={"opensearchpy": ["py.typed", "*.pyi"]}, + package_data={"opensearchpy": ["py.typed"]}, include_package_data=True, zip_safe=False, classifiers=[ @@ -103,11 +100,6 
@@ "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.4", - "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", diff --git a/test_opensearchpy/TestHttpServer.py b/test_opensearchpy/TestHttpServer.py index e96670cc..3d8b31fb 100644 --- a/test_opensearchpy/TestHttpServer.py +++ b/test_opensearchpy/TestHttpServer.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -10,10 +11,11 @@ import json import threading from http.server import BaseHTTPRequestHandler, HTTPServer +from typing import Any class TestHTTPRequestHandler(BaseHTTPRequestHandler): - def do_GET(self): + def do_GET(self) -> None: headers = self.headers if self.path == "/redirect": @@ -39,19 +41,20 @@ def do_GET(self): class TestHTTPServer(HTTPServer): __test__ = False + _server_thread: Any - def __init__(self, host="localhost", port=8080): + def __init__(self, host: str = "localhost", port: int = 8080) -> None: super().__init__((host, port), TestHTTPRequestHandler) self._server_thread = None - def start(self): + def start(self) -> None: if self._server_thread is not None: return self._server_thread = threading.Thread(target=self.serve_forever) self._server_thread.start() - def stop(self): + def stop(self) -> None: if self._server_thread is None: return self.socket.close() diff --git a/test_opensearchpy/__init__.py b/test_opensearchpy/__init__.py index 7e52ae22..392fa5bd 100644 --- a/test_opensearchpy/__init__.py +++ b/test_opensearchpy/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/run_tests.py b/test_opensearchpy/run_tests.py index e0461af7..b37fd598 100755 --- a/test_opensearchpy/run_tests.py +++ b/test_opensearchpy/run_tests.py @@ -4,6 +4,11 @@ # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + + # # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. 
@@ -32,9 +37,10 @@ import sys from os import environ from os.path import abspath, dirname, exists, join, pardir +from typing import Any -def fetch_opensearch_repo(): +def fetch_opensearch_repo() -> None: # user is manually setting YAML dir, don't tamper with it if "TEST_OPENSEARCH_YAML_DIR" in environ: return @@ -83,8 +89,8 @@ def fetch_opensearch_repo(): subprocess.check_call("cd %s && git fetch origin %s" % (repo_path, sha), shell=True) -def run_all(argv=None): - sys.exitfunc = lambda: sys.stderr.write("Shutting down....\n") +def run_all(argv: Any = None) -> None: + sys.exitfunc = lambda: sys.stderr.write("Shutting down....\n") # type: ignore # fetch yaml tests anywhere that's not GitHub Actions if "GITHUB_ACTION" not in environ: fetch_opensearch_repo() @@ -116,27 +122,19 @@ def run_all(argv=None): if test_pattern: argv.append("-k %s" % test_pattern) else: - ignores = [] - # Python 3.6+ is required for async - if sys.version_info < (3, 6): - ignores.append("test_opensearchpy/test_async/") - - ignores.extend( - [ - "test_opensearchpy/test_server/", - "test_opensearchpy/test_server_secured/", - "test_opensearchpy/test_async/test_server/", - "test_opensearchpy/test_async/test_server_secured/", - ] - ) + ignores = [ + "test_opensearchpy/test_server/", + "test_opensearchpy/test_server_secured/", + "test_opensearchpy/test_async/test_server/", + "test_opensearchpy/test_async/test_server_secured/", + ] # Jenkins/Github actions, only run server tests if environ.get("TEST_TYPE") == "server": test_dir = abspath(dirname(__file__)) if secured: argv.append(join(test_dir, "test_server_secured")) - if sys.version_info >= (3, 6): - argv.append(join(test_dir, "test_async/test_server_secured")) + argv.append(join(test_dir, "test_async/test_server_secured")) ignores.extend( [ "test_opensearchpy/test_server/", @@ -145,8 +143,7 @@ def run_all(argv=None): ) else: argv.append(join(test_dir, "test_server")) - if sys.version_info >= (3, 6): - argv.append(join(test_dir, "test_async/test_server")) + argv.append(join(test_dir, "test_async/test_server")) ignores.extend( [ "test_opensearchpy/test_server_secured/", diff --git a/test_opensearchpy/test_async/__init__.py b/test_opensearchpy/test_async/__init__.py index 7e52ae22..392fa5bd 100644 --- a/test_opensearchpy/test_async/__init__.py +++ b/test_opensearchpy/test_async/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_connection.py b/test_opensearchpy/test_async/test_connection.py index 147a6a3a..9413d0e8 100644 --- a/test_opensearchpy/test_async/test_connection.py +++ b/test_opensearchpy/test_async/test_connection.py @@ -32,9 +32,11 @@ import ssl import warnings from platform import python_version +from typing import Any import aiohttp import pytest +from _pytest.mark.structures import MarkDecorator from mock import patch from multidict import CIMultiDict from pytest import raises @@ -45,46 +47,41 @@ from opensearchpy.exceptions import ConnectionError, TransportError from test_opensearchpy.TestHttpServer import TestHTTPServer -pytestmark = pytest.mark.asyncio - - -def gzip_decompress(data): - buf = gzip.GzipFile(fileobj=io.BytesIO(data), mode="rb") - return buf.read() +pytestmark: MarkDecorator = pytest.mark.asyncio class TestAIOHttpConnection: async def _get_mock_connection( self, - connection_params={}, - response_code=200, - response_body=b"{}", - response_headers={}, - ): + connection_params: Any = {}, + 
response_code: int = 200, + response_body: bytes = b"{}", + response_headers: Any = {}, + ) -> Any: con = AIOHttpConnection(**connection_params) await con._create_aiohttp_session() - def _dummy_request(*args, **kwargs): + def _dummy_request(*args: Any, **kwargs: Any) -> Any: class DummyResponse: - async def __aenter__(self, *_, **__): + async def __aenter__(self, *_: Any, **__: Any) -> Any: return self - async def __aexit__(self, *_, **__): + async def __aexit__(self, *_: Any, **__: Any) -> None: pass - async def text(self): + async def text(self) -> Any: return response_body.decode("utf-8", "surrogatepass") - dummy_response = DummyResponse() + dummy_response: Any = DummyResponse() dummy_response.headers = CIMultiDict(**response_headers) dummy_response.status = response_code - _dummy_request.call_args = (args, kwargs) + _dummy_request.call_args = (args, kwargs) # type: ignore return dummy_response con.session.request = _dummy_request return con - async def test_ssl_context(self): + async def test_ssl_context(self) -> None: try: context = ssl.create_default_context() except AttributeError: @@ -100,11 +97,11 @@ async def test_ssl_context(self): assert con.use_ssl assert con.session.connector._ssl == context - async def test_opaque_id(self): + async def test_opaque_id(self) -> None: con = AIOHttpConnection(opaque_id="app-1") assert con.headers["x-opaque-id"] == "app-1" - async def test_no_http_compression(self): + async def test_no_http_compression(self) -> None: con = await self._get_mock_connection() assert not con.http_compress assert "accept-encoding" not in con.headers @@ -117,7 +114,7 @@ async def test_no_http_compression(self): assert "accept-encoding" not in kwargs["headers"] assert "content-encoding" not in kwargs["headers"] - async def test_http_compression(self): + async def test_http_compression(self) -> None: con = await self._get_mock_connection({"http_compress": True}) assert con.http_compress assert con.headers["accept-encoding"] == "gzip,deflate" @@ -130,7 +127,8 @@ async def test_http_compression(self): _, kwargs = con.session.request.call_args - assert gzip_decompress(kwargs["data"]) == b"{}" + buf = gzip.GzipFile(fileobj=io.BytesIO(kwargs["data"]), mode="rb") + assert buf.read() == b"{}" assert kwargs["headers"]["accept-encoding"] == "gzip,deflate" assert kwargs["headers"]["content-encoding"] == "gzip" @@ -142,7 +140,7 @@ async def test_http_compression(self): assert kwargs["headers"]["accept-encoding"] == "gzip,deflate" assert "content-encoding" not in kwargs["headers"] - async def test_url_prefix(self): + async def test_url_prefix(self) -> None: con = await self._get_mock_connection( connection_params={"url_prefix": "/_search/"} ) @@ -154,18 +152,18 @@ async def test_url_prefix(self): method, yarl_url = con.session.request.call_args[0] assert method == "GET" and str(yarl_url) == "http://localhost:9200/_search/" - async def test_default_user_agent(self): + async def test_default_user_agent(self) -> None: con = AIOHttpConnection() assert con._get_default_user_agent() == "opensearch-py/%s (Python %s)" % ( __versionstr__, python_version(), ) - async def test_timeout_set(self): + async def test_timeout_set(self) -> None: con = AIOHttpConnection(timeout=42) assert 42 == con.timeout - async def test_keep_alive_is_on_by_default(self): + async def test_keep_alive_is_on_by_default(self) -> None: con = AIOHttpConnection() assert { "connection": "keep-alive", @@ -173,7 +171,7 @@ async def test_keep_alive_is_on_by_default(self): "user-agent": con._get_default_user_agent(), } == 
con.headers - async def test_http_auth(self): + async def test_http_auth(self) -> None: con = AIOHttpConnection(http_auth="username:secret") assert { "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", @@ -182,7 +180,7 @@ async def test_http_auth(self): "user-agent": con._get_default_user_agent(), } == con.headers - async def test_http_auth_tuple(self): + async def test_http_auth_tuple(self) -> None: con = AIOHttpConnection(http_auth=("username", "secret")) assert { "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", @@ -191,7 +189,7 @@ async def test_http_auth_tuple(self): "user-agent": con._get_default_user_agent(), } == con.headers - async def test_http_auth_list(self): + async def test_http_auth_list(self) -> None: con = AIOHttpConnection(http_auth=["username", "secret"]) assert { "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", @@ -200,7 +198,7 @@ async def test_http_auth_list(self): "user-agent": con._get_default_user_agent(), } == con.headers - async def test_uses_https_if_verify_certs_is_off(self): + async def test_uses_https_if_verify_certs_is_off(self) -> None: with warnings.catch_warnings(record=True) as w: con = AIOHttpConnection(use_ssl=True, verify_certs=False) assert 1 == len(w) @@ -213,7 +211,7 @@ async def test_uses_https_if_verify_certs_is_off(self): assert con.scheme == "https" assert con.host == "https://localhost:9200" - async def test_nowarn_when_test_uses_https_if_verify_certs_is_off(self): + async def test_nowarn_when_test_uses_https_if_verify_certs_is_off(self) -> None: with warnings.catch_warnings(record=True) as w: con = AIOHttpConnection( use_ssl=True, verify_certs=False, ssl_show_warn=False @@ -223,17 +221,18 @@ async def test_nowarn_when_test_uses_https_if_verify_certs_is_off(self): assert isinstance(con.session, aiohttp.ClientSession) - async def test_doesnt_use_https_if_not_specified(self): + async def test_doesnt_use_https_if_not_specified(self) -> None: con = AIOHttpConnection() assert not con.use_ssl - async def test_no_warning_when_using_ssl_context(self): + async def test_no_warning_when_using_ssl_context(self) -> None: ctx = ssl.create_default_context() with warnings.catch_warnings(record=True) as w: AIOHttpConnection(ssl_context=ctx) assert w == [], str([x.message for x in w]) - async def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self): + async def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self) -> None: + kwargs: Any for kwargs in ( {"ssl_show_warn": False}, {"ssl_show_warn": True}, @@ -256,32 +255,34 @@ async def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self): ) @patch("ssl.SSLContext.load_verify_locations") - async def test_uses_given_ca_certs(self, load_verify_locations, tmp_path): + async def test_uses_given_ca_certs( + self, load_verify_locations: Any, tmp_path: Any + ) -> None: path = tmp_path / "ca_certs.pem" path.touch() AIOHttpConnection(use_ssl=True, ca_certs=str(path)) load_verify_locations.assert_called_once_with(cafile=str(path)) @patch("ssl.SSLContext.load_verify_locations") - async def test_uses_default_ca_certs(self, load_verify_locations): + async def test_uses_default_ca_certs(self, load_verify_locations: Any) -> None: AIOHttpConnection(use_ssl=True) load_verify_locations.assert_called_once_with( cafile=Connection.default_ca_certs() ) @patch("ssl.SSLContext.load_verify_locations") - async def test_uses_no_ca_certs(self, load_verify_locations): + async def test_uses_no_ca_certs(self, load_verify_locations: Any) -> None: AIOHttpConnection(use_ssl=True, verify_certs=False) 
load_verify_locations.assert_not_called() - async def test_trust_env(self): - con = AIOHttpConnection(trust_env=True) + async def test_trust_env(self) -> None: + con: Any = AIOHttpConnection(trust_env=True) await con._create_aiohttp_session() assert con._trust_env is True assert con.session.trust_env is True - async def test_trust_env_default_value_is_false(self): + async def test_trust_env_default_value_is_false(self) -> None: con = AIOHttpConnection() await con._create_aiohttp_session() @@ -289,7 +290,7 @@ async def test_trust_env_default_value_is_false(self): assert con.session.trust_env is False @patch("opensearchpy.connection.base.logger") - async def test_uncompressed_body_logged(self, logger): + async def test_uncompressed_body_logged(self, logger: Any) -> None: con = await self._get_mock_connection(connection_params={"http_compress": True}) await con.perform_request("GET", "/", body=b'{"example": "body"}') @@ -299,17 +300,17 @@ async def test_uncompressed_body_logged(self, logger): assert '> {"example": "body"}' == req[0][0] % req[0][1:] assert "< {}" == resp[0][0] % resp[0][1:] - async def test_surrogatepass_into_bytes(self): + async def test_surrogatepass_into_bytes(self) -> None: buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" con = await self._get_mock_connection(response_body=buf) status, headers, data = await con.perform_request("GET", "/") assert u"你好\uda6a" == data # fmt: skip - @pytest.mark.parametrize("exception_cls", reraise_exceptions) - async def test_recursion_error_reraised(self, exception_cls): + @pytest.mark.parametrize("exception_cls", reraise_exceptions) # type: ignore + async def test_recursion_error_reraised(self, exception_cls: Any) -> None: conn = AIOHttpConnection() - def request_raise(*_, **__): + def request_raise(*_: Any, **__: Any) -> Any: raise exception_cls("Wasn't modified!") await conn._create_aiohttp_session() @@ -319,7 +320,7 @@ def request_raise(*_, **__): await conn.perform_request("GET", "/") assert str(e.value) == "Wasn't modified!" 
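The `test_http_compression` change above inlines the old `gzip_decompress` helper at its single call site. A standalone round-trip of the same check, with an illustrative payload standing in for the captured request body:

```python
import gzip
import io

# Compress a request body the way the connection would...
body = gzip.compress(b"{}")

# ...then decompress with gzip.GzipFile, as the inlined assertion does.
buf = gzip.GzipFile(fileobj=io.BytesIO(body), mode="rb")
assert buf.read() == b"{}"
```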
- async def test_json_errors_are_parsed(self): + async def test_json_errors_are_parsed(self) -> None: con = await self._get_mock_connection( response_code=400, response_body=b'{"error": {"type": "snapshot_in_progress_exception"}}', @@ -337,23 +338,25 @@ async def test_json_errors_are_parsed(self): class TestConnectionHttpServer: """Tests the HTTP connection implementations against a live server E2E""" + server: Any + @classmethod - def setup_class(cls): + def setup_class(cls) -> None: # Start server cls.server = TestHTTPServer(port=8081) cls.server.start() @classmethod - def teardown_class(cls): + def teardown_class(cls) -> None: # Stop server cls.server.stop() - async def httpserver(self, conn, **kwargs): + async def httpserver(self, conn: Any, **kwargs: Any) -> Any: status, headers, data = await conn.perform_request("GET", "/", **kwargs) data = json.loads(data) return (status, data) - async def test_aiohttp_connection(self): + async def test_aiohttp_connection(self) -> None: # Defaults conn = AIOHttpConnection("localhost", port=8081, use_ssl=False) user_agent = conn._get_default_user_agent() @@ -413,13 +416,13 @@ async def test_aiohttp_connection(self): "User-Agent": user_agent, } - async def test_aiohttp_connection_error(self): + async def test_aiohttp_connection_error(self) -> None: conn = AIOHttpConnection("not.a.host.name") with pytest.raises(ConnectionError): await conn.perform_request("GET", "/") -async def test_default_connection_is_returned_by_default(): +async def test_default_connection_is_returned_by_default() -> None: c = async_connections.AsyncConnections() con, con2 = object(), object() @@ -430,7 +433,7 @@ async def test_default_connection_is_returned_by_default(): assert await c.get_connection() is con -async def test_get_connection_created_connection_if_needed(): +async def test_get_connection_created_connection_if_needed() -> None: c = async_connections.AsyncConnections() await c.configure( default={"hosts": ["opensearch.com"]}, local={"hosts": ["localhost"]} @@ -443,7 +446,7 @@ async def test_get_connection_created_connection_if_needed(): assert [{"host": "localhost"}] == local.transport.hosts -async def test_configure_preserves_unchanged_connections(): +async def test_configure_preserves_unchanged_connections() -> None: c = async_connections.AsyncConnections() await c.configure( @@ -462,7 +465,7 @@ async def test_configure_preserves_unchanged_connections(): assert new_default is not default -async def test_remove_connection_removes_both_conn_and_conf(): +async def test_remove_connection_removes_both_conn_and_conf() -> None: c = async_connections.AsyncConnections() await c.configure( @@ -479,7 +482,7 @@ async def test_remove_connection_removes_both_conn_and_conf(): await c.get_connection("default") -async def test_create_connection_constructs_client(): +async def test_create_connection_constructs_client() -> None: c = async_connections.AsyncConnections() await c.create_connection("testing", hosts=["opensearch.com"]) @@ -487,7 +490,7 @@ async def test_create_connection_constructs_client(): assert [{"host": "opensearch.com"}] == con.transport.hosts -async def test_create_connection_adds_our_serializer(): +async def test_create_connection_adds_our_serializer() -> None: c = async_connections.AsyncConnections() await c.create_connection("testing", hosts=["opensearch.com"]) result = await c.get_connection("testing") diff --git a/test_opensearchpy/test_async/test_helpers/__init__.py b/test_opensearchpy/test_async/test_helpers/__init__.py index 7e52ae22..392fa5bd 100644 --- 
a/test_opensearchpy/test_async/test_helpers/__init__.py +++ b/test_opensearchpy/test_async/test_helpers/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_helpers/conftest.py b/test_opensearchpy/test_async/test_helpers/conftest.py index 56a6bf31..bd1776ab 100644 --- a/test_opensearchpy/test_async/test_helpers/conftest.py +++ b/test_opensearchpy/test_async/test_helpers/conftest.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -8,27 +9,30 @@ # GitHub history for details. +from typing import Any + import pytest +from _pytest.mark.structures import MarkDecorator from mock import Mock from pytest import fixture from opensearchpy.connection.async_connections import add_connection, async_connections -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio -@fixture -async def mock_client(dummy_response): +@fixture # type: ignore +async def mock_client(dummy_response: Any) -> Any: client = Mock() client.search.return_value = dummy_response await add_connection("mock", client) yield client - async_connections._conn = {} + async_connections._conns = {} async_connections._kwargs = {} -@fixture -def dummy_response(): +@fixture # type: ignore +def dummy_response() -> Any: return { "_shards": {"failed": 0, "successful": 10, "total": 10}, "hits": { @@ -76,8 +80,8 @@ def dummy_response(): } -@fixture -def aggs_search(): +@fixture # type: ignore +def aggs_search() -> Any: from opensearchpy._async.helpers.search import AsyncSearch s = AsyncSearch(index="flat-git") @@ -91,8 +95,8 @@ def aggs_search(): return s -@fixture -def aggs_data(): +@fixture # type: ignore +def aggs_data() -> Any: return { "took": 4, "timed_out": False, diff --git a/test_opensearchpy/test_async/test_helpers/test_document.py b/test_opensearchpy/test_async/test_helpers/test_document.py index 44aaf1b5..d6ef0128 100644 --- a/test_opensearchpy/test_async/test_helpers/test_document.py +++ b/test_opensearchpy/test_async/test_helpers/test_document.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -14,8 +15,10 @@ import pickle from datetime import datetime from hashlib import sha256 +from typing import Any import pytest +from _pytest.mark.structures import MarkDecorator from pytest import raises from opensearchpy import InnerDoc, MetaField, Range, analyzer @@ -25,29 +28,29 @@ from opensearchpy.exceptions import IllegalOperation, ValidationException from opensearchpy.helpers import field, utils -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class MyInner(InnerDoc): - old_field = field.Text() + old_field: Any = field.Text() class MyDoc(document.AsyncDocument): - title = field.Keyword() - name = field.Text() - created_at = field.Date() - inner = field.Object(MyInner) + title: Any = field.Keyword() + name: Any = field.Text() + created_at: Any = field.Date() + inner: Any = field.Object(MyInner) class MySubDoc(MyDoc): - name = field.Keyword() + name: Any = field.Keyword() class Index: name = "default-index" class MyDoc2(document.AsyncDocument): - extra = field.Long() + extra: Any = field.Long() class MyMultiSubDoc(MyDoc2, MySubDoc): @@ -55,19 +58,19 @@ class MyMultiSubDoc(MyDoc2, MySubDoc): class Comment(InnerDoc): - title = field.Text() - tags = 
field.Keyword(multi=True) + title: Any = field.Text() + tags: Any = field.Keyword(multi=True) class DocWithNested(document.AsyncDocument): - comments = field.Nested(Comment) + comments: Any = field.Nested(Comment) class Index: name = "test-doc-with-nested" class SimpleCommit(document.AsyncDocument): - files = field.Text(multi=True) + files: Any = field.Text(multi=True) class Index: name = "test-git" @@ -78,48 +81,54 @@ class Secret(str): class SecretField(field.CustomField): - builtin_type = "text" + builtin_type: Any = "text" - def _serialize(self, data): + def _serialize(self, data: Any) -> Any: return codecs.encode(data, "rot_13") - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: if isinstance(data, Secret): return data return Secret(codecs.decode(data, "rot_13")) class SecretDoc(document.AsyncDocument): - title = SecretField(index="no") + title: Any = SecretField(index="no") class Index: name = "test-secret-doc" class NestedSecret(document.AsyncDocument): - secrets = field.Nested(SecretDoc) + secrets: Any = field.Nested(SecretDoc) class Index: name = "test-nested-secret" + _index: Any + class OptionalObjectWithRequiredField(document.AsyncDocument): - comments = field.Nested(properties={"title": field.Keyword(required=True)}) + comments: Any = field.Nested(properties={"title": field.Keyword(required=True)}) class Index: name = "test-required" + _index: Any + class Host(document.AsyncDocument): - ip = field.Ip() + ip: Any = field.Ip() class Index: name = "test-host" + _index: Any + -async def test_range_serializes_properly(): +async def test_range_serializes_properly() -> None: class D(document.AsyncDocument): - lr = field.LongRange() + lr: Any = field.LongRange() d = D(lr=Range(lt=42)) assert 40 in d.lr @@ -130,7 +139,7 @@ class D(document.AsyncDocument): assert {"lr": {"lt": 42}} == d.to_dict() -async def test_range_deserializes_properly(): +async def test_range_deserializes_properly() -> None: class D(InnerDoc): lr = field.LongRange() @@ -140,13 +149,13 @@ class D(InnerDoc): assert 47 not in d.lr -async def test_resolve_nested(): +async def test_resolve_nested() -> None: nested, field = NestedSecret._index.resolve_nested("secrets.title") assert nested == ["secrets"] assert field is NestedSecret._doc_type.mapping["secrets"]["title"] -async def test_conflicting_mapping_raises_error_in_index_to_dict(): +async def test_conflicting_mapping_raises_error_in_index_to_dict() -> None: class A(document.AsyncDocument): name = field.Text() @@ -161,18 +170,18 @@ class B(document.AsyncDocument): i.to_dict() -async def test_ip_address_serializes_properly(): +async def test_ip_address_serializes_properly() -> None: host = Host(ip=ipaddress.IPv4Address("10.0.0.1")) assert {"ip": "10.0.0.1"} == host.to_dict() -async def test_matches_uses_index(): +async def test_matches_uses_index() -> None: assert SimpleCommit._matches({"_index": "test-git"}) assert not SimpleCommit._matches({"_index": "not-test-git"}) -async def test_matches_with_no_name_always_matches(): +async def test_matches_with_no_name_always_matches() -> None: class D(document.AsyncDocument): pass @@ -180,7 +189,7 @@ class D(document.AsyncDocument): assert D._matches({"_index": "whatever"}) -async def test_matches_accepts_wildcards(): +async def test_matches_accepts_wildcards() -> None: class MyDoc(document.AsyncDocument): class Index: name = "my-*" @@ -189,7 +198,7 @@ class Index: assert not MyDoc._matches({"_index": "not-my-index"}) -async def test_assigning_attrlist_to_field(): +async def 
test_assigning_attrlist_to_field() -> None: sc = SimpleCommit() ls = ["README", "README.rst"] sc.files = utils.AttrList(ls) @@ -197,13 +206,13 @@ async def test_assigning_attrlist_to_field(): assert sc.to_dict()["files"] is ls -async def test_optional_inner_objects_are_not_validated_if_missing(): - d = OptionalObjectWithRequiredField() +async def test_optional_inner_objects_are_not_validated_if_missing() -> None: + d: Any = OptionalObjectWithRequiredField() assert d.full_clean() is None -async def test_custom_field(): +async def test_custom_field() -> None: s = SecretDoc(title=Secret("Hello")) assert {"title": "Uryyb"} == s.to_dict() @@ -214,13 +223,13 @@ async def test_custom_field(): assert isinstance(s.title, Secret) -async def test_custom_field_mapping(): +async def test_custom_field_mapping() -> None: assert { "properties": {"title": {"index": "no", "type": "text"}} } == SecretDoc._doc_type.mapping.to_dict() -async def test_custom_field_in_nested(): +async def test_custom_field_in_nested() -> None: s = NestedSecret() s.secrets.append(SecretDoc(title=Secret("Hello"))) @@ -228,7 +237,7 @@ async def test_custom_field_in_nested(): assert s.secrets[0].title == "Hello" -async def test_multi_works_after_doc_has_been_saved(): +async def test_multi_works_after_doc_has_been_saved() -> None: c = SimpleCommit() c.full_clean() c.files.append("setup.py") @@ -236,7 +245,7 @@ async def test_multi_works_after_doc_has_been_saved(): assert c.to_dict() == {"files": ["setup.py"]} -async def test_multi_works_in_nested_after_doc_has_been_serialized(): +async def test_multi_works_in_nested_after_doc_has_been_serialized() -> None: # Issue #359 c = DocWithNested(comments=[Comment(title="First!")]) @@ -245,19 +254,21 @@ async def test_multi_works_in_nested_after_doc_has_been_serialized(): assert [] == c.comments[0].tags -async def test_null_value_for_object(): +async def test_null_value_for_object() -> None: d = MyDoc(inner=None) assert d.inner is None -async def test_inherited_doc_types_can_override_index(): +async def test_inherited_doc_types_can_override_index() -> None: class MyDocDifferentIndex(MySubDoc): + _index: Any + class Index: - name = "not-default-index" - settings = {"number_of_replicas": 0} - aliases = {"a": {}} - analyzers = [analyzer("my_analizer", tokenizer="keyword")] + name: Any = "not-default-index" + settings: Any = {"number_of_replicas": 0} + aliases: Any = {"a": {}} + analyzers: Any = [analyzer("my_analizer", tokenizer="keyword")] assert MyDocDifferentIndex._index._name == "not-default-index" assert MyDocDifferentIndex()._get_index() == "not-default-index" @@ -283,7 +294,7 @@ class Index: } -async def test_to_dict_with_meta(): +async def test_to_dict_with_meta() -> None: d = MySubDoc(title="hello") d.meta.routing = "some-parent" @@ -294,28 +305,28 @@ async def test_to_dict_with_meta(): } == d.to_dict(True) -async def test_to_dict_with_meta_includes_custom_index(): +async def test_to_dict_with_meta_includes_custom_index() -> None: d = MySubDoc(title="hello") d.meta.index = "other-index" assert {"_index": "other-index", "_source": {"title": "hello"}} == d.to_dict(True) -async def test_to_dict_without_skip_empty_will_include_empty_fields(): +async def test_to_dict_without_skip_empty_will_include_empty_fields() -> None: d = MySubDoc(tags=[], title=None, inner={}) assert {} == d.to_dict() assert {"tags": [], "title": None, "inner": {}} == d.to_dict(skip_empty=False) -async def test_attribute_can_be_removed(): +async def test_attribute_can_be_removed() -> None: d = MyDoc(title="hello") del 
d.title assert "title" not in d._d_ -async def test_doc_type_can_be_correctly_pickled(): +async def test_doc_type_can_be_correctly_pickled() -> None: d = DocWithNested( title="Hello World!", comments=[Comment(title="hellp")], meta={"id": 42} ) @@ -330,7 +341,7 @@ async def test_doc_type_can_be_correctly_pickled(): assert isinstance(d2.comments[0], Comment) -async def test_meta_is_accessible_even_on_empty_doc(): +async def test_meta_is_accessible_even_on_empty_doc() -> None: d = MyDoc() d.meta @@ -338,7 +349,7 @@ async def test_meta_is_accessible_even_on_empty_doc(): d.meta -async def test_meta_field_mapping(): +async def test_meta_field_mapping() -> None: class User(document.AsyncDocument): username = field.Text() @@ -357,7 +368,7 @@ class Meta: } == User._doc_type.mapping.to_dict() -async def test_multi_value_fields(): +async def test_multi_value_fields() -> None: class Blog(document.AsyncDocument): tags = field.Keyword(multi=True) @@ -368,19 +379,19 @@ class Blog(document.AsyncDocument): assert ["search", "python"] == b.tags -async def test_docs_with_properties(): +async def test_docs_with_properties() -> None: class User(document.AsyncDocument): - pwd_hash = field.Text() + pwd_hash: Any = field.Text() - def check_password(self, pwd): + def check_password(self, pwd: Any) -> Any: return sha256(pwd).hexdigest() == self.pwd_hash @property - def password(self): + def password(self) -> Any: raise AttributeError("readonly") @password.setter - def password(self, pwd): + def password(self, pwd: Any) -> None: self.pwd_hash = sha256(pwd).hexdigest() u = User(pwd_hash=sha256(b"secret").hexdigest()) @@ -396,7 +407,7 @@ def password(self, pwd): u.password -async def test_nested_can_be_assigned_to(): +async def test_nested_can_be_assigned_to() -> None: d1 = DocWithNested(comments=[Comment(title="First!")]) d2 = DocWithNested() @@ -407,13 +418,13 @@ async def test_nested_can_be_assigned_to(): assert isinstance(d2.comments[0], Comment) -async def test_nested_can_be_none(): +async def test_nested_can_be_none() -> None: d = DocWithNested(comments=None, title="Hello World!") assert {"title": "Hello World!"} == d.to_dict() -async def test_nested_defaults_to_list_and_can_be_updated(): +async def test_nested_defaults_to_list_and_can_be_updated() -> None: md = DocWithNested() assert [] == md.comments @@ -422,8 +433,8 @@ async def test_nested_defaults_to_list_and_can_be_updated(): assert {"comments": [{"title": "hello World!"}]} == md.to_dict() -async def test_to_dict_is_recursive_and_can_cope_with_multi_values(): - md = MyDoc(name=["a", "b", "c"]) +async def test_to_dict_is_recursive_and_can_cope_with_multi_values() -> None: + md: Any = MyDoc(name=["a", "b", "c"]) md.inner = [MyInner(old_field="of1"), MyInner(old_field="of2")] assert isinstance(md.inner[0], MyInner) @@ -434,13 +445,13 @@ async def test_to_dict_is_recursive_and_can_cope_with_multi_values(): } == md.to_dict() -async def test_to_dict_ignores_empty_collections(): - md = MySubDoc(name="", address={}, count=0, valid=False, tags=[]) +async def test_to_dict_ignores_empty_collections() -> None: + md: Any = MySubDoc(name="", address={}, count=0, valid=False, tags=[]) assert {"name": "", "count": 0, "valid": False} == md.to_dict() -async def test_declarative_mapping_definition(): +async def test_declarative_mapping_definition() -> None: assert issubclass(MyDoc, document.AsyncDocument) assert hasattr(MyDoc, "_doc_type") assert { @@ -453,7 +464,7 @@ async def test_declarative_mapping_definition(): } == MyDoc._doc_type.mapping.to_dict() -async def 
test_you_can_supply_own_mapping_instance(): +async def test_you_can_supply_own_mapping_instance() -> None: class MyD(document.AsyncDocument): title = field.Text() @@ -467,9 +478,9 @@ class Meta: } == MyD._doc_type.mapping.to_dict() -async def test_document_can_be_created_dynamically(): +async def test_document_can_be_created_dynamically() -> None: n = datetime.now() - md = MyDoc(title="hello") + md: Any = MyDoc(title="hello") md.name = "My Fancy Document!" md.created_at = n @@ -488,14 +499,14 @@ async def test_document_can_be_created_dynamically(): } == md.to_dict() -async def test_invalid_date_will_raise_exception(): - md = MyDoc() +async def test_invalid_date_will_raise_exception() -> None: + md: Any = MyDoc() md.created_at = "not-a-date" with raises(ValidationException): md.full_clean() -async def test_document_inheritance(): +async def test_document_inheritance() -> None: assert issubclass(MySubDoc, MyDoc) assert issubclass(MySubDoc, document.AsyncDocument) assert hasattr(MySubDoc, "_doc_type") @@ -509,7 +520,7 @@ async def test_document_inheritance(): } == MySubDoc._doc_type.mapping.to_dict() -async def test_child_class_can_override_parent(): +async def test_child_class_can_override_parent() -> None: class A(document.AsyncDocument): o = field.Object(dynamic=False, properties={"a": field.Text()}) @@ -527,8 +538,8 @@ class B(A): } == B._doc_type.mapping.to_dict() -async def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict(): - md = MySubDoc(meta={"id": 42}, name="My First doc!") +async def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict() -> None: + md: Any = MySubDoc(meta={"id": 42}, name="My First doc!") md.meta.index = "my-index" assert md.meta.index == "my-index" @@ -537,7 +548,7 @@ async def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict(): assert {"id": 42, "index": "my-index"} == md.meta.to_dict() -async def test_index_inheritance(): +async def test_index_inheritance() -> None: assert issubclass(MyMultiSubDoc, MySubDoc) assert issubclass(MyMultiSubDoc, MyDoc2) assert issubclass(MyMultiSubDoc, document.AsyncDocument) @@ -554,33 +565,33 @@ async def test_index_inheritance(): } == MyMultiSubDoc._doc_type.mapping.to_dict() -async def test_meta_fields_can_be_set_directly_in_init(): +async def test_meta_fields_can_be_set_directly_in_init() -> None: p = object() - md = MyDoc(_id=p, title="Hello World!") + md: Any = MyDoc(_id=p, title="Hello World!") assert md.meta.id is p -async def test_save_no_index(mock_client): - md = MyDoc() +async def test_save_no_index(mock_client: Any) -> None: + md: Any = MyDoc() with raises(ValidationException): await md.save(using="mock") -async def test_delete_no_index(mock_client): - md = MyDoc() +async def test_delete_no_index(mock_client: Any) -> None: + md: Any = MyDoc() with raises(ValidationException): await md.delete(using="mock") -async def test_update_no_fields(): - md = MyDoc() +async def test_update_no_fields() -> None: + md: Any = MyDoc() with raises(IllegalOperation): await md.update() -async def test_search_with_custom_alias_and_index(mock_client): - search_object = MyDoc.search( +async def test_search_with_custom_alias_and_index(mock_client: Any) -> None: + search_object: Any = MyDoc.search( using="staging", index=["custom_index1", "custom_index2"] ) @@ -588,8 +599,8 @@ async def test_search_with_custom_alias_and_index(mock_client): assert search_object._index == ["custom_index1", "custom_index2"] -async def test_from_opensearch_respects_underscored_non_meta_fields(): - doc = { +async def 
test_from_opensearch_respects_underscored_non_meta_fields() -> None: + doc: Any = { "_index": "test-index", "_id": "opensearch", "_score": 12.0, @@ -612,11 +623,11 @@ class Index: assert c._tagline == "You know, for search" -async def test_nested_and_object_inner_doc(): +async def test_nested_and_object_inner_doc() -> None: class MySubDocWithNested(MyDoc): nested_inner = field.Nested(MyInner) - props = MySubDocWithNested._doc_type.mapping.to_dict()["properties"] + props: Any = MySubDocWithNested._doc_type.mapping.to_dict()["properties"] assert props == { "created_at": {"type": "date"}, "inner": {"properties": {"old_field": {"type": "text"}}, "type": "object"}, diff --git a/test_opensearchpy/test_async/test_helpers/test_faceted_search.py b/test_opensearchpy/test_async/test_helpers/test_faceted_search.py index 88344cdb..c27bd3ea 100644 --- a/test_opensearchpy/test_async/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_async/test_helpers/test_faceted_search.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -8,13 +9,15 @@ # GitHub history for details. from datetime import datetime +from typing import Any import pytest +from _pytest.mark.structures import MarkDecorator from opensearchpy._async.helpers.faceted_search import AsyncFacetedSearch from opensearchpy.helpers.faceted_search import DateHistogramFacet, TermsFacet -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class BlogSearch(AsyncFacetedSearch): @@ -30,7 +33,7 @@ class BlogSearch(AsyncFacetedSearch): } -async def test_query_is_created_properly(): +async def test_query_is_created_properly() -> None: bs = BlogSearch("python search") s = bs.build_search() @@ -53,7 +56,7 @@ async def test_query_is_created_properly(): } == s.to_dict() -async def test_query_is_created_properly_with_sort_tuple(): +async def test_query_is_created_properly_with_sort_tuple() -> None: bs = BlogSearch("python search", sort=("category", "-title")) s = bs.build_search() @@ -77,7 +80,7 @@ async def test_query_is_created_properly_with_sort_tuple(): } == s.to_dict() -async def test_filter_is_applied_to_search_but_not_relevant_facet(): +async def test_filter_is_applied_to_search_but_not_relevant_facet() -> None: bs = BlogSearch("python search", filters={"category": "opensearch"}) s = bs.build_search() @@ -100,7 +103,7 @@ async def test_filter_is_applied_to_search_but_not_relevant_facet(): } == s.to_dict() -async def test_filters_are_applied_to_search_ant_relevant_facets(): +async def test_filters_are_applied_to_search_ant_relevant_facets() -> None: bs = BlogSearch( "python search", filters={"category": "opensearch", "tags": ["python", "django"]}, @@ -134,13 +137,13 @@ async def test_filters_are_applied_to_search_ant_relevant_facets(): } == d -async def test_date_histogram_facet_with_1970_01_01_date(): +async def test_date_histogram_facet_with_1970_01_01_date() -> None: dhf = DateHistogramFacet() assert dhf.get_value({"key": None}) == datetime(1970, 1, 1, 0, 0) assert dhf.get_value({"key": 0}) == datetime(1970, 1, 1, 0, 0) -@pytest.mark.parametrize( +@pytest.mark.parametrize( # type: ignore ["interval_type", "interval"], [ ("interval", "year"), @@ -167,7 +170,7 @@ async def test_date_histogram_facet_with_1970_01_01_date(): ("fixed_interval", "1h"), ], ) -async def test_date_histogram_interval_types(interval_type, interval): +async def test_date_histogram_interval_types(interval_type: Any, interval: Any) -> None: dhf = 
DateHistogramFacet(field="@timestamp", **{interval_type: interval}) assert dhf.get_aggregation().to_dict() == { "date_histogram": { @@ -179,7 +182,7 @@ async def test_date_histogram_interval_types(interval_type, interval): dhf.get_value_filter(datetime.now()) -async def test_date_histogram_no_interval_keyerror(): +async def test_date_histogram_no_interval_keyerror() -> None: dhf = DateHistogramFacet(field="@timestamp") with pytest.raises(KeyError) as e: dhf.get_value_filter(datetime.now()) diff --git a/test_opensearchpy/test_async/test_helpers/test_index.py b/test_opensearchpy/test_async/test_helpers/test_index.py index 4ba51ce2..e59d86ad 100644 --- a/test_opensearchpy/test_async/test_helpers/test_index.py +++ b/test_opensearchpy/test_async/test_helpers/test_index.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -9,15 +10,17 @@ import string from random import choice +from typing import Any import pytest +from _pytest.mark.structures import MarkDecorator from pytest import raises from opensearchpy import Date, Text, analyzer from opensearchpy._async.helpers.document import AsyncDocument from opensearchpy._async.helpers.index import AsyncIndex -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class Post(AsyncDocument): @@ -25,7 +28,7 @@ class Post(AsyncDocument): published_from = Date() -async def test_multiple_doc_types_will_combine_mappings(): +async def test_multiple_doc_types_will_combine_mappings() -> None: class User(AsyncDocument): username = Text() @@ -43,14 +46,14 @@ class User(AsyncDocument): } == i.to_dict() -async def test_search_is_limited_to_index_name(): +async def test_search_is_limited_to_index_name() -> None: i = AsyncIndex("my-index") s = i.search() assert s._index == ["my-index"] -async def test_cloned_index_has_copied_settings_and_using(): +async def test_cloned_index_has_copied_settings_and_using() -> None: client = object() i = AsyncIndex("my-index", using=client) i.settings(number_of_shards=1) @@ -63,7 +66,7 @@ async def test_cloned_index_has_copied_settings_and_using(): assert i._settings is not i2._settings -async def test_cloned_index_has_analysis_attribute(): +async def test_cloned_index_has_analysis_attribute() -> None: """ Regression test for Issue #582 in which `Index.clone()` was not copying over the `_analysis` attribute. 
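`test_cloned_index_has_analysis_attribute` guards the Issue #582 regression in which `Index.clone()` did not carry the `_analysis` attribute over. A hedged sketch of the behaviour it asserts, using the synchronous `Index` for brevity; the analyzer and index names are illustrative:

```python
from opensearchpy import Index, analyzer

i = Index("my-index")
i.analyzer(analyzer("my_analyzer", tokenizer="keyword"))

# A clone must carry the analysis settings over.
i2 = i.clone("my-other-index")
assert (
    i.to_dict()["settings"]["analysis"]
    == i2.to_dict()["settings"]["analysis"]
)
```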
@@ -83,7 +86,7 @@ async def test_cloned_index_has_analysis_attribute(): assert i.to_dict()["settings"]["analysis"] == i2.to_dict()["settings"]["analysis"] -async def test_settings_are_saved(): +async def test_settings_are_saved() -> None: i = AsyncIndex("i") i.settings(number_of_replicas=0) i.settings(number_of_shards=1) @@ -91,7 +94,7 @@ async def test_settings_are_saved(): assert {"settings": {"number_of_shards": 1, "number_of_replicas": 0}} == i.to_dict() -async def test_registered_doc_type_included_in_to_dict(): +async def test_registered_doc_type_included_in_to_dict() -> None: i = AsyncIndex("i", using="alias") i.document(Post) @@ -105,7 +108,7 @@ async def test_registered_doc_type_included_in_to_dict(): } == i.to_dict() -async def test_registered_doc_type_included_in_search(): +async def test_registered_doc_type_included_in_search() -> None: i = AsyncIndex("i", using="alias") i.document(Post) @@ -114,9 +117,9 @@ async def test_registered_doc_type_included_in_search(): assert s._doc_type == [Post] -async def test_aliases_add_to_object(): +async def test_aliases_add_to_object() -> None: random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) - alias_dict = {random_alias: {}} + alias_dict: Any = {random_alias: {}} index = AsyncIndex("i", using="alias") index.aliases(**alias_dict) @@ -124,9 +127,9 @@ async def test_aliases_add_to_object(): assert index._aliases == alias_dict -async def test_aliases_returned_from_to_dict(): +async def test_aliases_returned_from_to_dict() -> None: random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) - alias_dict = {random_alias: {}} + alias_dict: Any = {random_alias: {}} index = AsyncIndex("i", using="alias") index.aliases(**alias_dict) @@ -134,7 +137,7 @@ async def test_aliases_returned_from_to_dict(): assert index._aliases == index.to_dict()["aliases"] == alias_dict -async def test_analyzers_added_to_object(): +async def test_analyzers_added_to_object() -> None: random_analyzer_name = "".join((choice(string.ascii_letters) for _ in range(100))) random_analyzer = analyzer( random_analyzer_name, tokenizer="standard", filter="standard" @@ -150,7 +153,7 @@ async def test_analyzers_added_to_object(): } -async def test_analyzers_returned_from_to_dict(): +async def test_analyzers_returned_from_to_dict() -> None: random_analyzer_name = "".join((choice(string.ascii_letters) for _ in range(100))) random_analyzer = analyzer( random_analyzer_name, tokenizer="standard", filter="standard" @@ -163,7 +166,7 @@ async def test_analyzers_returned_from_to_dict(): ] == {"filter": ["standard"], "type": "custom", "tokenizer": "standard"} -async def test_conflicting_analyzer_raises_error(): +async def test_conflicting_analyzer_raises_error() -> None: i = AsyncIndex("i") i.analyzer("my_analyzer", tokenizer="whitespace", filter=["lowercase", "stop"]) @@ -171,7 +174,7 @@ async def test_conflicting_analyzer_raises_error(): i.analyzer("my_analyzer", tokenizer="keyword", filter=["lowercase", "stop"]) -async def test_index_template_can_have_order(): +async def test_index_template_can_have_order() -> None: i = AsyncIndex("i-*") it = i.as_template("i", order=2) diff --git a/test_opensearchpy/test_async/test_helpers/test_mapping.py b/test_opensearchpy/test_async/test_helpers/test_mapping.py index a4fb2b24..797c295f 100644 --- a/test_opensearchpy/test_async/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_async/test_helpers/test_mapping.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch 
Contributors require contributions made to @@ -14,7 +15,7 @@ from opensearchpy.helpers import analysis -async def test_mapping_can_has_fields(): +async def test_mapping_can_has_fields() -> None: m = mapping.AsyncMapping() m.field("name", "text").field("tags", "keyword") @@ -23,7 +24,7 @@ async def test_mapping_can_has_fields(): } == m.to_dict() -async def test_mapping_update_is_recursive(): +async def test_mapping_update_is_recursive() -> None: m1 = mapping.AsyncMapping() m1.field("title", "text") m1.field("author", "object") @@ -56,7 +57,7 @@ async def test_mapping_update_is_recursive(): } == m1.to_dict() -async def test_properties_can_iterate_over_all_the_fields(): +async def test_properties_can_iterate_over_all_the_fields() -> None: m = mapping.AsyncMapping() m.field("f1", "text", test_attr="f1", fields={"f2": Keyword(test_attr="f2")}) m.field("f3", Nested(test_attr="f3", properties={"f4": Text(test_attr="f4")})) @@ -66,7 +67,7 @@ async def test_properties_can_iterate_over_all_the_fields(): } -async def test_mapping_can_collect_all_analyzers_and_normalizers(): +async def test_mapping_can_collect_all_analyzers_and_normalizers() -> None: a1 = analysis.analyzer( "my_analyzer1", tokenizer="keyword", @@ -139,7 +140,7 @@ async def test_mapping_can_collect_all_analyzers_and_normalizers(): assert json.loads(json.dumps(m.to_dict())) == m.to_dict() -async def test_mapping_can_collect_multiple_analyzers(): +async def test_mapping_can_collect_multiple_analyzers() -> None: a1 = analysis.analyzer( "my_analyzer1", tokenizer="keyword", @@ -185,7 +186,7 @@ async def test_mapping_can_collect_multiple_analyzers(): } == m._collect_analysis() -async def test_even_non_custom_analyzers_can_have_params(): +async def test_even_non_custom_analyzers_can_have_params() -> None: a1 = analysis.analyzer("whitespace", type="pattern", pattern=r"\\s+") m = mapping.AsyncMapping() m.field("title", "text", analyzer=a1) @@ -195,14 +196,14 @@ async def test_even_non_custom_analyzers_can_have_params(): } == m._collect_analysis() -async def test_resolve_field_can_resolve_multifields(): +async def test_resolve_field_can_resolve_multifields() -> None: m = mapping.AsyncMapping() m.field("title", "text", fields={"keyword": Keyword()}) assert isinstance(m.resolve_field("title.keyword"), Keyword) -async def test_resolve_nested(): +async def test_resolve_nested() -> None: m = mapping.AsyncMapping() m.field("n1", "nested", properties={"n2": Nested(properties={"k1": Keyword()})}) m.field("k2", "keyword") diff --git a/test_opensearchpy/test_async/test_helpers/test_search.py b/test_opensearchpy/test_async/test_helpers/test_search.py index 5df66804..1af617d7 100644 --- a/test_opensearchpy/test_async/test_helpers/test_search.py +++ b/test_opensearchpy/test_async/test_helpers/test_search.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -8,8 +9,10 @@ # GitHub history for details. 
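Before the search tests, note that the mapping tests above use `AsyncMapping` as a plain builder: `field()` calls chain and `to_dict()` renders the mapping body, with no event loop involved. A minimal sketch under those assumptions (field names are illustrative, and the import path is assumed to match the test module's):

```python
from opensearchpy._async.helpers import mapping

m = mapping.AsyncMapping()
m.field("name", "text").field("tags", "keyword")

# The builder renders straight to the mapping body.
assert m.to_dict() == {
    "properties": {"name": {"type": "text"}, "tags": {"type": "keyword"}}
}
```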
from copy import deepcopy +from typing import Any import pytest +from _pytest.mark.structures import MarkDecorator from pytest import raises from opensearchpy._async.helpers import search @@ -18,16 +21,16 @@ from opensearchpy.helpers import query from opensearchpy.helpers.query import Q -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio -async def test_expand__to_dot_is_respected(): +async def test_expand__to_dot_is_respected() -> None: s = search.AsyncSearch().query("match", a__b=42, _expand__to_dot=False) assert {"query": {"match": {"a__b": 42}}} == s.to_dict() -async def test_execute_uses_cache(): +async def test_execute_uses_cache() -> None: s = search.AsyncSearch() r = object() s._response = r @@ -35,20 +38,20 @@ async def test_execute_uses_cache(): assert r is await s.execute() -async def test_cache_isnt_cloned(): +async def test_cache_isnt_cloned() -> None: s = search.AsyncSearch() s._response = object() assert not hasattr(s._clone(), "_response") -async def test_search_starts_with_no_query(): +async def test_search_starts_with_no_query() -> None: s = search.AsyncSearch() assert s.query._proxied is None -async def test_search_query_combines_query(): +async def test_search_query_combines_query() -> None: s = search.AsyncSearch() s2 = s.query("match", f=42) @@ -60,7 +63,7 @@ async def test_search_query_combines_query(): assert s3.query._proxied == query.Bool(must=[query.Match(f=42), query.Match(f=43)]) -async def test_query_can_be_assigned_to(): +async def test_query_can_be_assigned_to() -> None: s = search.AsyncSearch() q = Q("match", title="python") @@ -69,7 +72,7 @@ async def test_query_can_be_assigned_to(): assert s.query._proxied is q -async def test_query_can_be_wrapped(): +async def test_query_can_be_wrapped() -> None: s = search.AsyncSearch().query("match", title="python") s.query = Q("function_score", query=s.query, field_value_factor={"field": "rating"}) @@ -84,7 +87,7 @@ async def test_query_can_be_wrapped(): } == s.to_dict() -async def test_using(): +async def test_using() -> None: o = object() o2 = object() s = search.AsyncSearch(using=o) @@ -94,19 +97,19 @@ async def test_using(): assert s2._using is o2 -async def test_methods_are_proxied_to_the_query(): +async def test_methods_are_proxied_to_the_query() -> None: s = search.AsyncSearch().query("match_all") assert s.query.to_dict() == {"match_all": {}} -async def test_query_always_returns_search(): +async def test_query_always_returns_search() -> None: s = search.AsyncSearch() assert isinstance(s.query("match", f=42), search.AsyncSearch) -async def test_source_copied_on_clone(): +async def test_source_copied_on_clone() -> None: s = search.AsyncSearch().source(False) assert s._clone()._source == s._source assert s._clone()._source is False @@ -120,7 +123,7 @@ async def test_source_copied_on_clone(): assert s3._clone()._source == ["some", "fields"] -async def test_copy_clones(): +async def test_copy_clones() -> None: from copy import copy s1 = search.AsyncSearch().source(["some", "fields"]) @@ -130,7 +133,7 @@ async def test_copy_clones(): assert s1 is not s2 -async def test_aggs_allow_two_metric(): +async def test_aggs_allow_two_metric() -> None: s = search.AsyncSearch() s.aggs.metric("a", "max", field="a").metric("b", "max", field="b") @@ -140,7 +143,7 @@ async def test_aggs_allow_two_metric(): } -async def test_aggs_get_copied_on_change(): +async def test_aggs_get_copied_on_change() -> None: s = search.AsyncSearch().query("match_all") s.aggs.bucket("per_tag", "terms", field="f").metric( 
"max_score", "max", field="score" @@ -153,7 +156,7 @@ async def test_aggs_get_copied_on_change(): s4 = s3._clone() s4.aggs.metric("max_score", "max", field="score") - d = { + d: Any = { "query": {"match_all": {}}, "aggs": { "per_tag": { @@ -172,7 +175,7 @@ async def test_aggs_get_copied_on_change(): assert d == s4.to_dict() -async def test_search_index(): +async def test_search_index() -> None: s = search.AsyncSearch(index="i") assert s._index == ["i"] s = s.index("i2") @@ -203,7 +206,7 @@ async def test_search_index(): assert s2._index == ["i", "i2", "i3", "i4", "i5"] -async def test_doc_type_document_class(): +async def test_doc_type_document_class() -> None: class MyDocument(AsyncDocument): pass @@ -216,7 +219,7 @@ class MyDocument(AsyncDocument): assert s._doc_type_map == {} -async def test_sort(): +async def test_sort() -> None: s = search.AsyncSearch() s = s.sort("fielda", "-fieldb") @@ -228,7 +231,7 @@ async def test_sort(): assert search.AsyncSearch().to_dict() == s.to_dict() -async def test_sort_by_score(): +async def test_sort_by_score() -> None: s = search.AsyncSearch() s = s.sort("_score") assert {"sort": ["_score"]} == s.to_dict() @@ -238,7 +241,7 @@ async def test_sort_by_score(): s.sort("-_score") -async def test_slice(): +async def test_slice() -> None: s = search.AsyncSearch() assert {"from": 3, "size": 7} == s[3:10].to_dict() assert {"from": 0, "size": 5} == s[:5].to_dict() @@ -247,12 +250,12 @@ async def test_slice(): assert {"from": 20, "size": 0} == s[20:0].to_dict() -async def test_index(): +async def test_index() -> None: s = search.AsyncSearch() assert {"from": 3, "size": 1} == s[3].to_dict() -async def test_search_to_dict(): +async def test_search_to_dict() -> None: s = search.AsyncSearch() assert {} == s.to_dict() @@ -281,7 +284,7 @@ async def test_search_to_dict(): assert {"size": 5, "from": 42} == s.to_dict() -async def test_complex_example(): +async def test_complex_example() -> None: s = search.AsyncSearch() s = ( s.query("match", title="python") @@ -332,7 +335,7 @@ async def test_complex_example(): } == s.to_dict() -async def test_reverse(): +async def test_reverse() -> None: d = { "query": { "filtered": { @@ -382,13 +385,13 @@ async def test_reverse(): assert d == s.to_dict() -async def test_from_dict_doesnt_need_query(): +async def test_from_dict_doesnt_need_query() -> None: s = search.AsyncSearch.from_dict({"size": 5}) assert {"size": 5} == s.to_dict() -async def test_source(): +async def test_source() -> None: assert {} == search.AsyncSearch().source().to_dict() assert { @@ -404,7 +407,7 @@ async def test_source(): ).source(["f1", "f2"]).to_dict() -async def test_source_on_clone(): +async def test_source_on_clone() -> None: assert { "_source": {"includes": ["foo.bar.*"], "excludes": ["foo.one"]}, "query": {"bool": {"filter": [{"term": {"title": "python"}}]}}, @@ -419,7 +422,7 @@ async def test_source_on_clone(): } == search.AsyncSearch().source(False).filter("term", title="python").to_dict() -async def test_source_on_clear(): +async def test_source_on_clear() -> None: assert ( {} == search.AsyncSearch() @@ -429,7 +432,7 @@ async def test_source_on_clear(): ) -async def test_suggest_accepts_global_text(): +async def test_suggest_accepts_global_text() -> None: s = search.AsyncSearch.from_dict( { "suggest": { @@ -451,7 +454,7 @@ async def test_suggest_accepts_global_text(): } == s.to_dict() -async def test_suggest(): +async def test_suggest() -> None: s = search.AsyncSearch() s = s.suggest("my_suggestion", "pyhton", term={"field": "title"}) @@ -460,7 +463,7 
@@ async def test_suggest(): } == s.to_dict() -async def test_exclude(): +async def test_exclude() -> None: s = search.AsyncSearch() s = s.exclude("match", title="python") @@ -473,7 +476,7 @@ async def test_exclude(): } == s.to_dict() -async def test_update_from_dict(): +async def test_update_from_dict() -> None: s = search.AsyncSearch() s.update_from_dict({"indices_boost": [{"important-documents": 2}]}) s.update_from_dict({"_source": ["id", "name"]}) @@ -484,7 +487,7 @@ async def test_update_from_dict(): } == s.to_dict() -async def test_rescore_query_to_dict(): +async def test_rescore_query_to_dict() -> None: s = search.AsyncSearch(index="index-name") positive_query = Q( diff --git a/test_opensearchpy/test_async/test_helpers/test_update_by_query.py b/test_opensearchpy/test_async/test_helpers/test_update_by_query.py index c535f15a..52fc20c3 100644 --- a/test_opensearchpy/test_async/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_async/test_helpers/test_update_by_query.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -10,21 +11,22 @@ from copy import deepcopy import pytest +from _pytest.mark.structures import MarkDecorator from opensearchpy import Q from opensearchpy._async.helpers import update_by_query from opensearchpy.helpers.response import UpdateByQueryResponse -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio -async def test_ubq_starts_with_no_query(): +async def test_ubq_starts_with_no_query() -> None: ubq = update_by_query.AsyncUpdateByQuery() assert ubq.query._proxied is None -async def test_ubq_to_dict(): +async def test_ubq_to_dict() -> None: ubq = update_by_query.AsyncUpdateByQuery() assert {} == ubq.to_dict() @@ -42,7 +44,7 @@ async def test_ubq_to_dict(): assert {"extra_q": {"term": {"category": "conference"}}} == ubq.to_dict() -async def test_complex_example(): +async def test_complex_example() -> None: ubq = update_by_query.AsyncUpdateByQuery() ubq = ( ubq.query("match", title="python") @@ -80,7 +82,7 @@ async def test_complex_example(): } == ubq.to_dict() -async def test_exclude(): +async def test_exclude() -> None: ubq = update_by_query.AsyncUpdateByQuery() ubq = ubq.exclude("match", title="python") @@ -93,7 +95,7 @@ async def test_exclude(): } == ubq.to_dict() -async def test_reverse(): +async def test_reverse() -> None: d = { "query": { "filtered": { @@ -129,13 +131,13 @@ async def test_reverse(): assert d == ubq.to_dict() -async def test_from_dict_doesnt_need_query(): +async def test_from_dict_doesnt_need_query() -> None: ubq = update_by_query.AsyncUpdateByQuery.from_dict({"script": {"source": "test"}}) assert {"script": {"source": "test"}} == ubq.to_dict() -async def test_overwrite_script(): +async def test_overwrite_script() -> None: ubq = update_by_query.AsyncUpdateByQuery() ubq = ubq.script( source="ctx._source.likes += params.f", lang="painless", params={"f": 3} @@ -151,7 +153,7 @@ async def test_overwrite_script(): assert {"script": {"source": "ctx._source.likes++"}} == ubq.to_dict() -async def test_update_by_query_response_success(): +async def test_update_by_query_response_success() -> None: ubqr = UpdateByQueryResponse({}, {"timed_out": False, "failures": []}) assert ubqr.success() diff --git a/test_opensearchpy/test_async/test_http_connection.py b/test_opensearchpy/test_async/test_http_connection.py index a362f451..febb231b 100644 --- a/test_opensearchpy/test_async/test_http_connection.py +++ 
b/test_opensearchpy/test_async/test_http_connection.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,40 +26,43 @@ # under the License. +from typing import Any + import mock import pytest +from _pytest.mark.structures import MarkDecorator from multidict import CIMultiDict -from opensearchpy._async._extra_imports import aiohttp +from opensearchpy._async._extra_imports import aiohttp # type: ignore from opensearchpy._async.compat import get_running_loop from opensearchpy.connection.http_async import AsyncHttpConnection -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class TestAsyncHttpConnection: - def test_auth_as_tuple(self): + def test_auth_as_tuple(self) -> None: c = AsyncHttpConnection(http_auth=("username", "password")) assert isinstance(c._http_auth, aiohttp.BasicAuth) assert c._http_auth.login, "username" assert c._http_auth.password, "password" - def test_auth_as_string(self): + def test_auth_as_string(self) -> None: c = AsyncHttpConnection(http_auth="username:password") assert isinstance(c._http_auth, aiohttp.BasicAuth) assert c._http_auth.login, "username" assert c._http_auth.password, "password" - def test_auth_as_callable(self): - def auth_fn(): + def test_auth_as_callable(self) -> None: + def auth_fn() -> None: pass c = AsyncHttpConnection(http_auth=auth_fn) assert callable(c._http_auth) @mock.patch("aiohttp.ClientSession.request", new_callable=mock.Mock) - async def test_basicauth_in_request_session(self, mock_request): - async def do_request(*args, **kwargs): + async def test_basicauth_in_request_session(self, mock_request: Any) -> None: + async def do_request(*args: Any, **kwargs: Any) -> Any: response_mock = mock.AsyncMock() response_mock.headers = CIMultiDict() response_mock.status = 200 @@ -88,13 +92,13 @@ async def do_request(*args, **kwargs): ) @mock.patch("aiohttp.ClientSession.request", new_callable=mock.Mock) - async def test_callable_in_request_session(self, mock_request): - def auth_fn(*args, **kwargs): + async def test_callable_in_request_session(self, mock_request: Any) -> None: + def auth_fn(*args: Any, **kwargs: Any) -> Any: return { "Test": "PASSED", } - async def do_request(*args, **kwargs): + async def do_request(*args: Any, **kwargs: Any) -> Any: response_mock = mock.AsyncMock() response_mock.headers = CIMultiDict() response_mock.status = 200 diff --git a/test_opensearchpy/test_async/test_plugins_client.py b/test_opensearchpy/test_async/test_plugins_client.py new file mode 100644 index 00000000..d701892c --- /dev/null +++ b/test_opensearchpy/test_async/test_plugins_client.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + +from unittest import TestCase + +from opensearchpy._async.client import AsyncOpenSearch + + +class TestPluginsClient(TestCase): + async def test_plugins_client(self) -> None: + with self.assertWarns(Warning) as w: + client = AsyncOpenSearch() + # testing double-init here + client.plugins.__init__(client) # type: ignore + self.assertEqual( + str(w.warnings[0].message), + "Cannot load `alerting` directly to AsyncOpenSearch as it already exists. 
Use `AsyncOpenSearch.plugin.alerting` instead.", + ) diff --git a/test_opensearchpy/test_async/test_server/__init__.py b/test_opensearchpy/test_async/test_server/__init__.py index a2ab2657..3541fdec 100644 --- a/test_opensearchpy/test_async/test_server/__init__.py +++ b/test_opensearchpy/test_async/test_server/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,7 +26,7 @@ # under the License. -from unittest import IsolatedAsyncioTestCase +from unittest import IsolatedAsyncioTestCase # type: ignore from opensearchpy._async.helpers.test import get_test_client from opensearchpy.connection.async_connections import add_connection @@ -33,14 +34,14 @@ from ...utils import wipe_cluster -class AsyncOpenSearchTestCase(IsolatedAsyncioTestCase): - async def asyncSetUp(self): +class AsyncOpenSearchTestCase(IsolatedAsyncioTestCase): # type: ignore + async def asyncSetUp(self) -> None: self.client = await get_test_client( verify_certs=False, http_auth=("admin", "admin") ) await add_connection("default", self.client) - async def asyncTearDown(self): + async def asyncTearDown(self) -> None: wipe_cluster(self.client) if self.client: await self.client.close() diff --git a/test_opensearchpy/test_async/test_server/conftest.py b/test_opensearchpy/test_async/test_server/conftest.py index 42c37edb..79952bc4 100644 --- a/test_opensearchpy/test_async/test_server/conftest.py +++ b/test_opensearchpy/test_async/test_server/conftest.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -26,26 +27,28 @@ import asyncio +from typing import Any import pytest +from _pytest.mark.structures import MarkDecorator import opensearchpy from opensearchpy.helpers.test import OPENSEARCH_URL from ...utils import wipe_cluster -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio -@pytest.fixture(scope="function") -async def async_client(): +@pytest.fixture(scope="function") # type: ignore +async def async_client() -> Any: client = None try: if not hasattr(opensearchpy, "AsyncOpenSearch"): pytest.skip("test requires 'AsyncOpenSearch'") kw = {"timeout": 3} - client = opensearchpy.AsyncOpenSearch(OPENSEARCH_URL, **kw) + client = opensearchpy.AsyncOpenSearch(OPENSEARCH_URL, **kw) # type: ignore # wait for yellow status for _ in range(100): diff --git a/test_opensearchpy/test_async/test_server/test_clients.py b/test_opensearchpy/test_async/test_server/test_clients.py index 17104312..323532c5 100644 --- a/test_opensearchpy/test_async/test_server/test_clients.py +++ b/test_opensearchpy/test_async/test_server/test_clients.py @@ -28,25 +28,28 @@ from __future__ import unicode_literals +from typing import Any + import pytest +from _pytest.mark.structures import MarkDecorator -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class TestUnicode: - async def test_indices_analyze(self, async_client): + async def test_indices_analyze(self, async_client: Any) -> None: await async_client.indices.analyze(body='{"text": "привет"}') class TestBulk: - async def test_bulk_works_with_string_body(self, async_client): + async def test_bulk_works_with_string_body(self, async_client: Any) -> None: docs = '{ "index" : { "_index" : "bulk_test_index", "_id" : "1" } }\n{"answer": 42}' response = await async_client.bulk(body=docs) assert response["errors"] is False assert len(response["items"]) == 1 - async 
def test_bulk_works_with_bytestring_body(self, async_client): + async def test_bulk_works_with_bytestring_body(self, async_client: Any) -> None: docs = b'{ "index" : { "_index" : "bulk_test_index", "_id" : "2" } }\n{"answer": 42}' response = await async_client.bulk(body=docs) @@ -56,8 +59,8 @@ async def test_bulk_works_with_bytestring_body(self, async_client): class TestYarlMissing: async def test_aiohttp_connection_works_without_yarl( - self, async_client, monkeypatch - ): + self, async_client: Any, monkeypatch: Any + ) -> None: # This is a defensive test case for if aiohttp suddenly stops using yarl. from opensearchpy._async import http_aiohttp diff --git a/test_opensearchpy/test_async/test_server/test_helpers/__init__.py b/test_opensearchpy/test_async/test_server/test_helpers/__init__.py index 7e52ae22..392fa5bd 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/__init__.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_helpers/conftest.py b/test_opensearchpy/test_async/test_server/test_helpers/conftest.py index d5901d68..69282ead 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/conftest.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/conftest.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -9,6 +10,7 @@ import re from datetime import datetime +from typing import Any import pytest from pytest import fixture @@ -33,32 +35,32 @@ pytestmark = pytest.mark.asyncio -@fixture(scope="function") -async def client(): +@fixture(scope="function") # type: ignore +async def client() -> Any: client = await get_test_client(verify_certs=False, http_auth=("admin", "admin")) await add_connection("default", client) return client -@fixture(scope="function") -async def opensearch_version(client): +@fixture(scope="function") # type: ignore +async def opensearch_version(client: Any) -> Any: info = await client.info() print(info) yield tuple( int(x) - for x in re.match(r"^([0-9.]+)", info["version"]["number"]).group(1).split(".") + for x in re.match(r"^([0-9.]+)", info["version"]["number"]).group(1).split(".") # type: ignore ) -@fixture -async def write_client(client): +@fixture # type: ignore +async def write_client(client: Any) -> Any: yield client await client.indices.delete("test-*", ignore=404) await client.indices.delete_template("test-template", ignore=404) -@fixture -async def data_client(client): +@fixture # type: ignore +async def data_client(client: Any) -> Any: # create mappings await create_git_index(client, "git") await create_flat_git_index(client, "flat-git") @@ -70,8 +72,8 @@ async def data_client(client): await client.indices.delete("flat-git", ignore=404) -@fixture -async def pull_request(write_client): +@fixture # type: ignore +async def pull_request(write_client: Any) -> Any: await PullRequest.init() pr = PullRequest( _id=42, @@ -94,8 +96,8 @@ async def pull_request(write_client): return pr -@fixture -async def setup_ubq_tests(client): +@fixture # type: ignore +async def setup_ubq_tests(client: Any) -> str: index = "test-git" await create_git_index(client, index) await async_bulk(client, TEST_GIT_DATA, raise_on_error=True, refresh=True) diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py 
b/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py index 7355c71d..3608d935 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_actions.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -26,6 +27,7 @@ import asyncio +from typing import Any, List import pytest from mock import MagicMock, patch @@ -38,24 +40,27 @@ class AsyncMock(MagicMock): - async def __call__(self, *args, **kwargs): + async def __call__(self, *args: Any, **kwargs: Any) -> Any: return super(AsyncMock, self).__call__(*args, **kwargs) - def __await__(self): + def __await__(self) -> Any: return self().__await__() class FailingBulkClient(object): def __init__( - self, client, fail_at=(2,), fail_with=TransportError(599, "Error!", {}) - ): + self, + client: Any, + fail_at: Any = (2,), + fail_with: TransportError = TransportError(599, "Error!", {}), + ) -> None: self.client = client self._called = 0 self._fail_at = fail_at self.transport = client.transport self._fail_with = fail_with - async def bulk(self, *args, **kwargs): + async def bulk(self, *args: Any, **kwargs: Any) -> Any: self._called += 1 if self._called in self._fail_at: raise self._fail_with @@ -63,7 +68,7 @@ async def bulk(self, *args, **kwargs): class TestStreamingBulk(object): - async def test_actions_remain_unchanged(self, async_client): + async def test_actions_remain_unchanged(self, async_client: Any) -> None: actions1 = [{"_id": 1}, {"_id": 2}] async for ok, item in actions.async_streaming_bulk( async_client, actions1, index="test-index" @@ -71,7 +76,7 @@ async def test_actions_remain_unchanged(self, async_client): assert ok assert [{"_id": 1}, {"_id": 2}] == actions1 - async def test_all_documents_get_inserted(self, async_client): + async def test_all_documents_get_inserted(self, async_client: Any) -> None: docs = [{"answer": x, "_id": x} for x in range(100)] async for ok, item in actions.async_streaming_bulk( async_client, docs, index="test-index", refresh=True @@ -83,13 +88,13 @@ async def test_all_documents_get_inserted(self, async_client): "_source" ] - async def test_documents_data_types(self, async_client): - async def async_gen(): + async def test_documents_data_types(self, async_client: Any) -> None: + async def async_gen() -> Any: for x in range(100): await asyncio.sleep(0) yield {"answer": x, "_id": x} - def sync_gen(): + def sync_gen() -> Any: for x in range(100): yield {"answer": x, "_id": x} @@ -117,7 +122,9 @@ def sync_gen(): "_source" ] - async def test_all_errors_from_chunk_are_raised_on_failure(self, async_client): + async def test_all_errors_from_chunk_are_raised_on_failure( + self, async_client: Any + ) -> None: await async_client.indices.create( "i", { @@ -137,7 +144,7 @@ async def test_all_errors_from_chunk_are_raised_on_failure(self, async_client): else: assert False, "exception should have been raised" - async def test_different_op_types(self, async_client): + async def test_different_op_types(self, async_client: Any) -> None: await async_client.index(index="i", id=45, body={}) await async_client.index(index="i", id=42, body={}) docs = [ @@ -152,7 +159,7 @@ async def test_different_op_types(self, async_client): assert {"answer": 42} == (await async_client.get(index="i", id=42))["_source"] assert {"f": "v"} == (await async_client.get(index="i", id=47))["_source"] - async def test_transport_error_can_becaught(self, async_client): 
+ async def test_transport_error_can_becaught(self, async_client: Any) -> None: failing_client = FailingBulkClient(async_client) docs = [ {"_index": "i", "_id": 47, "f": "v"}, @@ -186,7 +193,7 @@ async def test_transport_error_can_becaught(self, async_client): } } == results[1][1] - async def test_rejected_documents_are_retried(self, async_client): + async def test_rejected_documents_are_retried(self, async_client: Any) -> None: failing_client = FailingBulkClient( async_client, fail_with=TransportError(429, "Rejected!", {}) ) @@ -215,8 +222,8 @@ async def test_rejected_documents_are_retried(self, async_client): assert 4 == failing_client._called async def test_rejected_documents_are_retried_at_most_max_retries_times( - self, async_client - ): + self, async_client: Any + ) -> None: failing_client = FailingBulkClient( async_client, fail_at=(1, 2), fail_with=TransportError(429, "Rejected!", {}) ) @@ -245,14 +252,16 @@ async def test_rejected_documents_are_retried_at_most_max_retries_times( assert {"value": 2, "relation": "eq"} == res["hits"]["total"] assert 4 == failing_client._called - async def test_transport_error_is_raised_with_max_retries(self, async_client): + async def test_transport_error_is_raised_with_max_retries( + self, async_client: Any + ) -> None: failing_client = FailingBulkClient( async_client, fail_at=(1, 2, 3, 4), fail_with=TransportError(429, "Rejected!", {}), ) - async def streaming_bulk(): + async def streaming_bulk() -> Any: results = [ x async for x in actions.async_streaming_bulk( @@ -271,7 +280,7 @@ async def streaming_bulk(): class TestBulk(object): - async def test_bulk_works_with_single_item(self, async_client): + async def test_bulk_works_with_single_item(self, async_client: Any) -> None: docs = [{"answer": 42, "_id": 1}] success, failed = await actions.async_bulk( async_client, docs, index="test-index", refresh=True @@ -284,7 +293,7 @@ async def test_bulk_works_with_single_item(self, async_client): "_source" ] - async def test_all_documents_get_inserted(self, async_client): + async def test_all_documents_get_inserted(self, async_client: Any) -> None: docs = [{"answer": x, "_id": x} for x in range(100)] success, failed = await actions.async_bulk( async_client, docs, index="test-index", refresh=True @@ -297,7 +306,7 @@ async def test_all_documents_get_inserted(self, async_client): "_source" ] - async def test_stats_only_reports_numbers(self, async_client): + async def test_stats_only_reports_numbers(self, async_client: Any) -> None: docs = [{"answer": x} for x in range(100)] success, failed = await actions.async_bulk( async_client, docs, index="test-index", refresh=True, stats_only=True @@ -307,7 +316,7 @@ async def test_stats_only_reports_numbers(self, async_client): assert 0 == failed assert 100 == (await async_client.count(index="test-index"))["count"] - async def test_errors_are_reported_correctly(self, async_client): + async def test_errors_are_reported_correctly(self, async_client: Any) -> None: await async_client.indices.create( "i", { @@ -324,6 +333,7 @@ async def test_errors_are_reported_correctly(self, async_client): raise_on_error=False, ) assert 1 == success + assert isinstance(failed, List) assert 1 == len(failed) error = failed[0] assert "42" == error["index"]["_id"] @@ -333,7 +343,7 @@ async def test_errors_are_reported_correctly(self, async_client): error["index"]["error"] ) or "mapper_parsing_exception" in repr(error["index"]["error"]) - async def test_error_is_raised(self, async_client): + async def test_error_is_raised(self, async_client: Any) 
-> None: await async_client.indices.create( "i", { @@ -346,7 +356,7 @@ async def test_error_is_raised(self, async_client): with pytest.raises(BulkIndexError): await actions.async_bulk(async_client, [{"a": 42}, {"a": "c"}], index="i") - async def test_ignore_error_if_raised(self, async_client): + async def test_ignore_error_if_raised(self, async_client: Any) -> None: # ignore the status code 400 in tuple await actions.async_bulk( async_client, [{"a": 42}, {"a": "c"}], index="i", ignore_status=(400,) @@ -379,7 +389,7 @@ async def test_ignore_error_if_raised(self, async_client): failing_client, [{"a": 42}], index="i", ignore_status=(599,) ) - async def test_errors_are_collected_properly(self, async_client): + async def test_errors_are_collected_properly(self, async_client: Any) -> None: await async_client.indices.create( "i", { @@ -401,10 +411,12 @@ async def test_errors_are_collected_properly(self, async_client): class MockScroll: - def __init__(self): + calls: Any + + def __init__(self) -> None: self.calls = [] - async def __call__(self, *args, **kwargs): + async def __call__(self, *args: Any, **kwargs: Any) -> Any: self.calls.append((args, kwargs)) if len(self.calls) == 1: return { @@ -423,25 +435,27 @@ async def __call__(self, *args, **kwargs): class MockResponse: - def __init__(self, resp): + def __init__(self, resp: Any) -> None: self.resp = resp - async def __call__(self, *args, **kwargs): + async def __call__(self, *args: Any, **kwargs: Any) -> Any: return self.resp - def __await__(self): + def __await__(self) -> Any: return self().__await__() -@pytest.fixture(scope="function") -async def scan_teardown(async_client): +@pytest.fixture(scope="function") # type: ignore +async def scan_teardown(async_client: Any) -> Any: yield await async_client.clear_scroll(scroll_id="_all") class TestScan(object): - async def test_order_can_be_preserved(self, async_client, scan_teardown): - bulk = [] + async def test_order_can_be_preserved( + self, async_client: Any, scan_teardown: Any + ) -> None: + bulk: Any = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_id": x}}) bulk.append({"answer": x, "correct": x == 42}) @@ -461,8 +475,10 @@ async def test_order_can_be_preserved(self, async_client, scan_teardown): assert list(map(str, range(100))) == list(d["_id"] for d in docs) assert list(range(100)) == list(d["_source"]["answer"] for d in docs) - async def test_all_documents_are_read(self, async_client, scan_teardown): - bulk = [] + async def test_all_documents_are_read( + self, async_client: Any, scan_teardown: Any + ) -> None: + bulk: Any = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_id": x}}) bulk.append({"answer": x, "correct": x == 42}) @@ -477,8 +493,8 @@ async def test_all_documents_are_read(self, async_client, scan_teardown): assert set(map(str, range(100))) == set(d["_id"] for d in docs) assert set(range(100)) == set(d["_source"]["answer"] for d in docs) - async def test_scroll_error(self, async_client, scan_teardown): - bulk = [] + async def test_scroll_error(self, async_client: Any, scan_teardown: Any) -> None: + bulk: Any = [] for x in range(4): bulk.append({"index": {"_index": "test_index"}}) bulk.append({"value": x}) @@ -513,7 +529,9 @@ async def test_scroll_error(self, async_client, scan_teardown): assert len(data) == 3 assert data[-1] == {"scroll_data": 42} - async def test_initial_search_error(self, async_client, scan_teardown): + async def test_initial_search_error( + self, async_client: Any, scan_teardown: Any + ) -> None: with 
patch.object(async_client, "clear_scroll", new_callable=AsyncMock): with patch.object( async_client, @@ -563,7 +581,9 @@ async def test_initial_search_error(self, async_client, scan_teardown): assert data == [{"search_data": 1}] assert mock_scroll.calls == [] - async def test_no_scroll_id_fast_route(self, async_client, scan_teardown): + async def test_no_scroll_id_fast_route( + self, async_client: Any, scan_teardown: Any + ) -> None: with patch.object(async_client, "search", MockResponse({"no": "_scroll_id"})): with patch.object(async_client, "scroll") as scroll_mock: with patch.object(async_client, "clear_scroll") as clear_mock: @@ -579,8 +599,10 @@ async def test_no_scroll_id_fast_route(self, async_client, scan_teardown): clear_mock.assert_not_called() @patch("opensearchpy._async.helpers.actions.logger") - async def test_logger(self, logger_mock, async_client, scan_teardown): - bulk = [] + async def test_logger( + self, logger_mock: Any, async_client: Any, scan_teardown: Any + ) -> None: + bulk: Any = [] for x in range(4): bulk.append({"index": {"_index": "test_index"}}) bulk.append({"value": x}) @@ -620,8 +642,8 @@ async def test_logger(self, logger_mock, async_client, scan_teardown): 5, ) - async def test_clear_scroll(self, async_client, scan_teardown): - bulk = [] + async def test_clear_scroll(self, async_client: Any, scan_teardown: Any) -> None: + bulk: Any = [] for x in range(4): bulk.append({"index": {"_index": "test_index"}}) bulk.append({"value": x}) @@ -656,7 +678,7 @@ async def test_clear_scroll(self, async_client, scan_teardown): ] spy.assert_not_called() - @pytest.mark.parametrize( + @pytest.mark.parametrize( # type: ignore "kwargs", [ {"api_key": ("name", "value")}, @@ -665,8 +687,8 @@ async def test_clear_scroll(self, async_client, scan_teardown): ], ) async def test_scan_auth_kwargs_forwarded( - self, async_client, scan_teardown, kwargs - ): + self, async_client: Any, scan_teardown: Any, kwargs: Any + ) -> None: ((key, val),) = kwargs.items() with patch.object( @@ -707,8 +729,8 @@ async def test_scan_auth_kwargs_forwarded( assert api_mock.call_args[1][key] == val async def test_scan_auth_kwargs_favor_scroll_kwargs_option( - self, async_client, scan_teardown - ): + self, async_client: Any, scan_teardown: Any + ) -> None: with patch.object( async_client, "search", @@ -756,9 +778,9 @@ async def test_scan_auth_kwargs_favor_scroll_kwargs_option( assert async_client.scroll.call_args[1]["sort"] == "asc" -@pytest.fixture(scope="function") -async def reindex_setup(async_client): - bulk = [] +@pytest.fixture(scope="function") # type: ignore +async def reindex_setup(async_client: Any) -> Any: + bulk: Any = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_id": x}}) bulk.append( @@ -774,8 +796,8 @@ async def reindex_setup(async_client): class TestReindex(object): async def test_reindex_passes_kwargs_to_scan_and_bulk( - self, async_client, reindex_setup - ): + self, async_client: Any, reindex_setup: Any + ) -> None: await actions.async_reindex( async_client, "test_index", @@ -794,7 +816,9 @@ async def test_reindex_passes_kwargs_to_scan_and_bulk( await async_client.get(index="prod_index", id=42) )["_source"] - async def test_reindex_accepts_a_query(self, async_client, reindex_setup): + async def test_reindex_accepts_a_query( + self, async_client: Any, reindex_setup: Any + ) -> None: await actions.async_reindex( async_client, "test_index", @@ -813,7 +837,9 @@ async def test_reindex_accepts_a_query(self, async_client, reindex_setup): await 
async_client.get(index="prod_index", id=42) )["_source"] - async def test_all_documents_get_moved(self, async_client, reindex_setup): + async def test_all_documents_get_moved( + self, async_client: Any, reindex_setup: Any + ) -> None: await actions.async_reindex(async_client, "test_index", "prod_index") await async_client.indices.refresh() @@ -834,8 +860,8 @@ async def test_all_documents_get_moved(self, async_client, reindex_setup): )["_source"] -@pytest.fixture(scope="function") -async def parent_reindex_setup(async_client): +@pytest.fixture(scope="function") # type: ignore +async def parent_reindex_setup(async_client: Any) -> None: body = { "settings": {"number_of_shards": 1, "number_of_replicas": 0}, "mappings": { @@ -864,8 +890,8 @@ async def parent_reindex_setup(async_client): class TestParentChildReindex: async def test_children_are_reindexed_correctly( - self, async_client, parent_reindex_setup - ): + self, async_client: Any, parent_reindex_setup: Any + ) -> None: await actions.async_reindex(async_client, "test-index", "real-index") assert {"question_answer": "question"} == ( await async_client.get(index="real-index", id=42) diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_data.py b/test_opensearchpy/test_async/test_server/test_helpers/test_data.py index d513bcff..7a23b8b1 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_data.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_data.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -9,8 +10,10 @@ from __future__ import unicode_literals +from typing import Any, Dict -async def create_flat_git_index(client, index): + +async def create_flat_git_index(client: Any, index: Any) -> None: # we will use user on several places user_mapping = { "properties": {"name": {"type": "text", "fields": {"raw": {"type": "keyword"}}}} @@ -53,7 +56,7 @@ async def create_flat_git_index(client, index): ) -async def create_git_index(client, index): +async def create_git_index(client: Any, index: Any) -> None: # we will use user on several places user_mapping = { "properties": {"name": {"type": "text", "fields": {"raw": {"type": "keyword"}}}} @@ -947,7 +950,7 @@ async def create_git_index(client, index): ], "committer": {"name": "Honza Kr\xe1l", "email": "honza.kral@gmail.com"}, "stats": {"deletions": 0, "insertions": 53, "lines": 53, "files": 2}, - "description": "From_dict, Q(dict) and bool query parses it's subqueries", + "description": "From_dict, Q(dict) and bool query parses its subqueries", "author": {"name": "Honza Kr\xe1l", "email": "honza.kral@gmail.com"}, "parent_shas": ["d407f99d1959b7b862a541c066d9fd737ce913f3"], "committed_date": "2014-03-06T20:24:30", @@ -1075,7 +1078,7 @@ async def create_git_index(client, index): ] -def flatten_doc(d): +def flatten_doc(d: Any) -> Dict[str, Any]: src = d["_source"].copy() del src["commit_repo"] return {"_index": "flat-git", "_id": d["_id"], "_source": src} @@ -1084,7 +1087,7 @@ def flatten_doc(d): FLAT_DATA = [flatten_doc(d) for d in DATA if "routing" in d] -def create_test_git_data(d): +def create_test_git_data(d: Any) -> Dict[str, Any]: src = d["_source"].copy() return { "_index": "test-git", diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_document.py b/test_opensearchpy/test_async/test_server/test_helpers/test_document.py index 172dfbfc..8e4e95e2 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_document.py 
+++ b/test_opensearchpy/test_async/test_server/test_helpers/test_document.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -9,6 +10,7 @@ from datetime import datetime from ipaddress import ip_address +from typing import Any, Optional import pytest from pytest import raises @@ -62,7 +64,7 @@ class Repository(AsyncDocument): tags = Keyword() @classmethod - def search(cls): + def search(cls, using: Any = None, index: Optional[str] = None) -> Any: return super(Repository, cls).search().filter("term", commit_repo="repo") class Index: @@ -115,7 +117,7 @@ class Index: name = "test-serialization" -async def test_serialization(write_client): +async def test_serialization(write_client: Any) -> None: await SerializationDoc.init() await write_client.index( index="test-serialization", @@ -128,7 +130,7 @@ async def test_serialization(write_client): "ip": ["::1", "127.0.0.1", None], }, ) - sd = await SerializationDoc.get(id=42) + sd: Any = await SerializationDoc.get(id=42) assert sd.i == [1, 2, 3, None] assert sd.b == [True, False, True, False, None] @@ -145,7 +147,7 @@ async def test_serialization(write_client): } -async def test_nested_inner_hits_are_wrapped_properly(pull_request): +async def test_nested_inner_hits_are_wrapped_properly(pull_request: Any) -> None: history_query = Q( "nested", path="comments.history", @@ -173,7 +175,7 @@ async def test_nested_inner_hits_are_wrapped_properly(pull_request): assert "score" in history.meta -async def test_nested_inner_hits_are_deserialized_properly(pull_request): +async def test_nested_inner_hits_are_deserialized_properly(pull_request: Any) -> None: s = PullRequest.search().query( "nested", inner_hits={}, @@ -188,7 +190,7 @@ async def test_nested_inner_hits_are_deserialized_properly(pull_request): assert isinstance(pr.comments[0].created_at, datetime) -async def test_nested_top_hits_are_wrapped_properly(pull_request): +async def test_nested_top_hits_are_wrapped_properly(pull_request: Any) -> None: s = PullRequest.search() s.aggs.bucket("comments", "nested", path="comments").metric( "hits", "top_hits", size=1 @@ -200,7 +202,7 @@ async def test_nested_top_hits_are_wrapped_properly(pull_request): assert isinstance(r.aggregations.comments.hits.hits[0], Comment) -async def test_update_object_field(write_client): +async def test_update_object_field(write_client: Any) -> None: await Wiki.init() w = Wiki( owner=User(name="Honza Kral"), @@ -220,7 +222,7 @@ async def test_update_object_field(write_client): assert w.ranked == {"test1": 0.1, "topic2": 0.2} -async def test_update_script(write_client): +async def test_update_script(write_client: Any) -> None: await Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) await w.save() @@ -230,7 +232,7 @@ async def test_update_script(write_client): assert w.views == 47 -async def test_update_retry_on_conflict(write_client): +async def test_update_retry_on_conflict(write_client: Any) -> None: await Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) await w.save() @@ -248,8 +250,10 @@ async def test_update_retry_on_conflict(write_client): assert w.views == 52 -@pytest.mark.parametrize("retry_on_conflict", [None, 0]) -async def test_update_conflicting_version(write_client, retry_on_conflict): +@pytest.mark.parametrize("retry_on_conflict", [None, 0]) # type: ignore +async def test_update_conflicting_version( + write_client: Any, retry_on_conflict: bool +) -> None: await 
Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) await w.save() @@ -266,7 +270,7 @@ async def test_update_conflicting_version(write_client, retry_on_conflict): ) -async def test_save_and_update_return_doc_meta(write_client): +async def test_save_and_update_return_doc_meta(write_client: Any) -> None: await Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) resp = await w.save(return_doc_meta=True) @@ -290,31 +294,33 @@ async def test_save_and_update_return_doc_meta(write_client): assert resp.keys().__contains__("_version") -async def test_init(write_client): +async def test_init(write_client: Any) -> None: await Repository.init(index="test-git") assert await write_client.indices.exists(index="test-git") -async def test_get_raises_404_on_index_missing(data_client): +async def test_get_raises_404_on_index_missing(data_client: Any) -> None: with raises(NotFoundError): await Repository.get("opensearch-dsl-php", index="not-there") -async def test_get_raises_404_on_non_existent_id(data_client): +async def test_get_raises_404_on_non_existent_id(data_client: Any) -> None: with raises(NotFoundError): await Repository.get("opensearch-dsl-php") -async def test_get_returns_none_if_404_ignored(data_client): +async def test_get_returns_none_if_404_ignored(data_client: Any) -> None: assert None is await Repository.get("opensearch-dsl-php", ignore=404) -async def test_get_returns_none_if_404_ignored_and_index_doesnt_exist(data_client): +async def test_get_returns_none_if_404_ignored_and_index_doesnt_exist( + data_client: Any, +) -> None: assert None is await Repository.get("42", index="not-there", ignore=404) -async def test_get(data_client): +async def test_get(data_client: Any) -> None: opensearch_repo = await Repository.get("opensearch-py") assert isinstance(opensearch_repo, Repository) @@ -322,15 +328,15 @@ async def test_get(data_client): assert datetime(2014, 3, 3) == opensearch_repo.created_at -async def test_exists_return_true(data_client): +async def test_exists_return_true(data_client: Any) -> None: assert await Repository.exists("opensearch-py") -async def test_exists_false(data_client): +async def test_exists_false(data_client: Any) -> None: assert not await Repository.exists("opensearch-dsl-php") -async def test_get_with_tz_date(data_client): +async def test_get_with_tz_date(data_client: Any) -> None: first_commit = await Commit.get( id="3ca6e1e73a071a705b4babd2f581c91a2a3e5037", routing="opensearch-py" ) @@ -342,7 +348,7 @@ async def test_get_with_tz_date(data_client): ) -async def test_save_with_tz_date(data_client): +async def test_save_with_tz_date(data_client: Any) -> None: tzinfo = timezone("Europe/Prague") first_commit = await Commit.get( id="3ca6e1e73a071a705b4babd2f581c91a2a3e5037", routing="opensearch-py" @@ -369,7 +375,7 @@ async def test_save_with_tz_date(data_client): ] -async def test_mget(data_client): +async def test_mget(data_client: Any) -> None: commits = await Commit.mget(COMMIT_DOCS_WITH_MISSING) assert commits[0] is None assert commits[1].meta.id == "3ca6e1e73a071a705b4babd2f581c91a2a3e5037" @@ -377,23 +383,27 @@ async def test_mget(data_client): assert commits[3].meta.id == "eb3e543323f189fd7b698e66295427204fff5755" -async def test_mget_raises_exception_when_missing_param_is_invalid(data_client): +async def test_mget_raises_exception_when_missing_param_is_invalid( + data_client: Any, +) -> None: with raises(ValueError): await Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="raj") -async def 
test_mget_raises_404_when_missing_param_is_raise(data_client): +async def test_mget_raises_404_when_missing_param_is_raise(data_client: Any) -> None: with raises(NotFoundError): await Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="raise") -async def test_mget_ignores_missing_docs_when_missing_param_is_skip(data_client): +async def test_mget_ignores_missing_docs_when_missing_param_is_skip( + data_client: Any, +) -> None: commits = await Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="skip") assert commits[0].meta.id == "3ca6e1e73a071a705b4babd2f581c91a2a3e5037" assert commits[1].meta.id == "eb3e543323f189fd7b698e66295427204fff5755" -async def test_update_works_from_search_response(data_client): +async def test_update_works_from_search_response(data_client: Any) -> None: opensearch_repo = (await Repository.search().execute())[0] await opensearch_repo.update(owner={"other_name": "opensearchpy"}) @@ -404,7 +414,7 @@ async def test_update_works_from_search_response(data_client): assert "opensearch" == new_version.owner.name -async def test_update(data_client): +async def test_update(data_client: Any) -> None: opensearch_repo = await Repository.get("opensearch-py") v = opensearch_repo.meta.version @@ -428,7 +438,7 @@ async def test_update(data_client): assert "primary_term" in new_version.meta -async def test_save_updates_existing_doc(data_client): +async def test_save_updates_existing_doc(data_client: Any) -> None: opensearch_repo = await Repository.get("opensearch-py") opensearch_repo.new_field = "testing-save" @@ -441,7 +451,9 @@ async def test_save_updates_existing_doc(data_client): assert new_repo["_seq_no"] == opensearch_repo.meta.seq_no -async def test_save_automatically_uses_seq_no_and_primary_term(data_client): +async def test_save_automatically_uses_seq_no_and_primary_term( + data_client: Any, +) -> None: opensearch_repo = await Repository.get("opensearch-py") opensearch_repo.meta.seq_no += 1 @@ -449,7 +461,9 @@ async def test_save_automatically_uses_seq_no_and_primary_term(data_client): await opensearch_repo.save() -async def test_delete_automatically_uses_seq_no_and_primary_term(data_client): +async def test_delete_automatically_uses_seq_no_and_primary_term( + data_client: Any, +) -> None: opensearch_repo = await Repository.get("opensearch-py") opensearch_repo.meta.seq_no += 1 @@ -457,13 +471,13 @@ async def test_delete_automatically_uses_seq_no_and_primary_term(data_client): await opensearch_repo.delete() -async def assert_doc_equals(expected, actual): +async def assert_doc_equals(expected: Any, actual: Any) -> None: async for f in aiter(expected): assert f in actual assert actual[f] == expected[f] -async def test_can_save_to_different_index(write_client): +async def test_can_save_to_different_index(write_client: Any) -> None: test_repo = Repository(description="testing", meta={"id": 42}) assert await test_repo.save(index="test-document") @@ -478,7 +492,9 @@ async def test_can_save_to_different_index(write_client): ) -async def test_save_without_skip_empty_will_include_empty_fields(write_client): +async def test_save_without_skip_empty_will_include_empty_fields( + write_client: Any, +) -> None: test_repo = Repository(field_1=[], field_2=None, field_3={}, meta={"id": 42}) assert await test_repo.save(index="test-document", skip_empty=False) @@ -493,7 +509,7 @@ async def test_save_without_skip_empty_will_include_empty_fields(write_client): ) -async def test_delete(write_client): +async def test_delete(write_client: Any) -> None: await write_client.create( index="test-document", 
id="opensearch-py", @@ -514,11 +530,11 @@ async def test_delete(write_client): ) -async def test_search(data_client): +async def test_search(data_client: Any) -> None: assert await Repository.search().count() == 1 -async def test_search_returns_proper_doc_classes(data_client): +async def test_search_returns_proper_doc_classes(data_client: Any) -> None: result = await Repository.search().execute() opensearch_repo = result.hits[0] @@ -527,8 +543,10 @@ async def test_search_returns_proper_doc_classes(data_client): assert opensearch_repo.owner.name == "opensearch" -async def test_refresh_mapping(data_client): +async def test_refresh_mapping(data_client: Any) -> None: class Commit(AsyncDocument): + _index: Any + class Index: name = "git" @@ -541,7 +559,7 @@ class Index: assert isinstance(Commit._index._mapping["committed_date"], Date) -async def test_highlight_in_meta(data_client): +async def test_highlight_in_meta(data_client: Any) -> None: commit = ( await Commit.search() .query("match", description="inverting") diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py b/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py index ab8ae552..b03fefe8 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_faceted_search.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -8,8 +9,10 @@ # GitHub history for details. from datetime import datetime +from typing import Any import pytest +from _pytest.mark.structures import MarkDecorator from opensearchpy import A, Boolean, Date, Keyword from opensearchpy._async.helpers.document import AsyncDocument @@ -24,7 +27,7 @@ PullRequest, ) -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class Repos(AsyncDocument): @@ -52,8 +55,8 @@ class MetricSearch(AsyncFacetedSearch): } -@pytest.fixture(scope="function") -def commit_search_cls(opensearch_version): +@pytest.fixture(scope="function") # type: ignore +def commit_search_cls(opensearch_version: Any) -> Any: interval_kwargs = {"fixed_interval": "1d"} class CommitSearch(AsyncFacetedSearch): @@ -77,8 +80,8 @@ class CommitSearch(AsyncFacetedSearch): return CommitSearch -@pytest.fixture(scope="function") -def repo_search_cls(opensearch_version): +@pytest.fixture(scope="function") # type: ignore +def repo_search_cls(opensearch_version: Any) -> Any: interval_type = "calendar_interval" class RepoSearch(AsyncFacetedSearch): @@ -91,15 +94,15 @@ class RepoSearch(AsyncFacetedSearch): ), } - def search(self): + def search(self) -> Any: s = super(RepoSearch, self).search() return s.filter("term", commit_repo="repo") return RepoSearch -@pytest.fixture(scope="function") -def pr_search_cls(opensearch_version): +@pytest.fixture(scope="function") # type: ignore +def pr_search_cls(opensearch_version: Any) -> Any: interval_type = "calendar_interval" class PRSearch(AsyncFacetedSearch): @@ -117,7 +120,7 @@ class PRSearch(AsyncFacetedSearch): return PRSearch -async def test_facet_with_custom_metric(data_client): +async def test_facet_with_custom_metric(data_client: Any) -> None: ms = MetricSearch() r = await ms.execute() @@ -126,7 +129,7 @@ async def test_facet_with_custom_metric(data_client): assert dates[0] == 1399038439000 -async def test_nested_facet(pull_request, pr_search_cls): +async def test_nested_facet(pull_request: Any, pr_search_cls: Any) -> 
None: prs = pr_search_cls() r = await prs.execute() @@ -134,7 +137,7 @@ async def test_nested_facet(pull_request, pr_search_cls): assert [(datetime(2018, 1, 1, 0, 0), 1, False)] == r.facets.comments -async def test_nested_facet_with_filter(pull_request, pr_search_cls): +async def test_nested_facet_with_filter(pull_request: Any, pr_search_cls: Any) -> None: prs = pr_search_cls(filters={"comments": datetime(2018, 1, 1, 0, 0)}) r = await prs.execute() @@ -146,7 +149,7 @@ async def test_nested_facet_with_filter(pull_request, pr_search_cls): assert not r.hits -async def test_datehistogram_facet(data_client, repo_search_cls): +async def test_datehistogram_facet(data_client: Any, repo_search_cls: Any) -> None: rs = repo_search_cls() r = await rs.execute() @@ -154,7 +157,7 @@ async def test_datehistogram_facet(data_client, repo_search_cls): assert [(datetime(2014, 3, 1, 0, 0), 1, False)] == r.facets.created -async def test_boolean_facet(data_client, repo_search_cls): +async def test_boolean_facet(data_client: Any, repo_search_cls: Any) -> None: rs = repo_search_cls() r = await rs.execute() @@ -165,8 +168,8 @@ async def test_boolean_facet(data_client, repo_search_cls): async def test_empty_search_finds_everything( - data_client, opensearch_version, commit_search_cls -): + data_client: Any, opensearch_version: Any, commit_search_cls: Any +) -> None: cs = commit_search_cls() r = await cs.execute() assert r.hits.total.value == 52 @@ -211,8 +214,8 @@ async def test_empty_search_finds_everything( async def test_term_filters_are_shown_as_selected_and_data_is_filtered( - data_client, commit_search_cls -): + data_client: Any, commit_search_cls: Any +) -> None: cs = commit_search_cls(filters={"files": "test_opensearchpy/test_dsl"}) r = await cs.execute() @@ -257,8 +260,8 @@ async def test_term_filters_are_shown_as_selected_and_data_is_filtered( async def test_range_filters_are_shown_as_selected_and_data_is_filtered( - data_client, commit_search_cls -): + data_client: Any, commit_search_cls: Any +) -> None: cs = commit_search_cls(filters={"deletions": "better"}) r = await cs.execute() @@ -266,7 +269,7 @@ async def test_range_filters_are_shown_as_selected_and_data_is_filtered( assert 19 == r.hits.total.value -async def test_pagination(data_client, commit_search_cls): +async def test_pagination(data_client: Any, commit_search_cls: Any) -> None: cs = commit_search_cls() cs = cs[0:20] diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_index.py b/test_opensearchpy/test_async/test_server/test_helpers/test_index.py index 26f452ca..14b87e15 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_index.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_index.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -7,14 +8,17 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. 
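To make the faceted-search tests above easier to follow, here is a minimal sketch of the AsyncFacetedSearch pattern they exercise, including the typed search() override this patch adds. The Article document, index name, field, and facet are illustrative assumptions, not code from this change.

from typing import Any

from opensearchpy import Keyword, Text
from opensearchpy._async.helpers.document import AsyncDocument
from opensearchpy._async.helpers.faceted_search import AsyncFacetedSearch
from opensearchpy.helpers.faceted_search import TermsFacet


class Article(AsyncDocument):
    title = Text()
    category = Keyword()

    class Index:
        name = "articles"


class ArticleSearch(AsyncFacetedSearch):
    doc_types = [Article]
    fields = ["title"]
    facets = {"category": TermsFacet(field="category")}

    def search(self) -> Any:
        # Same shape as the typed overrides above: refine the base
        # search while keeping the annotation explicit for mypy.
        return super().search().filter("term", category="news")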
+from typing import Any + import pytest +from _pytest.mark.structures import MarkDecorator from opensearchpy import Date, Text from opensearchpy._async.helpers.document import AsyncDocument from opensearchpy._async.helpers.index import AsyncIndex, AsyncIndexTemplate from opensearchpy.helpers import analysis -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class Post(AsyncDocument): @@ -22,7 +26,7 @@ class Post(AsyncDocument): published_from = Date() -async def test_index_template_works(write_client): +async def test_index_template_works(write_client: Any) -> None: it = AsyncIndexTemplate("test-template", "test-*") it.document(Post) it.settings(number_of_replicas=0, number_of_shards=1) @@ -43,7 +47,7 @@ async def test_index_template_works(write_client): } == await write_client.indices.get_mapping(index="test-blog") -async def test_index_can_be_saved_even_with_settings(write_client): +async def test_index_can_be_saved_even_with_settings(write_client: Any) -> None: i = AsyncIndex("test-blog", using=write_client) i.settings(number_of_shards=3, number_of_replicas=0) await i.save() @@ -58,12 +62,14 @@ async def test_index_can_be_saved_even_with_settings(write_client): ) -async def test_index_exists(data_client): +async def test_index_exists(data_client: Any) -> None: assert await AsyncIndex("git").exists() assert not await AsyncIndex("not-there").exists() -async def test_index_can_be_created_with_settings_and_mappings(write_client): +async def test_index_can_be_created_with_settings_and_mappings( + write_client: Any, +) -> None: i = AsyncIndex("test-blog", using=write_client) i.document(Post) i.settings(number_of_replicas=0, number_of_shards=1) @@ -88,7 +94,7 @@ async def test_index_can_be_created_with_settings_and_mappings(write_client): } -async def test_delete(write_client): +async def test_delete(write_client: Any) -> None: await write_client.indices.create( index="test-index", body={"settings": {"number_of_replicas": 0, "number_of_shards": 1}}, @@ -99,9 +105,9 @@ async def test_delete(write_client): assert not await write_client.indices.exists(index="test-index") -async def test_multiple_indices_with_same_doc_type_work(write_client): - i1 = AsyncIndex("test-index-1", using=write_client) - i2 = AsyncIndex("test-index-2", using=write_client) +async def test_multiple_indices_with_same_doc_type_work(write_client: Any) -> None: + i1: Any = AsyncIndex("test-index-1", using=write_client) + i2: Any = AsyncIndex("test-index-2", using=write_client) for i in i1, i2: i.document(Post) diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py b/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py index c05fd0ec..35a4e8d8 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_mapping.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -7,17 +8,20 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. 
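As a companion to the AsyncIndex tests above, the sketch below shows the declare-settings-then-save workflow they verify. The index name and connection details are placeholder assumptions; a secured cluster would also need credentials.

import asyncio
from typing import Any

from opensearchpy import AsyncOpenSearch
from opensearchpy._async.helpers.index import AsyncIndex


async def create_blog_index(client: Any) -> None:
    i: Any = AsyncIndex("test-blog", using=client)
    i.settings(number_of_shards=1, number_of_replicas=0)
    await i.save()  # creates the index with the settings declared above


async def main() -> None:
    # Placeholder endpoint; adjust host and TLS settings for your cluster.
    client = AsyncOpenSearch(hosts=["https://localhost:9200"], verify_certs=False)
    try:
        await create_blog_index(client)
    finally:
        await client.close()


if __name__ == "__main__":
    asyncio.run(main())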
+from typing import Any + import pytest +from _pytest.mark.structures import MarkDecorator from pytest import raises from opensearchpy import exceptions from opensearchpy._async.helpers import mapping from opensearchpy.helpers import analysis -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio -async def test_mapping_saved_into_opensearch(write_client): +async def test_mapping_saved_into_opensearch(write_client: Any) -> None: m = mapping.AsyncMapping() m.field( "name", "text", analyzer=analysis.analyzer("my_analyzer", tokenizer="keyword") @@ -38,8 +42,8 @@ async def test_mapping_saved_into_opensearch(write_client): async def test_mapping_saved_into_opensearch_when_index_already_exists_closed( - write_client, -): + write_client: Any, +) -> None: m = mapping.AsyncMapping() m.field( "name", "text", analyzer=analysis.analyzer("my_analyzer", tokenizer="keyword") @@ -63,8 +67,8 @@ async def test_mapping_saved_into_opensearch_when_index_already_exists_closed( async def test_mapping_saved_into_opensearch_when_index_already_exists_with_analysis( - write_client, -): + write_client: Any, +) -> None: m = mapping.AsyncMapping() analyzer = analysis.analyzer("my_analyzer", tokenizer="keyword") m.field("name", "text", analyzer=analyzer) @@ -93,7 +97,7 @@ async def test_mapping_saved_into_opensearch_when_index_already_exists_with_anal } == await write_client.indices.get_mapping(index="test-mapping") -async def test_mapping_gets_updated_from_opensearch(write_client): +async def test_mapping_gets_updated_from_opensearch(write_client: Any) -> None: await write_client.indices.create( index="test-mapping", body={ diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_search.py b/test_opensearchpy/test_async/test_server/test_helpers/test_search.py index 54889dc8..8431fa4a 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_search.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_search.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -9,7 +10,10 @@ from __future__ import unicode_literals +from typing import Any + import pytest +from _pytest.mark.structures import MarkDecorator from pytest import raises from opensearchpy import Date, Keyword, Q, Text, TransportError @@ -18,7 +22,7 @@ from opensearchpy.helpers.response import aggs from test_opensearchpy.test_async.test_server.test_helpers.test_data import FLAT_DATA -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class Repository(AsyncDocument): @@ -27,7 +31,7 @@ class Repository(AsyncDocument): tags = Keyword() @classmethod - def search(cls): + def search(cls, using: Any = None, index: Any = None) -> Any: return super(Repository, cls).search().filter("term", commit_repo="repo") class Index: @@ -39,7 +43,7 @@ class Index: name = "flat-git" -async def test_filters_aggregation_buckets_are_accessible(data_client): +async def test_filters_aggregation_buckets_are_accessible(data_client: Any) -> None: has_tests_query = Q("term", files="test_opensearchpy/test_dsl") s = Commit.search()[0:0] s.aggs.bucket("top_authors", "terms", field="author.name.raw").bucket( @@ -60,7 +64,7 @@ async def test_filters_aggregation_buckets_are_accessible(data_client): ) -async def test_top_hits_are_wrapped_in_response(data_client): +async def test_top_hits_are_wrapped_in_response(data_client: Any) -> None: s = Commit.search()[0:0] s.aggs.bucket("top_authors", "terms", 
field="author.name.raw").metric( "top_commits", "top_hits", size=5 @@ -76,7 +80,7 @@ async def test_top_hits_are_wrapped_in_response(data_client): assert isinstance(hits[0], Commit) -async def test_inner_hits_are_wrapped_in_response(data_client): +async def test_inner_hits_are_wrapped_in_response(data_client: Any) -> None: s = AsyncSearch(index="git")[0:1].query( "has_parent", parent_type="repo", inner_hits={}, query=Q("match_all") ) @@ -87,7 +91,7 @@ async def test_inner_hits_are_wrapped_in_response(data_client): assert repr(commit.meta.inner_hits.repo[0]).startswith(" None: result = Repository.search().scan() repos = await get_result(result) @@ -96,7 +100,7 @@ async def test_scan_respects_doc_types(data_client): assert repos[0].organization == "opensearch" -async def test_scan_iterates_through_all_docs(data_client): +async def test_scan_iterates_through_all_docs(data_client: Any) -> None: s = AsyncSearch(index="flat-git") result = s.scan() commits = await get_result(result) @@ -105,14 +109,14 @@ async def test_scan_iterates_through_all_docs(data_client): assert {d["_id"] for d in FLAT_DATA} == {c.meta.id for c in commits} -async def get_result(b): +async def get_result(b: Any) -> Any: a = [] async for i in b: a.append(i) return a -async def test_multi_search(data_client): +async def test_multi_search(data_client: Any) -> None: s1 = Repository.search() s2 = AsyncSearch(index="flat-git") @@ -129,7 +133,7 @@ async def test_multi_search(data_client): assert r2._search is s2 -async def test_multi_missing(data_client): +async def test_multi_missing(data_client: Any) -> None: s1 = Repository.search() s2 = AsyncSearch(index="flat-git") s3 = AsyncSearch(index="does_not_exist") @@ -152,7 +156,7 @@ async def test_multi_missing(data_client): assert r3 is None -async def test_raw_subfield_can_be_used_in_aggs(data_client): +async def test_raw_subfield_can_be_used_in_aggs(data_client: Any) -> None: s = AsyncSearch(index="git")[0:0] s.aggs.bucket("authors", "terms", field="author.name.raw", size=1) r = await s.execute() diff --git a/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py b/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py index 9c4e7fb6..46e515df 100644 --- a/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_async/test_server/test_helpers/test_update_by_query.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -7,15 +8,20 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. 
+from typing import Any + import pytest +from _pytest.mark.structures import MarkDecorator from opensearchpy._async.helpers.update_by_query import AsyncUpdateByQuery from opensearchpy.helpers.search import Q -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio -async def test_update_by_query_no_script(write_client, setup_ubq_tests): +async def test_update_by_query_no_script( + write_client: Any, setup_ubq_tests: Any +) -> None: index = setup_ubq_tests ubq = ( @@ -34,7 +40,9 @@ async def test_update_by_query_no_script(write_client, setup_ubq_tests): assert response.success() -async def test_update_by_query_with_script(write_client, setup_ubq_tests): +async def test_update_by_query_with_script( + write_client: Any, setup_ubq_tests: Any +) -> None: index = setup_ubq_tests ubq = ( @@ -51,7 +59,9 @@ async def test_update_by_query_with_script(write_client, setup_ubq_tests): assert response.version_conflicts == 0 -async def test_delete_by_query_with_script(write_client, setup_ubq_tests): +async def test_delete_by_query_with_script( + write_client: Any, setup_ubq_tests: Any +) -> None: index = setup_ubq_tests ubq = ( diff --git a/test_opensearchpy/test_async/test_server/test_plugins/__init__.py b/test_opensearchpy/test_async/test_server/test_plugins/__init__.py index 7e52ae22..392fa5bd 100644 --- a/test_opensearchpy/test_async/test_server/test_plugins/__init__.py +++ b/test_opensearchpy/test_async/test_server/test_plugins/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py b/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py index f3f7fe32..5af06a24 100644 --- a/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py +++ b/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py @@ -14,12 +14,13 @@ import unittest import pytest +from _pytest.mark.structures import MarkDecorator from opensearchpy.helpers.test import OPENSEARCH_VERSION from .. 
diff --git a/test_opensearchpy/test_async/test_server/test_plugins/__init__.py b/test_opensearchpy/test_async/test_server/test_plugins/__init__.py
index 7e52ae22..392fa5bd 100644
--- a/test_opensearchpy/test_async/test_server/test_plugins/__init__.py
+++ b/test_opensearchpy/test_async/test_server/test_plugins/__init__.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py b/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py
index f3f7fe32..5af06a24 100644
--- a/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py
+++ b/test_opensearchpy/test_async/test_server/test_plugins/test_alerting.py
@@ -14,12 +14,13 @@
 import unittest
 
 import pytest
+from _pytest.mark.structures import MarkDecorator
 
 from opensearchpy.helpers.test import OPENSEARCH_VERSION
 
 from .. import AsyncOpenSearchTestCase
 
-pytestmark = pytest.mark.asyncio
+pytestmark: MarkDecorator = pytest.mark.asyncio
 
 
 class TestAlertingPlugin(AsyncOpenSearchTestCase):
@@ -27,7 +28,7 @@ class TestAlertingPlugin(AsyncOpenSearchTestCase):
         (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)),
         "Plugin not supported for opensearch version",
     )
-    async def test_create_destination(self):
+    async def test_create_destination(self) -> None:
         # Test to create alert destination
         dummy_destination = {
             "name": "my-destination",
@@ -43,7 +44,7 @@ async def test_create_destination(self):
         (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)),
         "Plugin not supported for opensearch version",
     )
-    async def test_get_destination(self):
+    async def test_get_destination(self) -> None:
         # Create a dummy destination
         await self.test_create_destination()
 
@@ -58,7 +59,7 @@ async def test_get_destination(self):
         (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)),
         "Plugin not supported for opensearch version",
     )
-    async def test_create_monitor(self):
+    async def test_create_monitor(self) -> None:
         # Create a dummy destination
         await self.test_create_destination()
 
@@ -123,11 +124,11 @@ async def test_create_monitor(self):
         (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)),
         "Plugin not supported for opensearch version",
     )
-    async def test_search_monitor(self):
+    async def test_search_monitor(self) -> None:
         # Create a dummy monitor
         await self.test_create_monitor()
 
-        # Create a monitor search query by it's name
+        # Create a monitor search query by its name
         query = {"query": {"match": {"monitor.name": "test-monitor"}}}
 
         # Perform the search with the above query
@@ -141,11 +142,11 @@ async def test_search_monitor(self):
         (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)),
         "Plugin not supported for opensearch version",
     )
-    async def test_get_monitor(self):
+    async def test_get_monitor(self) -> None:
         # Create a dummy monitor
         await self.test_create_monitor()
 
-        # Create a monitor search query by it's name
+        # Create a monitor search query by its name
         query = {"query": {"match": {"monitor.name": "test-monitor"}}}
 
         # Perform the search with the above query
@@ -165,11 +166,11 @@ async def test_get_monitor(self):
         (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)),
         "Plugin not supported for opensearch version",
     )
-    async def test_run_monitor(self):
+    async def test_run_monitor(self) -> None:
         # Create a dummy monitor
         await self.test_create_monitor()
 
-        # Create a monitor search query by it's name
+        # Create a monitor search query by its name
         query = {"query": {"match": {"monitor.name": "test-monitor"}}}
 
         # Perform the search with the above query
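A hedged sketch of the monitor-search round trip these alerting tests drive end to end; `client` is an assumed async client against a cluster with the Alerting plugin, and the monitor name is a placeholder.

```python
# Find a monitor's id by name, mirroring test_search_monitor/test_get_monitor.
from typing import Any, Optional


async def find_monitor_id(client: Any, name: str) -> Optional[str]:
    query = {"query": {"match": {"monitor.name": name}}}
    response = await client.alerting.search_monitor(query)
    hits = response["hits"]["hits"]
    return hits[0]["_id"] if hits else None
```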
diff --git a/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py b/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py
index d4379648..4f5fcfa1 100644
--- a/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py
+++ b/test_opensearchpy/test_async/test_server/test_plugins/test_index_management.py
@@ -12,12 +13,13 @@
 from __future__ import unicode_literals
 
 import pytest
+from _pytest.mark.structures import MarkDecorator
 
 from opensearchpy.exceptions import NotFoundError
 
 from .. import AsyncOpenSearchTestCase
 
-pytestmark = pytest.mark.asyncio
+pytestmark: MarkDecorator = pytest.mark.asyncio
 
 
 class TestIndexManagementPlugin(AsyncOpenSearchTestCase):
@@ -68,7 +69,7 @@ class TestIndexManagementPlugin(AsyncOpenSearchTestCase):
         }
     }
 
-    async def test_create_policy(self):
+    async def test_create_policy(self) -> None:
         # Test to create policy
         response = await self.client.index_management.put_policy(
             policy=self.POLICY_NAME, body=self.POLICY_CONTENT
@@ -77,7 +78,7 @@ async def test_create_policy(self):
         self.assertNotIn("errors", response)
         self.assertIn("_id", response)
 
-    async def test_get_policy(self):
+    async def test_get_policy(self) -> None:
         # Create a policy
         await self.test_create_policy()
 
@@ -88,7 +89,7 @@ async def test_get_policy(self):
         self.assertIn("_id", response)
         self.assertEqual(response["_id"], self.POLICY_NAME)
 
-    async def test_update_policy(self):
+    async def test_update_policy(self) -> None:
         # Create a policy
         await self.test_create_policy()
 
@@ -110,7 +111,7 @@ async def test_update_policy(self):
         self.assertNotIn("errors", response)
         self.assertIn("_id", response)
 
-    async def test_delete_policy(self):
+    async def test_delete_policy(self) -> None:
         # Create a policy
         await self.test_create_policy()
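For context, a hedged sketch of the ISM policy round trip these tests cover; the policy body below is a minimal invented stub (not the tests' `POLICY_CONTENT`), and real policies define meaningful states and transitions.

```python
# Create-then-fetch an ISM policy, as test_create_policy/test_get_policy do.
from typing import Any

POLICY = {
    "policy": {
        "description": "hypothetical policy",
        "default_state": "hot",
        "states": [{"name": "hot", "actions": [], "transitions": []}],
    }
}


async def ensure_policy(client: Any, name: str) -> Any:
    await client.index_management.put_policy(policy=name, body=POLICY)
    return await client.index_management.get_policy(name)
```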
diff --git a/test_opensearchpy/test_async/test_server/test_rest_api_spec.py b/test_opensearchpy/test_async/test_server/test_rest_api_spec.py
index 27b20113..53aeb3ad 100644
--- a/test_opensearchpy/test_async/test_server/test_rest_api_spec.py
+++ b/test_opensearchpy/test_async/test_server/test_rest_api_spec.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
@@ -26,14 +27,16 @@
 """
-Dynamically generated set of TestCases based on set of yaml files decribing
+Dynamically generated set of TestCases based on set of yaml files describing
 some integration tests. These files are shared among all official OpenSearch
 clients.
 """
 import inspect
 import warnings
+from typing import Any
 
 import pytest
+from _pytest.mark.structures import MarkDecorator
 
 from opensearchpy import OpenSearchWarning
 from opensearchpy.helpers.test import _get_version
@@ -46,19 +49,19 @@
     YamlRunner,
 )
 
-pytestmark = pytest.mark.asyncio
+pytestmark: MarkDecorator = pytest.mark.asyncio
 
 OPENSEARCH_VERSION = None
 
 
-async def await_if_coro(x):
+async def await_if_coro(x: Any) -> Any:
     if inspect.iscoroutine(x):
         return await x
     return x
 
 
 class AsyncYamlRunner(YamlRunner):
-    async def setup(self):
+    async def setup(self) -> None:
         # Pull skips from individual tests to not do unnecessary setup.
         skip_code = []
         for action in self._run_code:
@@ -76,12 +79,12 @@ async def setup(self):
         if self._setup_code:
             await self.run_code(self._setup_code)
 
-    async def teardown(self):
+    async def teardown(self) -> Any:
         if self._teardown_code:
             self.section("teardown")
             await self.run_code(self._teardown_code)
 
-    async def opensearch_version(self):
+    async def opensearch_version(self) -> Any:
         global OPENSEARCH_VERSION
         if OPENSEARCH_VERSION is None:
             version_string = (await self.client.info())["version"]["number"]
@@ -91,10 +94,10 @@ async def opensearch_version(self):
             OPENSEARCH_VERSION = tuple(int(v) if v.isdigit() else 999 for v in version)
         return OPENSEARCH_VERSION
 
-    def section(self, name):
+    def section(self, name: str) -> None:
         print(("=" * 10) + " " + name + " " + ("=" * 10))
 
-    async def run(self):
+    async def run(self) -> Any:
         try:
             await self.setup()
             self.section("test")
@@ -105,8 +108,8 @@ async def run(self):
         except Exception:
             pass
 
-    async def run_code(self, test):
-        """Execute an instruction based on it's type."""
+    async def run_code(self, test: Any) -> Any:
+        """Execute an instruction based on its type."""
         for action in test:
             assert len(action) == 1
             action_type, action = list(action.items())[0]
@@ -117,7 +120,7 @@ async def run_code(self, test):
         else:
             raise RuntimeError("Invalid action type %r" % (action_type,))
 
-    async def run_do(self, action):
+    async def run_do(self, action: Any) -> Any:
         api = self.client
         headers = action.pop("headers", None)
         catch = action.pop("catch", None)
@@ -126,7 +129,7 @@ async def run_do(self, action):
         assert len(action) == 1
 
         # Remove the x_pack_rest_user authentication
-        # if it's given via headers. We're already authenticated
+        # if its given via headers. We're already authenticated
         # via the 'elastic' user.
         if (
             headers
@@ -169,7 +172,7 @@ async def run_do(self, action):
         # Filter out warnings raised by other components.
         caught_warnings = [
-            str(w.message)
+            str(w.message)  # type: ignore
             for w in caught_warnings
             if w.category == OpenSearchWarning
             and str(w.message) not in allowed_warnings
@@ -177,13 +180,13 @@ async def run_do(self, action):
 
         # Sorting removes the issue with order raised. We only care about
         # if all warnings are raised in the single API call.
-        if warn and sorted(warn) != sorted(caught_warnings):
+        if warn and sorted(warn) != sorted(caught_warnings):  # type: ignore
             raise AssertionError(
                 "Expected warnings not equal to actual warnings: expected=%r actual=%r"
                 % (warn, caught_warnings)
             )
 
-    async def run_skip(self, skip):
+    async def run_skip(self, skip: Any) -> Any:
         if "features" in skip:
             features = skip["features"]
             if not isinstance(features, (tuple, list)):
@@ -203,19 +206,19 @@ async def run_skip(self, skip):
         if min_version <= (await self.opensearch_version()) <= max_version:
             pytest.skip(reason)
 
-    async def _feature_enabled(self, name):
+    async def _feature_enabled(self, name: str) -> Any:
         return False
 
 
-@pytest.fixture(scope="function")
-def async_runner(async_client):
+@pytest.fixture(scope="function")  # type: ignore
+def async_runner(async_client: Any) -> AsyncYamlRunner:
     return AsyncYamlRunner(async_client)
 
 
 if RUN_ASYNC_REST_API_TESTS:
 
-    @pytest.mark.parametrize("test_spec", YAML_TEST_SPECS)
-    async def test_rest_api_spec(test_spec, async_runner):
+    @pytest.mark.parametrize("test_spec", YAML_TEST_SPECS)  # type: ignore
+    async def test_rest_api_spec(test_spec: Any, async_runner: Any) -> None:
         if test_spec.get("skip", False):
             pytest.skip("Manually skipped in 'SKIP_TESTS'")
         async_runner.use_spec(test_spec)
diff --git a/test_opensearchpy/test_async/test_server_secured/__init__.py b/test_opensearchpy/test_async/test_server_secured/__init__.py
index 6c0097cd..22c54ac8 100644
--- a/test_opensearchpy/test_async/test_server_secured/__init__.py
+++ b/test_opensearchpy/test_async/test_server_secured/__init__.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py b/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py
index 39189c21..6751ed29 100644
--- a/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py
+++ b/test_opensearchpy/test_async/test_server_secured/test_security_plugin.py
@@ -11,18 +11,19 @@
 
 from __future__ import unicode_literals
 
-from unittest import IsolatedAsyncioTestCase
+from unittest import IsolatedAsyncioTestCase  # type: ignore
 
 import pytest
+from _pytest.mark.structures import MarkDecorator
 
 from opensearchpy._async.helpers.test import get_test_client
 from opensearchpy.connection.async_connections import add_connection
 from opensearchpy.exceptions import NotFoundError
 
-pytestmark = pytest.mark.asyncio
+pytestmark: MarkDecorator = pytest.mark.asyncio
 
 
-class TestSecurityPlugin(IsolatedAsyncioTestCase):
+class TestSecurityPlugin(IsolatedAsyncioTestCase):  # type: ignore
     ROLE_NAME = "test-role"
     ROLE_CONTENT = {
         "cluster_permissions": ["cluster_monitor"],
@@ -40,17 +41,17 @@ class TestSecurityPlugin(IsolatedAsyncioTestCase):
     USER_NAME = "test-user"
     USER_CONTENT = {"password": "opensearchpy@123", "opendistro_security_roles": []}
 
-    async def asyncSetUp(self):
+    async def asyncSetUp(self) -> None:
         self.client = await get_test_client(
             verify_certs=False, http_auth=("admin", "admin")
         )
         await add_connection("default", self.client)
 
-    async def asyncTearDown(self):
+    async def asyncTearDown(self) -> None:
         if self.client:
             await self.client.close()
 
-    async def test_create_role(self):
+    async def test_create_role(self) -> None:
         # Test to create role
         response = await self.client.security.create_role(
             self.ROLE_NAME, body=self.ROLE_CONTENT
@@ -59,7 +60,7 @@ async def test_create_role(self):
         self.assertNotIn("errors", response)
         self.assertIn(response.get("status"), ["CREATED", "OK"])
 
-    async def test_create_role_with_body_param_empty(self):
+    async def test_create_role_with_body_param_empty(self) -> None:
         try:
             await self.client.security.create_role(self.ROLE_NAME, body="")
         except ValueError as error:
@@ -67,7 +68,7 @@ async def test_create_role_with_body_param_empty(self):
         else:
             assert False
 
-    async def test_get_role(self):
+    async def test_get_role(self) -> None:
         # Create a role
         await self.test_create_role()
 
@@ -77,7 +78,7 @@ async def test_get_role(self):
         self.assertNotIn("errors", response)
         self.assertIn(self.ROLE_NAME, response)
 
-    async def test_update_role(self):
+    async def test_update_role(self) -> None:
         # Create a role
         await self.test_create_role()
 
@@ -92,7 +93,7 @@ async def test_update_role(self):
         self.assertNotIn("errors", response)
         self.assertEqual("OK", response.get("status"))
 
-    async def test_delete_role(self):
+    async def test_delete_role(self) -> None:
         # Create a role
         await self.test_create_role()
 
@@ -105,7 +106,7 @@ async def test_delete_role(self):
         with self.assertRaises(NotFoundError):
             response = await self.client.security.get_role(self.ROLE_NAME)
 
-    async def test_create_user(self):
+    async def test_create_user(self) -> None:
         # Test to create user
         response = await self.client.security.create_user(
             self.USER_NAME, body=self.USER_CONTENT
@@ -114,7 +115,7 @@ async def test_create_user(self):
         self.assertNotIn("errors", response)
         self.assertIn(response.get("status"), ["CREATED", "OK"])
 
-    async def test_create_user_with_body_param_empty(self):
+    async def test_create_user_with_body_param_empty(self) -> None:
         try:
             await self.client.security.create_user(self.USER_NAME, body="")
         except ValueError as error:
@@ -122,7 +123,7 @@ async def test_create_user_with_body_param_empty(self):
         else:
             assert False
 
-    async def test_create_user_with_role(self):
+    async def test_create_user_with_role(self) -> None:
         await self.test_create_role()
 
         # Test to create user
@@ -137,7 +138,7 @@ async def test_create_user_with_role(self):
         self.assertNotIn("errors", response)
         self.assertIn(response.get("status"), ["CREATED", "OK"])
 
-    async def test_get_user(self):
+    async def test_get_user(self) -> None:
         # Create a user
         await self.test_create_user()
 
@@ -147,7 +148,7 @@ async def test_get_user(self):
         self.assertNotIn("errors", response)
         self.assertIn(self.USER_NAME, response)
 
-    async def test_update_user(self):
+    async def test_update_user(self) -> None:
         # Create a user
         await self.test_create_user()
 
@@ -162,7 +163,7 @@ async def test_update_user(self):
         self.assertNotIn("errors", response)
         self.assertEqual("OK", response.get("status"))
 
-    async def test_delete_user(self):
+    async def test_delete_user(self) -> None:
         # Create a user
         await self.test_create_user()
 
@@ -174,3 +175,55 @@ async def test_delete_user(self):
         # Try fetching the user
         with self.assertRaises(NotFoundError):
             response = await self.client.security.get_user(self.USER_NAME)
+
+    async def test_health_check(self) -> None:
+        response = await self.client.security.health_check()
+        self.assertNotIn("errors", response)
+        self.assertEqual("UP", response.get("status"))
+
+    async def test_health(self) -> None:
+        response = await self.client.security.health()
+        self.assertNotIn("errors", response)
+        self.assertEqual("UP", response.get("status"))
+
+    AUDIT_CONFIG_SETTINGS = {
+        "enabled": True,
+        "audit": {
+            "ignore_users": [],
+            "ignore_requests": [],
+            "disabled_rest_categories": ["AUTHENTICATED", "GRANTED_PRIVILEGES"],
["AUTHENTICATED", "GRANTED_PRIVILEGES"], + "log_request_body": False, + "resolve_indices": False, + "resolve_bulk_requests": False, + "exclude_sensitive_headers": True, + "enable_transport": False, + "enable_rest": True, + }, + "compliance": { + "enabled": True, + "write_log_diffs": False, + "read_watched_fields": {}, + "read_ignore_users": [], + "write_watched_indices": [], + "write_ignore_users": [], + "read_metadata_only": True, + "write_metadata_only": True, + "external_config": False, + "internal_config": True, + }, + } + + async def test_update_audit_config(self) -> None: + response = await self.client.security.update_audit_config( + body=self.AUDIT_CONFIG_SETTINGS + ) + self.assertNotIn("errors", response) + self.assertEqual("OK", response.get("status")) + + async def test_update_audit_configuration(self) -> None: + response = await self.client.security.update_audit_configuration( + body=self.AUDIT_CONFIG_SETTINGS + ) + self.assertNotIn("errors", response) + self.assertEqual("OK", response.get("status")) diff --git a/test_opensearchpy/test_async/test_signer.py b/test_opensearchpy/test_async/test_signer.py index 2c642db4..319340da 100644 --- a/test_opensearchpy/test_async/test_signer.py +++ b/test_opensearchpy/test_async/test_signer.py @@ -8,17 +8,17 @@ # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -import sys import uuid import pytest +from _pytest.mark.structures import MarkDecorator from mock import Mock -pytestmark = pytest.mark.asyncio +pytestmark: MarkDecorator = pytest.mark.asyncio class TestAsyncSigner: - def mock_session(self): + def mock_session(self) -> Mock: access_key = uuid.uuid4().hex secret_key = uuid.uuid4().hex token = uuid.uuid4().hex @@ -26,42 +26,36 @@ def mock_session(self): dummy_session.access_key = access_key dummy_session.secret_key = secret_key dummy_session.token = token + + del dummy_session.get_frozen_credentials + return dummy_session - @pytest.mark.skipif( - sys.version_info < (3, 6), reason="AWSV4SignerAsyncAuth requires python3.6+" - ) - async def test_aws_signer_async_as_http_auth(self): + async def test_aws_signer_async_as_http_auth(self) -> None: region = "us-west-2" from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth auth = AWSV4SignerAsyncAuth(self.mock_session(), region) - headers = auth("GET", "http://localhost", {}, {}) + headers = auth("GET", "http://localhost") assert "Authorization" in headers assert "X-Amz-Date" in headers assert "X-Amz-Security-Token" in headers - @pytest.mark.skipif( - sys.version_info < (3, 6), reason="AWSV4SignerAuth requires python3.6+" - ) - async def test_aws_signer_async_when_region_is_null(self): + async def test_aws_signer_async_when_region_is_null(self) -> None: session = self.mock_session() from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth with pytest.raises(ValueError) as e: - AWSV4SignerAsyncAuth(session, None) + AWSV4SignerAsyncAuth(session, None) # type: ignore assert str(e.value) == "Region cannot be empty" with pytest.raises(ValueError) as e: AWSV4SignerAsyncAuth(session, "") assert str(e.value) == "Region cannot be empty" - @pytest.mark.skipif( - sys.version_info < (3, 6), reason="AWSV4SignerAuth requires python3.6+" - ) - async def test_aws_signer_async_when_credentials_is_null(self): + async def test_aws_signer_async_when_credentials_is_null(self) -> None: region = "us-west-1" from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth @@ -70,18 +64,43 @@ async def test_aws_signer_async_when_credentials_is_null(self): 
diff --git a/test_opensearchpy/test_async/test_signer.py b/test_opensearchpy/test_async/test_signer.py
index 2c642db4..319340da 100644
--- a/test_opensearchpy/test_async/test_signer.py
+++ b/test_opensearchpy/test_async/test_signer.py
@@ -8,17 +8,17 @@
 # Modifications Copyright OpenSearch Contributors. See
 # GitHub history for details.
 
-import sys
 import uuid
 
 import pytest
+from _pytest.mark.structures import MarkDecorator
 from mock import Mock
 
-pytestmark = pytest.mark.asyncio
+pytestmark: MarkDecorator = pytest.mark.asyncio
 
 
 class TestAsyncSigner:
-    def mock_session(self):
+    def mock_session(self) -> Mock:
         access_key = uuid.uuid4().hex
         secret_key = uuid.uuid4().hex
         token = uuid.uuid4().hex
@@ -26,42 +26,36 @@ def mock_session(self):
         dummy_session.access_key = access_key
         dummy_session.secret_key = secret_key
         dummy_session.token = token
+
+        del dummy_session.get_frozen_credentials
+
         return dummy_session
 
-    @pytest.mark.skipif(
-        sys.version_info < (3, 6), reason="AWSV4SignerAsyncAuth requires python3.6+"
-    )
-    async def test_aws_signer_async_as_http_auth(self):
+    async def test_aws_signer_async_as_http_auth(self) -> None:
         region = "us-west-2"
 
         from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth
 
         auth = AWSV4SignerAsyncAuth(self.mock_session(), region)
-        headers = auth("GET", "http://localhost", {}, {})
+        headers = auth("GET", "http://localhost")
         assert "Authorization" in headers
         assert "X-Amz-Date" in headers
         assert "X-Amz-Security-Token" in headers
 
-    @pytest.mark.skipif(
-        sys.version_info < (3, 6), reason="AWSV4SignerAuth requires python3.6+"
-    )
-    async def test_aws_signer_async_when_region_is_null(self):
+    async def test_aws_signer_async_when_region_is_null(self) -> None:
         session = self.mock_session()
 
         from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth
 
         with pytest.raises(ValueError) as e:
-            AWSV4SignerAsyncAuth(session, None)
+            AWSV4SignerAsyncAuth(session, None)  # type: ignore
         assert str(e.value) == "Region cannot be empty"
 
         with pytest.raises(ValueError) as e:
             AWSV4SignerAsyncAuth(session, "")
         assert str(e.value) == "Region cannot be empty"
 
-    @pytest.mark.skipif(
-        sys.version_info < (3, 6), reason="AWSV4SignerAuth requires python3.6+"
-    )
-    async def test_aws_signer_async_when_credentials_is_null(self):
+    async def test_aws_signer_async_when_credentials_is_null(self) -> None:
         region = "us-west-1"
 
         from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth
 
@@ -70,18 +64,43 @@
             AWSV4SignerAsyncAuth(None, region)
         assert str(e.value) == "Credentials cannot be empty"
 
-    @pytest.mark.skipif(
-        sys.version_info < (3, 6), reason="AWSV4SignerAsyncAuth requires python3.6+"
-    )
-    async def test_aws_signer_async_when_service_is_specified(self):
+    async def test_aws_signer_async_when_service_is_specified(self) -> None:
         region = "us-west-2"
         service = "aoss"
 
         from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth
 
         auth = AWSV4SignerAsyncAuth(self.mock_session(), region, service)
-        headers = auth("GET", "http://localhost", {}, {})
+        headers = auth("GET", "http://localhost")
         assert "Authorization" in headers
         assert "X-Amz-Date" in headers
         assert "X-Amz-Security-Token" in headers
         assert "X-Amz-Content-SHA256" in headers
+
+
+class TestAsyncSignerWithFrozenCredentials(TestAsyncSigner):
+    def mock_session(self, disable_get_frozen: bool = True) -> Mock:
+        access_key = uuid.uuid4().hex
+        secret_key = uuid.uuid4().hex
+        token = uuid.uuid4().hex
+        dummy_session = Mock()
+        dummy_session.access_key = access_key
+        dummy_session.secret_key = secret_key
+        dummy_session.token = token
+        dummy_session.get_frozen_credentials = Mock(return_value=dummy_session)
+
+        return dummy_session
+
+    async def test_aws_signer_async_frozen_credentials_as_http_auth(self) -> None:
+        region = "us-west-2"
+
+        from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth
+
+        mock_session = self.mock_session()
+
+        auth = AWSV4SignerAsyncAuth(mock_session, region)
+        headers = auth("GET", "http://localhost")
+        assert "Authorization" in headers
+        assert "X-Amz-Date" in headers
+        assert "X-Amz-Security-Token" in headers
+        assert len(mock_session.get_frozen_credentials.mock_calls) == 1
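The tests above exercise the signer with mocked sessions (with and without `get_frozen_credentials`, which guards against the refreshable-credentials race fixed in #470). A hedged sketch of real-world usage; the region and service ("es" for the managed service, "aoss" for Serverless) are assumptions:

```python
# Sign a request with AWSV4SignerAsyncAuth using real botocore credentials.
import boto3

from opensearchpy.helpers.asyncsigner import AWSV4SignerAsyncAuth

credentials = boto3.Session().get_credentials()
auth = AWSV4SignerAsyncAuth(credentials, "us-west-2", "es")

# The signer is called with just method and URL and returns SigV4 headers.
headers = auth("GET", "http://localhost:9200")
assert "Authorization" in headers and "X-Amz-Date" in headers
```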
AsyncTransport([{"host": "localhost"}]) - await t._async_call() - assert isinstance(t.connection_pool, DummyConnectionPool) + async def test_single_connection_uses_dummy_connection_pool(self) -> None: + t1: Any = AsyncTransport([{}]) + await t1._async_call() + assert isinstance(t1.connection_pool, DummyConnectionPool) + t2: Any = AsyncTransport([{"host": "localhost"}]) + await t2._async_call() + assert isinstance(t2.connection_pool, DummyConnectionPool) - async def test_request_timeout_extracted_from_params_and_passed(self): - t = AsyncTransport([{}], connection_class=DummyConnection) + async def test_request_timeout_extracted_from_params_and_passed(self) -> None: + t: Any = AsyncTransport([{}], connection_class=DummyConnection) await t.perform_request("GET", "/", params={"request_timeout": 42}) assert 1 == len(t.get_connection().calls) @@ -140,8 +142,8 @@ async def test_request_timeout_extracted_from_params_and_passed(self): "headers": None, } == t.get_connection().calls[0][1] - async def test_timeout_extracted_from_params_and_passed(self): - t = AsyncTransport([{}], connection_class=DummyConnection) + async def test_timeout_extracted_from_params_and_passed(self) -> None: + t: Any = AsyncTransport([{}], connection_class=DummyConnection) await t.perform_request("GET", "/", params={"timeout": 84}) assert 1 == len(t.get_connection().calls) @@ -152,8 +154,10 @@ async def test_timeout_extracted_from_params_and_passed(self): "headers": None, } == t.get_connection().calls[0][1] - async def test_opaque_id(self): - t = AsyncTransport([{}], opaque_id="app-1", connection_class=DummyConnection) + async def test_opaque_id(self) -> None: + t: Any = AsyncTransport( + [{}], opaque_id="app-1", connection_class=DummyConnection + ) await t.perform_request("GET", "/") assert 1 == len(t.get_connection().calls) @@ -174,8 +178,8 @@ async def test_opaque_id(self): "headers": {"x-opaque-id": "request-1"}, } == t.get_connection().calls[1][1] - async def test_request_with_custom_user_agent_header(self): - t = AsyncTransport([{}], connection_class=DummyConnection) + async def test_request_with_custom_user_agent_header(self) -> None: + t: Any = AsyncTransport([{}], connection_class=DummyConnection) await t.perform_request( "GET", "/", headers={"user-agent": "my-custom-value/1.2.3"} @@ -187,8 +191,8 @@ async def test_request_with_custom_user_agent_header(self): "headers": {"user-agent": "my-custom-value/1.2.3"}, } == t.get_connection().calls[0][1] - async def test_send_get_body_as_source(self): - t = AsyncTransport( + async def test_send_get_body_as_source(self) -> None: + t: Any = AsyncTransport( [{}], send_get_body_as="source", connection_class=DummyConnection ) @@ -196,8 +200,8 @@ async def test_send_get_body_as_source(self): assert 1 == len(t.get_connection().calls) assert ("GET", "/", {"source": "{}"}, None) == t.get_connection().calls[0][0] - async def test_send_get_body_as_post(self): - t = AsyncTransport( + async def test_send_get_body_as_post(self) -> None: + t: Any = AsyncTransport( [{}], send_get_body_as="POST", connection_class=DummyConnection ) @@ -205,8 +209,8 @@ async def test_send_get_body_as_post(self): assert 1 == len(t.get_connection().calls) assert ("POST", "/", None, b"{}") == t.get_connection().calls[0][0] - async def test_body_gets_encoded_into_bytes(self): - t = AsyncTransport([{}], connection_class=DummyConnection) + async def test_body_gets_encoded_into_bytes(self) -> None: + t: Any = AsyncTransport([{}], connection_class=DummyConnection) await t.perform_request("GET", "/", body="你好") assert 
         assert 1 == len(t.get_connection().calls)
@@ -217,16 +221,16 @@
             b"\xe4\xbd\xa0\xe5\xa5\xbd",
         ) == t.get_connection().calls[0][0]
 
-    async def test_body_bytes_get_passed_untouched(self):
-        t = AsyncTransport([{}], connection_class=DummyConnection)
+    async def test_body_bytes_get_passed_untouched(self) -> None:
+        t: Any = AsyncTransport([{}], connection_class=DummyConnection)
 
         body = b"\xe4\xbd\xa0\xe5\xa5\xbd"
         await t.perform_request("GET", "/", body=body)
         assert 1 == len(t.get_connection().calls)
         assert ("GET", "/", None, body) == t.get_connection().calls[0][0]
 
-    async def test_body_surrogates_replaced_encoded_into_bytes(self):
-        t = AsyncTransport([{}], connection_class=DummyConnection)
+    async def test_body_surrogates_replaced_encoded_into_bytes(self) -> None:
+        t: Any = AsyncTransport([{}], connection_class=DummyConnection)
 
         await t.perform_request("GET", "/", body="你好\uda6a")
         assert 1 == len(t.get_connection().calls)
@@ -237,37 +241,37 @@
             b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa",
         ) == t.get_connection().calls[0][0]
 
-    async def test_kwargs_passed_on_to_connections(self):
-        t = AsyncTransport([{"host": "google.com"}], port=123)
+    async def test_kwargs_passed_on_to_connections(self) -> None:
+        t: Any = AsyncTransport([{"host": "google.com"}], port=123)
         await t._async_call()
         assert 1 == len(t.connection_pool.connections)
         assert "http://google.com:123" == t.connection_pool.connections[0].host
 
-    async def test_kwargs_passed_on_to_connection_pool(self):
+    async def test_kwargs_passed_on_to_connection_pool(self) -> None:
         dt = object()
-        t = AsyncTransport([{}, {}], dead_timeout=dt)
+        t: Any = AsyncTransport([{}, {}], dead_timeout=dt)
         await t._async_call()
         assert dt is t.connection_pool.dead_timeout
 
-    async def test_custom_connection_class(self):
+    async def test_custom_connection_class(self) -> None:
         class MyConnection(object):
-            def __init__(self, **kwargs):
+            def __init__(self, **kwargs: Any) -> None:
                 self.kwargs = kwargs
 
-        t = AsyncTransport([{}], connection_class=MyConnection)
+        t: Any = AsyncTransport([{}], connection_class=MyConnection)
         await t._async_call()
         assert 1 == len(t.connection_pool.connections)
         assert isinstance(t.connection_pool.connections[0], MyConnection)
 
-    async def test_add_connection(self):
-        t = AsyncTransport([{}], randomize_hosts=False)
+    async def test_add_connection(self) -> None:
+        t: Any = AsyncTransport([{}], randomize_hosts=False)
         t.add_connection({"host": "google.com", "port": 1234})
 
         assert 2 == len(t.connection_pool.connections)
         assert "http://google.com:1234" == t.connection_pool.connections[1].host
 
-    async def test_request_will_fail_after_X_retries(self):
-        t = AsyncTransport(
+    async def test_request_will_fail_after_X_retries(self) -> None:
+        t: Any = AsyncTransport(
             [{"exception": ConnectionError("abandon ship")}],
             connection_class=DummyConnection,
         )
@@ -281,8 +285,8 @@
         assert connection_error
         assert 4 == len(t.get_connection().calls)
 
-    async def test_failed_connection_will_be_marked_as_dead(self):
-        t = AsyncTransport(
+    async def test_failed_connection_will_be_marked_as_dead(self) -> None:
+        t: Any = AsyncTransport(
             [{"exception": ConnectionError("abandon ship")}] * 2,
             connection_class=DummyConnection,
         )
@@ -296,9 +300,11 @@
         assert connection_error
         assert 0 == len(t.connection_pool.connections)
-    async def test_resurrected_connection_will_be_marked_as_live_on_success(self):
+    async def test_resurrected_connection_will_be_marked_as_live_on_success(
+        self,
+    ) -> None:
         for method in ("GET", "HEAD"):
-            t = AsyncTransport([{}, {}], connection_class=DummyConnection)
+            t: Any = AsyncTransport([{}, {}], connection_class=DummyConnection)
             await t._async_call()
             con1 = t.connection_pool.get_connection()
             con2 = t.connection_pool.get_connection()
@@ -309,8 +315,10 @@
             assert 1 == len(t.connection_pool.connections)
             assert 1 == len(t.connection_pool.dead_count)
 
-    async def test_sniff_will_use_seed_connections(self):
-        t = AsyncTransport([{"data": CLUSTER_NODES}], connection_class=DummyConnection)
+    async def test_sniff_will_use_seed_connections(self) -> None:
+        t: Any = AsyncTransport(
+            [{"data": CLUSTER_NODES}], connection_class=DummyConnection
+        )
         await t._async_call()
         t.set_connections([{"data": "invalid"}])
 
@@ -318,8 +326,8 @@
         assert 1 == len(t.connection_pool.connections)
         assert "http://1.1.1.1:123" == t.get_connection().host
 
-    async def test_sniff_on_start_fetches_and_uses_nodes_list(self):
-        t = AsyncTransport(
+    async def test_sniff_on_start_fetches_and_uses_nodes_list(self) -> None:
+        t: Any = AsyncTransport(
             [{"data": CLUSTER_NODES}],
             connection_class=DummyConnection,
             sniff_on_start=True,
@@ -330,8 +338,8 @@
         assert 1 == len(t.connection_pool.connections)
         assert "http://1.1.1.1:123" == t.get_connection().host
 
-    async def test_sniff_on_start_ignores_sniff_timeout(self):
-        t = AsyncTransport(
+    async def test_sniff_on_start_ignores_sniff_timeout(self) -> None:
+        t: Any = AsyncTransport(
             [{"data": CLUSTER_NODES}],
             connection_class=DummyConnection,
             sniff_on_start=True,
@@ -344,8 +352,8 @@
             0
         ].calls[0]
 
-    async def test_sniff_uses_sniff_timeout(self):
-        t = AsyncTransport(
+    async def test_sniff_uses_sniff_timeout(self) -> None:
+        t: Any = AsyncTransport(
             [{"data": CLUSTER_NODES}],
             connection_class=DummyConnection,
             sniff_timeout=42,
@@ -357,8 +365,8 @@
             0
         ].calls[0]
 
-    async def test_sniff_reuses_connection_instances_if_possible(self):
-        t = AsyncTransport(
+    async def test_sniff_reuses_connection_instances_if_possible(self) -> None:
+        t: Any = AsyncTransport(
             [{"data": CLUSTER_NODES}, {"host": "1.1.1.1", "port": 123}],
             connection_class=DummyConnection,
             randomize_hosts=False,
@@ -371,8 +379,8 @@
         assert 1 == len(t.connection_pool.connections)
         assert connection is t.get_connection()
 
-    async def test_sniff_on_fail_triggers_sniffing_on_fail(self):
-        t = AsyncTransport(
+    async def test_sniff_on_fail_triggers_sniffing_on_fail(self) -> None:
+        t: Any = AsyncTransport(
             [{"exception": ConnectionError("abandon ship")}, {"data": CLUSTER_NODES}],
             connection_class=DummyConnection,
             sniff_on_connection_fail=True,
@@ -394,9 +402,11 @@
         assert "http://1.1.1.1:123" == t.get_connection().host
 
     @patch("opensearchpy._async.transport.AsyncTransport.sniff_hosts")
-    async def test_sniff_on_fail_failing_does_not_prevent_retires(self, sniff_hosts):
+    async def test_sniff_on_fail_failing_does_not_prevent_retires(
+        self, sniff_hosts: Any
+    ) -> None:
failed")] - t = AsyncTransport( + t: Any = AsyncTransport( [{"exception": ConnectionError("abandon ship")}, {"data": CLUSTER_NODES}], connection_class=DummyConnection, sniff_on_connection_fail=True, @@ -412,8 +422,8 @@ async def test_sniff_on_fail_failing_does_not_prevent_retires(self, sniff_hosts) assert 1 == len(conn_err.calls) assert 1 == len(conn_data.calls) - async def test_sniff_after_n_seconds(self, event_loop): - t = AsyncTransport( + async def test_sniff_after_n_seconds(self, event_loop: Any) -> None: + t: Any = AsyncTransport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, sniffer_timeout=5, @@ -433,10 +443,10 @@ async def test_sniff_after_n_seconds(self, event_loop): assert "http://1.1.1.1:123" == t.get_connection().host assert event_loop.time() - 1 < t.last_sniff < event_loop.time() + 0.01 - async def test_sniff_7x_publish_host(self): + async def test_sniff_7x_publish_host(self) -> None: # Test the response shaped when a 7.x node has publish_host set # and the returend data is shaped in the fqdn/ip:port format. - t = AsyncTransport( + t: Any = AsyncTransport( [{"data": CLUSTER_NODES_7x_PUBLISH_HOST}], connection_class=DummyConnection, sniff_timeout=42, @@ -449,23 +459,25 @@ async def test_sniff_7x_publish_host(self): "port": 123, } - async def test_transport_close_closes_all_pool_connections(self): - t = AsyncTransport([{}], connection_class=DummyConnection) - await t._async_call() + async def test_transport_close_closes_all_pool_connections(self) -> None: + t1: Any = AsyncTransport([{}], connection_class=DummyConnection) + await t1._async_call() - assert not any([conn.closed for conn in t.connection_pool.connections]) - await t.close() - assert all([conn.closed for conn in t.connection_pool.connections]) + assert not any([conn.closed for conn in t1.connection_pool.connections]) + await t1.close() + assert all([conn.closed for conn in t1.connection_pool.connections]) - t = AsyncTransport([{}, {}], connection_class=DummyConnection) - await t._async_call() + t2: Any = AsyncTransport([{}, {}], connection_class=DummyConnection) + await t2._async_call() - assert not any([conn.closed for conn in t.connection_pool.connections]) - await t.close() - assert all([conn.closed for conn in t.connection_pool.connections]) + assert not any([conn.closed for conn in t2.connection_pool.connections]) + await t2.close() + assert all([conn.closed for conn in t2.connection_pool.connections]) - async def test_sniff_on_start_error_if_no_sniffed_hosts(self, event_loop): - t = AsyncTransport( + async def test_sniff_on_start_error_if_no_sniffed_hosts( + self, event_loop: Any + ) -> None: + t: Any = AsyncTransport( [ {"data": ""}, {"data": ""}, @@ -481,8 +493,10 @@ async def test_sniff_on_start_error_if_no_sniffed_hosts(self, event_loop): await t._async_call() assert str(e.value) == "TransportError(N/A, 'Unable to sniff hosts.')" - async def test_sniff_on_start_waits_for_sniff_to_complete(self, event_loop): - t = AsyncTransport( + async def test_sniff_on_start_waits_for_sniff_to_complete( + self, event_loop: Any + ) -> None: + t: Any = AsyncTransport( [ {"delay": 1, "data": ""}, {"delay": 1, "data": ""}, @@ -517,8 +531,10 @@ async def test_sniff_on_start_waits_for_sniff_to_complete(self, event_loop): # and then resolved immediately after. 
         assert 1 <= duration < 2
 
-    async def test_sniff_on_start_close_unlocks_async_calls(self, event_loop):
-        t = AsyncTransport(
+    async def test_sniff_on_start_close_unlocks_async_calls(
+        self, event_loop: Any
+    ) -> None:
+        t: Any = AsyncTransport(
             [
                 {"delay": 10, "data": CLUSTER_NODES},
             ],
@@ -544,7 +560,7 @@
         # A lot quicker than 10 seconds defined in 'delay'
         assert duration < 1
 
-    async def test_init_connection_pool_with_many_hosts(self):
+    async def test_init_connection_pool_with_many_hosts(self) -> None:
         """
         Check init of connection pool with multiple connections.
 
@@ -555,14 +571,14 @@
         """
         amt_hosts = 4
         hosts = [{"host": "localhost", "port": 9092}] * amt_hosts
-        t = AsyncTransport(
+        t: Any = AsyncTransport(
             hosts=hosts,
         )
         await t._async_init()
         assert len(t.connection_pool.connections) == amt_hosts
         await t._async_call()
 
-    async def test_init_pool_with_connection_class_to_many_hosts(self):
+    async def test_init_pool_with_connection_class_to_many_hosts(self) -> None:
         """
         Check init of connection pool with user specified connection_class.
 
@@ -573,7 +589,7 @@
         """
         amt_hosts = 4
         hosts = [{"host": "localhost", "port": 9092}] * amt_hosts
-        t = AsyncTransport(
+        t: Any = AsyncTransport(
             hosts=hosts,
             connection_class=AIOHttpConnection,
         )
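The transport tests above revolve around sniffing behavior. A hedged sketch of the knobs they cover, with placeholder hosts; `sniff_on_start` fetches the node list before the first request, `sniff_on_connection_fail` re-sniffs after a connection error, and `sniffer_timeout` rate-limits periodic re-sniffing.

```python
# Sniffing configuration options exercised by TestTransport above.
from opensearchpy import AsyncTransport

transport = AsyncTransport(
    [{"host": "node1"}, {"host": "node2"}],  # placeholder seed hosts
    sniff_on_start=True,
    sniff_on_connection_fail=True,
    sniffer_timeout=60,  # re-sniff at most once a minute
)
```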
diff --git a/test_opensearchpy/test_cases.py b/test_opensearchpy/test_cases.py
index c41b86a8..e36d9bb6 100644
--- a/test_opensearchpy/test_cases.py
+++ b/test_opensearchpy/test_cases.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
@@ -26,21 +27,30 @@
 
 from collections import defaultdict
-from unittest import SkipTest  # noqa: F401
-from unittest import TestCase
+from typing import Any, Sequence
+from unittest import SkipTest, TestCase
 
 from opensearchpy import OpenSearch
 
 
 class DummyTransport(object):
-    def __init__(self, hosts, responses=None, **kwargs):
+    def __init__(
+        self, hosts: Sequence[str], responses: Any = None, **kwargs: Any
+    ) -> None:
         self.hosts = hosts
         self.responses = responses
-        self.call_count = 0
-        self.calls = defaultdict(list)
+        self.call_count: int = 0
+        self.calls: Any = defaultdict(list)
 
-    def perform_request(self, method, url, params=None, headers=None, body=None):
-        resp = 200, {}
+    def perform_request(
+        self,
+        method: str,
+        url: str,
+        params: Any = None,
+        headers: Any = None,
+        body: Any = None,
+    ) -> Any:
+        resp: Any = (200, {})
         if self.responses:
             resp = self.responses[self.call_count]
         self.call_count += 1
@@ -49,14 +59,14 @@
 
 
 class OpenSearchTestCase(TestCase):
-    def setUp(self):
+    def setUp(self) -> None:
         super(OpenSearchTestCase, self).setUp()
-        self.client = OpenSearch(transport_class=DummyTransport)
+        self.client: Any = OpenSearch(transport_class=DummyTransport)  # type: ignore
 
-    def assert_call_count_equals(self, count):
+    def assert_call_count_equals(self, count: int) -> None:
         self.assertEqual(count, self.client.transport.call_count)
 
-    def assert_url_called(self, method, url, count=1):
+    def assert_url_called(self, method: str, url: str, count: int = 1) -> Any:
         self.assertIn((method, url), self.client.transport.calls)
         calls = self.client.transport.calls[(method, url)]
         self.assertEqual(count, len(calls))
@@ -64,16 +74,19 @@ def assert_url_called(self, method, url, count=1):
 
 
 class TestOpenSearchTestCase(OpenSearchTestCase):
-    def test_our_transport_used(self):
+    def test_our_transport_used(self) -> None:
         self.assertIsInstance(self.client.transport, DummyTransport)
 
-    def test_start_with_0_call(self):
+    def test_start_with_0_call(self) -> None:
         self.assert_call_count_equals(0)
 
-    def test_each_call_is_recorded(self):
+    def test_each_call_is_recorded(self) -> None:
         self.client.transport.perform_request("GET", "/")
         self.client.transport.perform_request("DELETE", "/42", params={}, body="body")
         self.assert_call_count_equals(2)
         self.assertEqual(
             [({}, None, "body")], self.assert_url_called("DELETE", "/42", 1)
         )
+
+
+__all__ = ["SkipTest", "TestCase"]
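`DummyTransport` above is the test double the whole client suite is built on: it records every request instead of making HTTP calls. A short usage sketch, mirroring the file itself:

```python
# Swap DummyTransport in via transport_class, then assert on recorded calls.
from opensearchpy import OpenSearch

from test_opensearchpy.test_cases import DummyTransport

client = OpenSearch(transport_class=DummyTransport)
client.indices.create("test-index")
# calls is a dict keyed by (method, url) tuples; no HTTP was performed
assert ("PUT", "/test-index") in client.transport.calls
```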
diff --git a/test_opensearchpy/test_client/__init__.py b/test_opensearchpy/test_client/__init__.py
index 0a5747ca..3174772e 100644
--- a/test_opensearchpy/test_client/__init__.py
+++ b/test_opensearchpy/test_client/__init__.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
@@ -29,19 +30,20 @@
 
 import warnings
 
-from opensearchpy.client import OpenSearch, _normalize_hosts
+from opensearchpy.client import OpenSearch
+from opensearchpy.client.utils import _normalize_hosts
 
 from ..test_cases import OpenSearchTestCase, TestCase
 
 
 class TestNormalizeHosts(TestCase):
-    def test_none_uses_defaults(self):
+    def test_none_uses_defaults(self) -> None:
         self.assertEqual([{}], _normalize_hosts(None))
 
-    def test_strings_are_used_as_hostnames(self):
+    def test_strings_are_used_as_hostnames(self) -> None:
         self.assertEqual([{"host": "elastic.co"}], _normalize_hosts(["elastic.co"]))
 
-    def test_strings_are_parsed_for_port_and_user(self):
+    def test_strings_are_parsed_for_port_and_user(self) -> None:
         self.assertEqual(
             [
                 {"host": "elastic.co", "port": 42},
@@ -50,7 +52,7 @@
             _normalize_hosts(["elastic.co:42", "user:secre%5D@elastic.co"]),
         )
 
-    def test_strings_are_parsed_for_scheme(self):
+    def test_strings_are_parsed_for_scheme(self) -> None:
         self.assertEqual(
             [
                 {"host": "elastic.co", "port": 42, "use_ssl": True},
@@ -67,23 +69,23 @@
             ),
         )
 
-    def test_dicts_are_left_unchanged(self):
+    def test_dicts_are_left_unchanged(self) -> None:
         self.assertEqual(
             [{"host": "local", "extra": 123}],
             _normalize_hosts([{"host": "local", "extra": 123}]),
         )
 
-    def test_single_string_is_wrapped_in_list(self):
+    def test_single_string_is_wrapped_in_list(self) -> None:
         self.assertEqual([{"host": "elastic.co"}], _normalize_hosts("elastic.co"))
 
 
 class TestClient(OpenSearchTestCase):
-    def test_request_timeout_is_passed_through_unescaped(self):
+    def test_request_timeout_is_passed_through_unescaped(self) -> None:
         self.client.ping(request_timeout=0.1)
         calls = self.assert_url_called("HEAD", "/")
         self.assertEqual([({"request_timeout": 0.1}, {}, None)], calls)
 
-    def test_params_is_copied_when(self):
+    def test_params_is_copied_when(self) -> None:
         rt = object()
         params = dict(request_timeout=rt)
         self.client.ping(params=params)
@@ -95,7 +97,7 @@
         )
         self.assertFalse(calls[0][0] is calls[1][0])
 
-    def test_headers_is_copied_when(self):
+    def test_headers_is_copied_when(self) -> None:
         hv = "value"
         headers = dict(Authentication=hv)
         self.client.ping(headers=headers)
@@ -107,40 +109,40 @@
         )
         self.assertFalse(calls[0][0] is calls[1][0])
 
-    def test_from_in_search(self):
+    def test_from_in_search(self) -> None:
         self.client.search(index="i", from_=10)
         calls = self.assert_url_called("POST", "/i/_search")
         self.assertEqual([({"from": "10"}, {}, None)], calls)
 
-    def test_repr_contains_hosts(self):
+    def test_repr_contains_hosts(self) -> None:
         self.assertEqual("<OpenSearch([{}])>", repr(self.client))
 
-    def test_repr_subclass(self):
+    def test_repr_subclass(self) -> None:
         class OtherOpenSearch(OpenSearch):
             pass
 
         self.assertEqual("<OtherOpenSearch([{}])>", repr(OtherOpenSearch()))
 
-    def test_repr_contains_hosts_passed_in(self):
+    def test_repr_contains_hosts_passed_in(self) -> None:
         self.assertIn("opensearchpy.org", repr(OpenSearch(["opensearch.org:123"])))
 
-    def test_repr_truncates_host_to_5(self):
+    def test_repr_truncates_host_to_5(self) -> None:
         hosts = [{"host": "opensearch" + str(i)} for i in range(10)]
         client = OpenSearch(hosts)
         self.assertNotIn("opensearch5", repr(client))
         self.assertIn("...", repr(client))
 
-    def test_index_uses_post_if_id_is_empty(self):
+    def test_index_uses_post_if_id_is_empty(self) -> None:
         self.client.index(index="my-index", id="", body={})
         self.assert_url_called("POST", "/my-index/_doc")
 
-    def test_index_uses_put_if_id_is_not_empty(self):
+    def test_index_uses_put_if_id_is_not_empty(self) -> None:
         self.client.index(index="my-index", id=0, body={})
         self.assert_url_called("PUT", "/my-index/_doc/0")
 
-    def test_tasks_get_without_task_id_deprecated(self):
+    def test_tasks_get_without_task_id_deprecated(self) -> None:
         warnings.simplefilter("always", DeprecationWarning)
         with warnings.catch_warnings(record=True) as w:
             self.client.tasks.get()
@@ -154,7 +156,7 @@
             "and will be removed in v8.0. Use client.tasks.list() instead.",
         )
 
-    def test_tasks_get_with_task_id_not_deprecated(self):
+    def test_tasks_get_with_task_id_not_deprecated(self) -> None:
         warnings.simplefilter("always", DeprecationWarning)
         with warnings.catch_warnings(record=True) as w:
             self.client.tasks.get("task-1")
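The `_normalize_hosts` behaviors pinned down by `TestNormalizeHosts` above, condensed into a runnable sketch (host names are placeholders; the import path follows the relocation this patch makes to `opensearchpy.client.utils`):

```python
# The three core normalization rules the tests assert.
from opensearchpy.client.utils import _normalize_hosts

assert _normalize_hosts(None) == [{}]  # defaults
assert _normalize_hosts("localhost") == [{"host": "localhost"}]  # string wrapped
assert _normalize_hosts(["localhost:9200"]) == [
    {"host": "localhost", "port": 9200}  # host:port parsed apart
]
```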
diff --git a/test_opensearchpy/test_client/test_cluster.py b/test_opensearchpy/test_client/test_cluster.py
index 15c43d5f..f170a448 100644
--- a/test_opensearchpy/test_client/test_cluster.py
+++ b/test_opensearchpy/test_client/test_cluster.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
@@ -29,18 +30,18 @@
 
 class TestCluster(OpenSearchTestCase):
-    def test_stats_without_node_id(self):
+    def test_stats_without_node_id(self) -> None:
         self.client.cluster.stats()
         self.assert_url_called("GET", "/_cluster/stats")
 
-    def test_stats_with_node_id(self):
+    def test_stats_with_node_id(self) -> None:
         self.client.cluster.stats("node-1")
         self.assert_url_called("GET", "/_cluster/stats/nodes/node-1")
 
         self.client.cluster.stats(node_id="node-2")
         self.assert_url_called("GET", "/_cluster/stats/nodes/node-2")
 
-    def test_state_with_index_without_metric_defaults_to_all(self):
+    def test_state_with_index_without_metric_defaults_to_all(self) -> None:
         self.client.cluster.state()
         self.assert_url_called("GET", "/_cluster/state")
diff --git a/test_opensearchpy/test_client/test_indices.py b/test_opensearchpy/test_client/test_indices.py
index d6737378..668eebd7 100644
--- a/test_opensearchpy/test_client/test_indices.py
+++ b/test_opensearchpy/test_client/test_indices.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
@@ -29,19 +30,19 @@
 
 class TestIndices(OpenSearchTestCase):
-    def test_create_one_index(self):
+    def test_create_one_index(self) -> None:
         self.client.indices.create("test-index")
         self.assert_url_called("PUT", "/test-index")
 
-    def test_delete_multiple_indices(self):
+    def test_delete_multiple_indices(self) -> None:
         self.client.indices.delete(["test-index", "second.index", "third/index"])
         self.assert_url_called("DELETE", "/test-index,second.index,third%2Findex")
 
-    def test_exists_index(self):
+    def test_exists_index(self) -> None:
         self.client.indices.exists("second.index,third/index")
         self.assert_url_called("HEAD", "/second.index,third%2Findex")
 
-    def test_passing_empty_value_for_required_param_raises_exception(self):
+    def test_passing_empty_value_for_required_param_raises_exception(self) -> None:
         self.assertRaises(ValueError, self.client.indices.exists, index=None)
         self.assertRaises(ValueError, self.client.indices.exists, index=[])
         self.assertRaises(ValueError, self.client.indices.exists, index="")
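As the indices tests assert, empty values for required path parameters are rejected client-side before any request is built. A small sketch of that behavior:

```python
# Empty index names raise ValueError locally; no HTTP call is attempted.
from opensearchpy import OpenSearch

client = OpenSearch()  # no request happens at construction time
for bad_index in (None, [], ""):
    try:
        client.indices.exists(bad_index)
        raise AssertionError("expected ValueError")
    except ValueError:
        pass  # rejected before reaching the transport
```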
diff --git a/test_opensearchpy/test_client/test_overrides.py b/test_opensearchpy/test_client/test_overrides.py
index 4ce0931e..16cb3ab4 100644
--- a/test_opensearchpy/test_client/test_overrides.py
+++ b/test_opensearchpy/test_client/test_overrides.py
@@ -32,57 +32,57 @@
 
 class TestOverriddenUrlTargets(OpenSearchTestCase):
-    def test_create(self):
+    def test_create(self) -> None:
         self.client.create(index="test-index", id="test-id", body={})
         self.assert_url_called("PUT", "/test-index/_create/test-id")
 
-    def test_delete(self):
+    def test_delete(self) -> None:
         self.client.delete(index="test-index", id="test-id")
         self.assert_url_called("DELETE", "/test-index/_doc/test-id")
 
-    def test_exists(self):
+    def test_exists(self) -> None:
         self.client.exists(index="test-index", id="test-id")
         self.assert_url_called("HEAD", "/test-index/_doc/test-id")
 
-    def test_explain(self):
+    def test_explain(self) -> None:
         self.client.explain(index="test-index", id="test-id")
         self.assert_url_called("POST", "/test-index/_explain/test-id")
 
-    def test_get(self):
+    def test_get(self) -> None:
         self.client.get(index="test-index", id="test-id")
         self.assert_url_called("GET", "/test-index/_doc/test-id")
 
-    def test_get_source(self):
+    def test_get_source(self) -> None:
         self.client.get_source(index="test-index", id="test-id")
         self.assert_url_called("GET", "/test-index/_source/test-id")
 
-    def test_exists_source(self):
+    def test_exists_source(self) -> None:
         self.client.exists_source(index="test-index", id="test-id")
         self.assert_url_called("HEAD", "/test-index/_source/test-id")
 
-    def test_index(self):
+    def test_index(self) -> None:
         self.client.index(index="test-index", body={})
         self.assert_url_called("POST", "/test-index/_doc")
 
         self.client.index(index="test-index", id="test-id", body={})
         self.assert_url_called("PUT", "/test-index/_doc/test-id")
 
-    def test_termvectors(self):
+    def test_termvectors(self) -> None:
         self.client.termvectors(index="test-index", body={})
         self.assert_url_called("POST", "/test-index/_termvectors")
 
         self.client.termvectors(index="test-index", id="test-id", body={})
         self.assert_url_called("POST", "/test-index/_termvectors/test-id")
 
-    def test_mtermvectors(self):
+    def test_mtermvectors(self) -> None:
         self.client.mtermvectors(index="test-index", body={})
         self.assert_url_called("POST", "/test-index/_mtermvectors")
 
-    def test_update(self):
+    def test_update(self) -> None:
         self.client.update(index="test-index", id="test-id", body={})
         self.assert_url_called("POST", "/test-index/_update/test-id")
 
-    def test_cluster_state(self):
+    def test_cluster_state(self) -> None:
         self.client.cluster.state()
         self.assert_url_called("GET", "/_cluster/state")
 
@@ -92,20 +92,20 @@ def test_cluster_state(self):
         self.client.cluster.state(index="test-index", metric="test-metric")
         self.assert_url_called("GET", "/_cluster/state/test-metric/test-index")
 
-    def test_cluster_stats(self):
+    def test_cluster_stats(self) -> None:
         self.client.cluster.stats()
         self.assert_url_called("GET", "/_cluster/stats")
 
         self.client.cluster.stats(node_id="test-node")
         self.assert_url_called("GET", "/_cluster/stats/nodes/test-node")
 
-    def test_indices_put_mapping(self):
+    def test_indices_put_mapping(self) -> None:
         self.client.indices.put_mapping(body={})
         self.assert_url_called("PUT", "/_all/_mapping")
 
         self.client.indices.put_mapping(index="test-index", body={})
         self.assert_url_called("PUT", "/test-index/_mapping")
 
-    def test_tasks_get(self):
+    def test_tasks_get(self) -> None:
         with pytest.warns(DeprecationWarning):
             self.client.tasks.get()
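One URL override worth calling out from the tests above: `index()` chooses its HTTP verb based on whether an id is supplied. A minimal sketch against the same `DummyTransport`-backed client:

```python
# index() routes to POST /<index>/_doc without an id, PUT /<index>/_doc/<id> with one.
from opensearchpy import OpenSearch

from test_opensearchpy.test_cases import DummyTransport

client = OpenSearch(transport_class=DummyTransport)
client.index(index="test-index", body={})               # auto-generated id
client.index(index="test-index", id="test-id", body={})  # caller-chosen id
assert ("POST", "/test-index/_doc") in client.transport.calls
assert ("PUT", "/test-index/_doc/test-id") in client.transport.calls
```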
diff --git a/test_opensearchpy/test_client/test_plugins/__init__.py b/test_opensearchpy/test_client/test_plugins/__init__.py
index 7e52ae22..392fa5bd 100644
--- a/test_opensearchpy/test_client/test_plugins/__init__.py
+++ b/test_opensearchpy/test_client/test_plugins/__init__.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
diff --git a/test_opensearchpy/test_client/test_plugins/test_alerting.py b/test_opensearchpy/test_client/test_plugins/test_alerting.py
index 62827655..482a4224 100644
--- a/test_opensearchpy/test_client/test_plugins/test_alerting.py
+++ b/test_opensearchpy/test_client/test_plugins/test_alerting.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
@@ -11,41 +12,41 @@
 
 class TestAlerting(OpenSearchTestCase):
-    def test_create_monitor(self):
+    def test_create_monitor(self) -> None:
         # Test Post Method
         self.client.alerting.create_monitor({})
         self.assert_url_called("POST", "/_plugins/_alerting/monitors")
 
-    def test_run_monitor(self):
+    def test_run_monitor(self) -> None:
         self.client.alerting.run_monitor("...")
         self.assert_url_called("POST", "/_plugins/_alerting/monitors/.../_execute")
 
-    def test_get_monitor(self):
+    def test_get_monitor(self) -> None:
         # Test Get Method
         self.client.alerting.get_monitor("...")
         self.assert_url_called("GET", "/_plugins/_alerting/monitors/...")
 
-    def test_search_monitor(self):
+    def test_search_monitor(self) -> None:
         # Test Search Method
         self.client.alerting.search_monitor({})
         self.assert_url_called("GET", "/_plugins/_alerting/monitors/_search")
 
-    def test_update_monitor(self):
+    def test_update_monitor(self) -> None:
         # Test Update Method
         self.client.alerting.update_monitor("...")
         self.assert_url_called("PUT", "/_plugins/_alerting/monitors/...")
 
-    def test_delete_monitor(self):
+    def test_delete_monitor(self) -> None:
         # Test Delete Method
         self.client.alerting.delete_monitor("...")
         self.assert_url_called("DELETE", "/_plugins/_alerting/monitors/...")
 
-    def test_create_destination(self):
+    def test_create_destination(self) -> None:
         # Test Post Method
         self.client.alerting.create_destination({})
         self.assert_url_called("POST", "/_plugins/_alerting/destinations")
 
-    def test_get_destination(self):
+    def test_get_destination(self) -> None:
         # Test Get Method
 
         # Get a specific destination
         self.client.alerting.get_destination("...")
@@ -56,21 +57,21 @@
         self.client.alerting.get_destination()
         self.assert_url_called("GET", "/_plugins/_alerting/destinations")
 
-    def test_update_destination(self):
+    def test_update_destination(self) -> None:
         # Test Update Method
         self.client.alerting.update_destination("...")
         self.assert_url_called("PUT", "/_plugins/_alerting/destinations/...")
 
-    def test_delete_destination(self):
+    def test_delete_destination(self) -> None:
         # Test Delete Method
         self.client.alerting.delete_destination("...")
         self.assert_url_called("DELETE", "/_plugins/_alerting/destinations/...")
 
-    def test_get_alerts(self):
+    def test_get_alerts(self) -> None:
         self.client.alerting.get_alerts()
         self.assert_url_called("GET", "/_plugins/_alerting/monitors/alerts")
 
-    def test_acknowledge_alerts(self):
+    def test_acknowledge_alerts(self) -> None:
         self.client.alerting.acknowledge_alert("...")
         self.assert_url_called(
             "POST", "/_plugins/_alerting/monitors/.../_acknowledge/alerts"
diff --git a/test_opensearchpy/test_client/test_plugins/test_index_management.py b/test_opensearchpy/test_client/test_plugins/test_index_management.py
index 6b126038..891d6f02 100644
--- a/test_opensearchpy/test_client/test_plugins/test_index_management.py
+++ b/test_opensearchpy/test_client/test_plugins/test_index_management.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
@@ -11,11 +12,11 @@
 
 class TestIndexManagement(OpenSearchTestCase):
-    def test_create_policy(self):
+    def test_create_policy(self) -> None:
         self.client.index_management.put_policy("...")
         self.assert_url_called("PUT", "/_plugins/_ism/policies/...")
 
-    def test_update_policy(self):
+    def test_update_policy(self) -> None:
         self.client.index_management.put_policy(
             "...", params={"if_seq_no": 7, "if_primary_term": 1}
         )
@@ -24,33 +25,33 @@
             self.assert_url_called("PUT", "/_plugins/_ism/policies/..."),
         )
 
-    def test_add_policy(self):
+    def test_add_policy(self) -> None:
         self.client.index_management.add_policy("...")
         self.assert_url_called("POST", "/_plugins/_ism/add/...")
 
-    def test_get_policy(self):
+    def test_get_policy(self) -> None:
         self.client.index_management.get_policy("...")
         self.assert_url_called("GET", "/_plugins/_ism/policies/...")
 
-    def test_remove_policy_from_index(self):
+    def test_remove_policy_from_index(self) -> None:
         self.client.index_management.remove_policy_from_index("...")
         self.assert_url_called("POST", "/_plugins/_ism/remove/...")
 
-    def test_change_policy(self):
+    def test_change_policy(self) -> None:
         self.client.index_management.change_policy("...")
         self.assert_url_called("POST", "/_plugins/_ism/change_policy/...")
 
-    def test_retry(self):
+    def test_retry(self) -> None:
         self.client.index_management.retry("...")
         self.assert_url_called("POST", "/_plugins/_ism/retry/...")
 
-    def test_explain_index(self):
+    def test_explain_index(self) -> None:
         self.client.index_management.explain_index("...", show_policy=True)
         self.assertEqual(
             [({"show_policy": b"true"}, {}, None)],
             self.assert_url_called("GET", "/_plugins/_ism/explain/..."),
         )
 
-    def test_delete_policy(self):
+    def test_delete_policy(self) -> None:
         self.client.index_management.delete_policy("...")
         self.assert_url_called("DELETE", "/_plugins/_ism/policies/...")
diff --git a/test_opensearchpy/test_client/test_plugins/test_plugins_client.py b/test_opensearchpy/test_client/test_plugins/test_plugins_client.py
new file mode 100644
index 00000000..ed65dca4
--- /dev/null
+++ b/test_opensearchpy/test_client/test_plugins/test_plugins_client.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+# SPDX-License-Identifier: Apache-2.0
+#
+# The OpenSearch Contributors require contributions made to
license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + +from opensearchpy.client import OpenSearch + +from ...test_cases import TestCase + + +class TestPluginsClient(TestCase): + def test_plugins_client(self) -> None: + with self.assertWarns(Warning) as w: + client = OpenSearch() + # double-init + client.plugins.__init__(client) # type: ignore + self.assertEqual( + str(w.warnings[0].message), + "Cannot load `alerting` directly to OpenSearch as it already exists. Use `OpenSearch.plugin.alerting` instead.", + ) diff --git a/test_opensearchpy/test_client/test_point_in_time.py b/test_opensearchpy/test_client/test_point_in_time.py index 53742dbe..30940ce4 100644 --- a/test_opensearchpy/test_client/test_point_in_time.py +++ b/test_opensearchpy/test_client/test_point_in_time.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -11,19 +12,36 @@ class TestPointInTime(OpenSearchTestCase): - def test_create_one_point_in_time(self): + def test_create_one_point_in_time(self) -> None: index_name = "test-index" self.client.create_point_in_time(index=index_name) self.assert_url_called("POST", "/test-index/_search/point_in_time") - def test_delete_one_point_in_time(self): + def test_delete_one_point_in_time(self) -> None: self.client.delete_point_in_time(body={"pit_id": ["Sample-PIT-ID"]}) self.assert_url_called("DELETE", "/_search/point_in_time") - def test_delete_all_point_in_time(self): + def test_delete_all_point_in_time(self) -> None: self.client.delete_point_in_time(all=True) self.assert_url_called("DELETE", "/_search/point_in_time/_all") - def test_list_all_point_in_time(self): + def test_list_all_point_in_time(self) -> None: self.client.list_all_point_in_time() self.assert_url_called("GET", "/_search/point_in_time/_all") + + def test_create_pit(self) -> None: + index_name = "test-index" + self.client.create_pit(index=index_name) + self.assert_url_called("POST", "/test-index/_search/point_in_time") + + def test_delete_pit(self) -> None: + self.client.delete_pit(body={"pit_id": ["Sample-PIT-ID"]}) + self.assert_url_called("DELETE", "/_search/point_in_time") + + def test_delete_all_pits(self) -> None: + self.client.delete_all_pits() + self.assert_url_called("DELETE", "/_search/point_in_time/_all") + + def test_get_all_pits(self) -> None: + self.client.get_all_pits() + self.assert_url_called("GET", "/_search/point_in_time/_all") diff --git a/test_opensearchpy/test_client/test_remote_store.py b/test_opensearchpy/test_client/test_remote_store.py new file mode 100644 index 00000000..a9bfc894 --- /dev/null +++ b/test_opensearchpy/test_client/test_remote_store.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
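+# Routing-only test for the remote store client added in #552: restore() should POST the given index list to /_remotestore/_restore.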
+from test_opensearchpy.test_cases import OpenSearchTestCase + + +class TestRemoteStore(OpenSearchTestCase): + def test_remote_store_restore(self) -> None: + self.client.remote_store.restore(body=["index-1"]) + self.assert_url_called("POST", "/_remotestore/_restore") diff --git a/test_opensearchpy/test_client/test_requests.py b/test_opensearchpy/test_client/test_requests.py new file mode 100644 index 00000000..66ec8cbc --- /dev/null +++ b/test_opensearchpy/test_client/test_requests.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + +from unittest import TestCase + +from opensearchpy import OpenSearch, RequestsHttpConnection + + +class TestRequests(TestCase): + def test_connection_class(self) -> None: + client = OpenSearch(connection_class=RequestsHttpConnection) + self.assertEqual(client.transport.pool_maxsize, None) + self.assertEqual(client.transport.connection_class, RequestsHttpConnection) + self.assertIsInstance( + client.transport.connection_pool.connections[0], RequestsHttpConnection + ) + + def test_pool_maxsize(self) -> None: + client = OpenSearch(connection_class=RequestsHttpConnection, pool_maxsize=42) + self.assertEqual(client.transport.pool_maxsize, 42) + self.assertEqual( + client.transport.connection_pool.connections[0] + .session.adapters["https://"] + ._pool_maxsize, + 42, + ) diff --git a/test_opensearchpy/test_client/test_urllib3.py b/test_opensearchpy/test_client/test_urllib3.py new file mode 100644 index 00000000..064c49cc --- /dev/null +++ b/test_opensearchpy/test_client/test_urllib3.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
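+# Covers the default connection_class and the new pool_maxsize option for Urllib3HttpConnection (#535); pool_maxsize stays None unless set explicitly.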
+ +from unittest import TestCase + +from urllib3.connectionpool import HTTPConnectionPool + +from opensearchpy import OpenSearch, Urllib3HttpConnection + + +class TestUrlLib3(TestCase): + def test_default(self) -> None: + client = OpenSearch() + self.assertEqual(client.transport.connection_class, Urllib3HttpConnection) + self.assertEqual(client.transport.pool_maxsize, None) + + def test_connection_class(self) -> None: + client = OpenSearch(connection_class=Urllib3HttpConnection) + self.assertEqual(client.transport.connection_class, Urllib3HttpConnection) + self.assertIsInstance( + client.transport.connection_pool.connections[0], Urllib3HttpConnection + ) + self.assertIsInstance( + client.transport.connection_pool.connections[0].pool, HTTPConnectionPool + ) + + def test_pool_maxsize(self) -> None: + client = OpenSearch(connection_class=Urllib3HttpConnection, pool_maxsize=42) + self.assertEqual(client.transport.pool_maxsize, 42) + # https://github.com/python/cpython/blob/3.12/Lib/queue.py#L35 + self.assertEqual( + client.transport.connection_pool.connections[0].pool.pool.maxsize, 42 + ) diff --git a/test_opensearchpy/test_client/test_utils.py b/test_opensearchpy/test_client/test_utils.py index 1a4b6809..797624fc 100644 --- a/test_opensearchpy/test_client/test_utils.py +++ b/test_opensearchpy/test_client/test_utils.py @@ -28,21 +28,22 @@ from __future__ import unicode_literals +from typing import Any + from opensearchpy.client.utils import _bulk_body, _escape, _make_path, query_params -from opensearchpy.compat import PY2 -from ..test_cases import SkipTest, TestCase +from ..test_cases import TestCase class TestQueryParams(TestCase): - def setup_method(self, _): - self.calls = [] + def setup_method(self, _: Any) -> None: + self.calls: Any = [] @query_params("simple_param") - def func_to_wrap(self, *args, **kwargs): + def func_to_wrap(self, *args: Any, **kwargs: Any) -> None: self.calls.append((args, kwargs)) - def test_handles_params(self): + def test_handles_params(self) -> None: self.func_to_wrap(params={"simple_param_2": "2"}, simple_param="3") self.assertEqual( self.calls, @@ -57,19 +58,19 @@ def test_handles_params(self): ], ) - def test_handles_headers(self): + def test_handles_headers(self) -> None: self.func_to_wrap(headers={"X-Opaque-Id": "app-1"}) self.assertEqual( self.calls, [((), {"params": {}, "headers": {"x-opaque-id": "app-1"}})] ) - def test_handles_opaque_id(self): + def test_handles_opaque_id(self) -> None: self.func_to_wrap(opaque_id="request-id") self.assertEqual( self.calls, [((), {"params": {}, "headers": {"x-opaque-id": "request-id"}})] ) - def test_handles_empty_none_and_normalization(self): + def test_handles_empty_none_and_normalization(self) -> None: self.func_to_wrap(params=None) self.assertEqual(self.calls[-1], ((), {"params": {}, "headers": {}})) @@ -85,7 +86,7 @@ def test_handles_empty_none_and_normalization(self): self.func_to_wrap(headers={"X": "y"}) self.assertEqual(self.calls[-1], ((), {"params": {}, "headers": {"x": "y"}})) - def test_non_escaping_params(self): + def test_non_escaping_params(self) -> None: # the query_params decorator doesn't validate "timeout" it simply avoids escaping as it did self.func_to_wrap(simple_param="x", timeout="4s") self.assertEqual( @@ -110,7 +111,7 @@ def test_non_escaping_params(self): ), ) - def test_per_call_authentication(self): + def test_per_call_authentication(self) -> None: self.func_to_wrap(api_key=("name", "key")) self.assertEqual( self.calls[-1], @@ -155,52 +156,44 @@ def test_per_call_authentication(self): class 
TestMakePath(TestCase): - def test_handles_unicode(self): + def test_handles_unicode(self) -> None: id = "中文" self.assertEqual( "/some-index/type/%E4%B8%AD%E6%96%87", _make_path("some-index", "type", id) ) - def test_handles_utf_encoded_string(self): - if not PY2: - raise SkipTest("Only relevant for py2") - id = "中文".encode("utf-8") - self.assertEqual( - "/some-index/type/%E4%B8%AD%E6%96%87", _make_path("some-index", "type", id) - ) - class TestEscape(TestCase): - def test_handles_ascii(self): + def test_handles_ascii(self) -> None: string = "abc123" self.assertEqual(b"abc123", _escape(string)) - def test_handles_unicode(self): + def test_handles_unicode(self) -> None: string = "中文" self.assertEqual(b"\xe4\xb8\xad\xe6\x96\x87", _escape(string)) - def test_handles_bytestring(self): + def test_handles_bytestring(self) -> None: string = b"celery-task-meta-c4f1201f-eb7b-41d5-9318-a75a8cfbdaa0" self.assertEqual(string, _escape(string)) class TestBulkBody(TestCase): - def test_proper_bulk_body_as_string_is_not_modified(self): + def test_proper_bulk_body_as_string_is_not_modified(self) -> None: string_body = '"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"\n' self.assertEqual(string_body, _bulk_body(None, string_body)) - def test_proper_bulk_body_as_bytestring_is_not_modified(self): + def test_proper_bulk_body_as_bytestring_is_not_modified(self) -> None: bytestring_body = b'"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"\n' self.assertEqual(bytestring_body, _bulk_body(None, bytestring_body)) - def test_bulk_body_as_string_adds_trailing_newline(self): + def test_bulk_body_as_string_adds_trailing_newline(self) -> None: string_body = '"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"' self.assertEqual( '"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"\n', _bulk_body(None, string_body), ) - def test_bulk_body_as_bytestring_adds_trailing_newline(self): + def test_bulk_body_as_bytestring_adds_trailing_newline(self) -> None: bytestring_body = b'"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"' self.assertEqual( b'"{"index":{ "_index" : "test"}}\n{"field1": "value1"}"\n', diff --git a/test_opensearchpy/test_connection.py b/test_opensearchpy/test_connection.py deleted file mode 100644 index c2480946..00000000 --- a/test_opensearchpy/test_connection.py +++ /dev/null @@ -1,1132 +0,0 @@ -# -*- coding: utf-8 -*- -# SPDX-License-Identifier: Apache-2.0 -# -# The OpenSearch Contributors require contributions made to -# this file be licensed under the Apache-2.0 license or a -# compatible open source license. -# -# Modifications Copyright OpenSearch Contributors. See -# GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- - -import gzip -import io -import json -import os -import re -import ssl -import sys -import unittest -import uuid -import warnings -from platform import python_version - -import pytest -import six -import urllib3 -from mock import Mock, patch -from requests.auth import AuthBase -from urllib3._collections import HTTPHeaderDict - -from opensearchpy import __versionstr__ -from opensearchpy.compat import reraise_exceptions -from opensearchpy.connection import ( - Connection, - RequestsHttpConnection, - Urllib3HttpConnection, -) -from opensearchpy.exceptions import ( - ConflictError, - ConnectionError, - NotFoundError, - RequestError, - TransportError, -) - -from .test_cases import SkipTest, TestCase - -try: - from pytest import MonkeyPatch -except ImportError: # Old version of pytest for 2.7 and 3.5 - from _pytest.monkeypatch import MonkeyPatch - -from pytest import raises - -from opensearchpy import OpenSearch, serializer -from opensearchpy.connection import connections - -if sys.version_info > (3, 0): - from test_opensearchpy.TestHttpServer import TestHTTPServer - - -def gzip_decompress(data): - buf = gzip.GzipFile(fileobj=io.BytesIO(data), mode="rb") - return buf.read() - - -class TestBaseConnection(TestCase): - def test_empty_warnings(self): - con = Connection() - with warnings.catch_warnings(record=True) as w: - con._raise_warnings(()) - con._raise_warnings([]) - - self.assertEqual(w, []) - - def test_raises_warnings(self): - con = Connection() - - with warnings.catch_warnings(record=True) as warn: - con._raise_warnings(['299 OpenSearch-7.6.1-aa751 "this is deprecated"']) - - self.assertEqual([str(w.message) for w in warn], ["this is deprecated"]) - - with warnings.catch_warnings(record=True) as warn: - con._raise_warnings( - [ - '299 OpenSearch-7.6.1-aa751 "this is also deprecated"', - '299 OpenSearch-7.6.1-aa751 "this is also deprecated"', - '299 OpenSearch-7.6.1-aa751 "guess what? deprecated"', - ] - ) - - self.assertEqual( - [str(w.message) for w in warn], - ["this is also deprecated", "guess what? 
deprecated"], - ) - - def test_raises_warnings_when_folded(self): - con = Connection() - with warnings.catch_warnings(record=True) as warn: - con._raise_warnings( - [ - '299 OpenSearch-7.6.1-aa751 "warning",' - '299 OpenSearch-7.6.1-aa751 "folded"', - ] - ) - - self.assertEqual([str(w.message) for w in warn], ["warning", "folded"]) - - @unittest.skipIf(six.PY2, "not compatible with python2") - def test_raises_errors(self): - con = Connection() - with self.assertLogs("opensearch") as captured, self.assertRaises( - NotFoundError - ): - con._raise_error(404, "Not found", "application/json") - self.assertEqual(len(captured.output), 1) - - # NB: this should assertNoLogs() but that method is not available until python3.10 - with self.assertRaises(NotFoundError): - con._raise_error(404, "Not found", "text/plain; charset=UTF-8") - - def test_ipv6_host_and_port(self): - for kwargs, expected_host in [ - ({"host": "::1"}, "http://[::1]:9200"), - ({"host": "::1", "port": 443}, "http://[::1]:443"), - ({"host": "::1", "use_ssl": True}, "https://[::1]:9200"), - ({"host": "127.0.0.1", "port": 1234}, "http://127.0.0.1:1234"), - ({"host": "localhost", "use_ssl": True}, "https://localhost:9200"), - ]: - conn = Connection(**kwargs) - assert conn.host == expected_host - - def test_compatibility_accept_header(self): - try: - conn = Connection() - assert "accept" not in conn.headers - - os.environ["ELASTIC_CLIENT_APIVERSIONING"] = "0" - - conn = Connection() - assert "accept" not in conn.headers - - os.environ["ELASTIC_CLIENT_APIVERSIONING"] = "1" - - conn = Connection() - assert ( - conn.headers["accept"] - == "application/vnd.elasticsearch+json;compatible-with=7" - ) - finally: - os.environ.pop("ELASTIC_CLIENT_APIVERSIONING") - - def test_ca_certs_ssl_cert_file(self): - cert = "/path/to/clientcert.pem" - with MonkeyPatch().context() as monkeypatch: - monkeypatch.setenv("SSL_CERT_FILE", cert) - assert Connection.default_ca_certs() == cert - - def test_ca_certs_ssl_cert_dir(self): - cert = "/path/to/clientcert/dir" - with MonkeyPatch().context() as monkeypatch: - monkeypatch.setenv("SSL_CERT_DIR", cert) - assert Connection.default_ca_certs() == cert - - def test_ca_certs_certifi(self): - import certifi - - assert Connection.default_ca_certs() == certifi.where() - - def test_no_ca_certs(self): - with MonkeyPatch().context() as monkeypatch: - monkeypatch.setitem(sys.modules, "certifi", None) - assert Connection.default_ca_certs() is None - - -class TestUrllib3Connection(TestCase): - def _get_mock_connection(self, connection_params={}, response_body=b"{}"): - con = Urllib3HttpConnection(**connection_params) - - def _dummy_urlopen(*args, **kwargs): - dummy_response = Mock() - dummy_response.headers = HTTPHeaderDict({}) - dummy_response.status = 200 - dummy_response.data = response_body - _dummy_urlopen.call_args = (args, kwargs) - return dummy_response - - con.pool.urlopen = _dummy_urlopen - return con - - def test_ssl_context(self): - try: - context = ssl.create_default_context() - except AttributeError: - # if create_default_context raises an AttributeError Exception - # it means SSLContext is not available for that version of python - # and we should skip this test. 
- raise SkipTest( - "Test test_ssl_context is skipped cause SSLContext is not available for this version of ptyhon" - ) - - con = Urllib3HttpConnection(use_ssl=True, ssl_context=context) - self.assertEqual(len(con.pool.conn_kw.keys()), 1) - self.assertIsInstance(con.pool.conn_kw["ssl_context"], ssl.SSLContext) - self.assertTrue(con.use_ssl) - - def test_opaque_id(self): - con = Urllib3HttpConnection(opaque_id="app-1") - self.assertEqual(con.headers["x-opaque-id"], "app-1") - - def test_no_http_compression(self): - con = self._get_mock_connection() - self.assertFalse(con.http_compress) - self.assertNotIn("accept-encoding", con.headers) - - con.perform_request("GET", "/") - - (_, _, req_body), kwargs = con.pool.urlopen.call_args - - self.assertFalse(req_body) - self.assertNotIn("accept-encoding", kwargs["headers"]) - self.assertNotIn("content-encoding", kwargs["headers"]) - - def test_http_compression(self): - con = self._get_mock_connection({"http_compress": True}) - self.assertTrue(con.http_compress) - self.assertEqual(con.headers["accept-encoding"], "gzip,deflate") - - # 'content-encoding' shouldn't be set at a connection level. - # Should be applied only if the request is sent with a body. - self.assertNotIn("content-encoding", con.headers) - - con.perform_request("GET", "/", body=b"{}") - - (_, _, req_body), kwargs = con.pool.urlopen.call_args - - self.assertEqual(gzip_decompress(req_body), b"{}") - self.assertEqual(kwargs["headers"]["accept-encoding"], "gzip,deflate") - self.assertEqual(kwargs["headers"]["content-encoding"], "gzip") - - con.perform_request("GET", "/") - - (_, _, req_body), kwargs = con.pool.urlopen.call_args - - self.assertFalse(req_body) - self.assertEqual(kwargs["headers"]["accept-encoding"], "gzip,deflate") - self.assertNotIn("content-encoding", kwargs["headers"]) - - def test_default_user_agent(self): - con = Urllib3HttpConnection() - self.assertEqual( - con._get_default_user_agent(), - "opensearch-py/%s (Python %s)" % (__versionstr__, python_version()), - ) - - def test_timeout_set(self): - con = Urllib3HttpConnection(timeout=42) - self.assertEqual(42, con.timeout) - - def test_keep_alive_is_on_by_default(self): - con = Urllib3HttpConnection() - self.assertEqual( - { - "connection": "keep-alive", - "content-type": "application/json", - "user-agent": con._get_default_user_agent(), - }, - con.headers, - ) - - def test_http_auth(self): - con = Urllib3HttpConnection(http_auth="username:secret") - self.assertEqual( - { - "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", - "connection": "keep-alive", - "content-type": "application/json", - "user-agent": con._get_default_user_agent(), - }, - con.headers, - ) - - def test_http_auth_tuple(self): - con = Urllib3HttpConnection(http_auth=("username", "secret")) - self.assertEqual( - { - "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", - "content-type": "application/json", - "connection": "keep-alive", - "user-agent": con._get_default_user_agent(), - }, - con.headers, - ) - - def test_http_auth_list(self): - con = Urllib3HttpConnection(http_auth=["username", "secret"]) - self.assertEqual( - { - "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", - "content-type": "application/json", - "connection": "keep-alive", - "user-agent": con._get_default_user_agent(), - }, - con.headers, - ) - - @pytest.mark.skipif( - sys.version_info < (3, 6), reason="AWSV4SignerAuth requires python3.6+" - ) - def test_aws_signer_as_http_auth(self): - region = "us-west-2" - - import requests - - from opensearchpy.helpers.signer import AWSV4SignerAuth - - auth = 
AWSV4SignerAuth(self.mock_session(), region) - con = RequestsHttpConnection(http_auth=auth) - prepared_request = requests.Request("GET", "http://localhost").prepare() - auth(prepared_request) - self.assertEqual(auth, con.session.auth) - self.assertIn("Authorization", prepared_request.headers) - self.assertIn("X-Amz-Date", prepared_request.headers) - self.assertIn("X-Amz-Security-Token", prepared_request.headers) - self.assertIn("X-Amz-Content-SHA256", prepared_request.headers) - - @pytest.mark.skipif( - sys.version_info < (3, 6), reason="AWSV4SignerAuth requires python3.6+" - ) - def test_aws_signer_when_region_is_null(self): - session = self.mock_session() - - from opensearchpy.helpers.signer import AWSV4SignerAuth - - with pytest.raises(ValueError) as e: - AWSV4SignerAuth(session, None) - assert str(e.value) == "Region cannot be empty" - - with pytest.raises(ValueError) as e: - AWSV4SignerAuth(session, "") - assert str(e.value) == "Region cannot be empty" - - @pytest.mark.skipif( - sys.version_info < (3, 6), reason="AWSV4SignerAuth requires python3.6+" - ) - def test_aws_signer_when_credentials_is_null(self): - region = "us-west-1" - - from opensearchpy.helpers.signer import AWSV4SignerAuth - - with pytest.raises(ValueError) as e: - AWSV4SignerAuth(None, region) - assert str(e.value) == "Credentials cannot be empty" - - with pytest.raises(ValueError) as e: - AWSV4SignerAuth("", region) - assert str(e.value) == "Credentials cannot be empty" - - @pytest.mark.skipif( - sys.version_info < (3, 6), reason="AWSV4SignerAuth requires python3.6+" - ) - def test_aws_signer_when_service_is_specified(self): - region = "us-west-1" - service = "aoss" - - import requests - - from opensearchpy.helpers.signer import AWSV4SignerAuth - - auth = AWSV4SignerAuth(self.mock_session(), region, service) - con = RequestsHttpConnection(http_auth=auth) - prepared_request = requests.Request("GET", "http://localhost").prepare() - auth(prepared_request) - self.assertEqual(auth, con.session.auth) - self.assertIn("Authorization", prepared_request.headers) - self.assertIn("X-Amz-Date", prepared_request.headers) - self.assertIn("X-Amz-Security-Token", prepared_request.headers) - - def mock_session(self): - access_key = uuid.uuid4().hex - secret_key = uuid.uuid4().hex - token = uuid.uuid4().hex - dummy_session = Mock() - dummy_session.access_key = access_key - dummy_session.secret_key = secret_key - dummy_session.token = token - return dummy_session - - def test_uses_https_if_verify_certs_is_off(self): - with warnings.catch_warnings(record=True) as w: - con = Urllib3HttpConnection(use_ssl=True, verify_certs=False) - self.assertEqual(1, len(w)) - self.assertEqual( - "Connecting to https://localhost:9200 using SSL with verify_certs=False is insecure.", - str(w[0].message), - ) - - self.assertIsInstance(con.pool, urllib3.HTTPSConnectionPool) - - def test_nowarn_when_uses_https_if_verify_certs_is_off(self): - with warnings.catch_warnings(record=True) as w: - con = Urllib3HttpConnection( - use_ssl=True, verify_certs=False, ssl_show_warn=False - ) - self.assertEqual(0, len(w)) - - self.assertIsInstance(con.pool, urllib3.HTTPSConnectionPool) - - def test_doesnt_use_https_if_not_specified(self): - con = Urllib3HttpConnection() - self.assertIsInstance(con.pool, urllib3.HTTPConnectionPool) - - def test_no_warning_when_using_ssl_context(self): - ctx = ssl.create_default_context() - with warnings.catch_warnings(record=True) as w: - Urllib3HttpConnection(ssl_context=ctx) - self.assertEqual(0, len(w)) - - def 
test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self): - for kwargs in ( - {"ssl_show_warn": False}, - {"ssl_show_warn": True}, - {"verify_certs": True}, - {"verify_certs": False}, - {"ca_certs": "/path/to/certs"}, - {"ssl_show_warn": True, "ca_certs": "/path/to/certs"}, - ): - kwargs["ssl_context"] = ssl.create_default_context() - - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter("always") - - Urllib3HttpConnection(**kwargs) - - self.assertEqual(1, len(w)) - self.assertEqual( - "When using `ssl_context`, all other SSL related kwargs are ignored", - str(w[0].message), - ) - - def test_uses_given_ca_certs(self): - path = "/path/to/my/ca_certs.pem" - c = Urllib3HttpConnection(use_ssl=True, ca_certs=path) - self.assertEqual(path, c.pool.ca_certs) - - def test_uses_default_ca_certs(self): - c = Urllib3HttpConnection(use_ssl=True) - self.assertEqual(Connection.default_ca_certs(), c.pool.ca_certs) - - def test_uses_no_ca_certs(self): - c = Urllib3HttpConnection(use_ssl=True, verify_certs=False) - self.assertIsNone(c.pool.ca_certs) - - @patch("opensearchpy.connection.base.logger") - def test_uncompressed_body_logged(self, logger): - con = self._get_mock_connection(connection_params={"http_compress": True}) - con.perform_request("GET", "/", body=b'{"example": "body"}') - - self.assertEqual(2, logger.debug.call_count) - req, resp = logger.debug.call_args_list - - self.assertEqual('> {"example": "body"}', req[0][0] % req[0][1:]) - self.assertEqual("< {}", resp[0][0] % resp[0][1:]) - - def test_surrogatepass_into_bytes(self): - buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" - con = self._get_mock_connection(response_body=buf) - status, headers, data = con.perform_request("GET", "/") - self.assertEqual(u"你好\uda6a", data) # fmt: skip - - @pytest.mark.skipif( - not reraise_exceptions, reason="RecursionError isn't defined in Python <3.5" - ) - def test_recursion_error_reraised(self): - conn = Urllib3HttpConnection() - - def urlopen_raise(*_, **__): - raise RecursionError("Wasn't modified!") - - conn.pool.urlopen = urlopen_raise - - with pytest.raises(RecursionError) as e: - conn.perform_request("GET", "/") - assert str(e.value) == "Wasn't modified!" 
- - -class TestRequestsConnection(TestCase): - def _get_mock_connection( - self, connection_params={}, status_code=200, response_body=b"{}" - ): - con = RequestsHttpConnection(**connection_params) - - def _dummy_send(*args, **kwargs): - dummy_response = Mock() - dummy_response.headers = {} - dummy_response.status_code = status_code - dummy_response.content = response_body - dummy_response.request = args[0] - dummy_response.cookies = {} - _dummy_send.call_args = (args, kwargs) - return dummy_response - - con.session.send = _dummy_send - return con - - def _get_request(self, connection, *args, **kwargs): - if "body" in kwargs: - kwargs["body"] = kwargs["body"].encode("utf-8") - - status, headers, data = connection.perform_request(*args, **kwargs) - self.assertEqual(200, status) - self.assertEqual("{}", data) - - timeout = kwargs.pop("timeout", connection.timeout) - args, kwargs = connection.session.send.call_args - self.assertEqual(timeout, kwargs["timeout"]) - self.assertEqual(1, len(args)) - return args[0] - - def test_custom_http_auth_is_allowed(self): - auth = AuthBase() - c = RequestsHttpConnection(http_auth=auth) - - self.assertEqual(auth, c.session.auth) - - def test_timeout_set(self): - con = RequestsHttpConnection(timeout=42) - self.assertEqual(42, con.timeout) - - def test_opaque_id(self): - con = RequestsHttpConnection(opaque_id="app-1") - self.assertEqual(con.headers["x-opaque-id"], "app-1") - - def test_no_http_compression(self): - con = self._get_mock_connection() - - self.assertFalse(con.http_compress) - self.assertNotIn("content-encoding", con.session.headers) - - con.perform_request("GET", "/") - - req = con.session.send.call_args[0][0] - self.assertNotIn("content-encoding", req.headers) - self.assertNotIn("accept-encoding", req.headers) - - def test_http_compression(self): - con = self._get_mock_connection( - {"http_compress": True}, - ) - - self.assertTrue(con.http_compress) - - # 'content-encoding' shouldn't be set at a session level. - # Should be applied only if the request is sent with a body. 
- self.assertNotIn("content-encoding", con.session.headers) - - con.perform_request("GET", "/", body=b"{}") - - req = con.session.send.call_args[0][0] - self.assertEqual(req.headers["content-encoding"], "gzip") - self.assertEqual(req.headers["accept-encoding"], "gzip,deflate") - - con.perform_request("GET", "/") - - req = con.session.send.call_args[0][0] - self.assertNotIn("content-encoding", req.headers) - self.assertEqual(req.headers["accept-encoding"], "gzip,deflate") - - def test_uses_https_if_verify_certs_is_off(self): - with warnings.catch_warnings(record=True) as w: - con = self._get_mock_connection( - {"use_ssl": True, "url_prefix": "url", "verify_certs": False} - ) - self.assertEqual(1, len(w)) - self.assertEqual( - "Connecting to https://localhost:9200 using SSL with verify_certs=False is insecure.", - str(w[0].message), - ) - - request = self._get_request(con, "GET", "/") - - self.assertEqual("https://localhost:9200/url/", request.url) - self.assertEqual("GET", request.method) - self.assertEqual(None, request.body) - - def test_uses_given_ca_certs(self): - path = "/path/to/my/ca_certs.pem" - c = RequestsHttpConnection(ca_certs=path) - self.assertEqual(path, c.session.verify) - - def test_uses_default_ca_certs(self): - c = RequestsHttpConnection() - self.assertEqual(Connection.default_ca_certs(), c.session.verify) - - def test_uses_no_ca_certs(self): - c = RequestsHttpConnection(verify_certs=False) - self.assertFalse(c.session.verify) - - def test_nowarn_when_uses_https_if_verify_certs_is_off(self): - with warnings.catch_warnings(record=True) as w: - con = self._get_mock_connection( - { - "use_ssl": True, - "url_prefix": "url", - "verify_certs": False, - "ssl_show_warn": False, - } - ) - self.assertEqual(0, len(w)) - - request = self._get_request(con, "GET", "/") - - self.assertEqual("https://localhost:9200/url/", request.url) - self.assertEqual("GET", request.method) - self.assertEqual(None, request.body) - - def test_merge_headers(self): - con = self._get_mock_connection( - connection_params={"headers": {"h1": "v1", "h2": "v2"}} - ) - req = self._get_request(con, "GET", "/", headers={"h2": "v2p", "h3": "v3"}) - self.assertEqual(req.headers["h1"], "v1") - self.assertEqual(req.headers["h2"], "v2p") - self.assertEqual(req.headers["h3"], "v3") - - def test_default_headers(self): - con = self._get_mock_connection() - req = self._get_request(con, "GET", "/") - self.assertEqual(req.headers["content-type"], "application/json") - self.assertEqual(req.headers["user-agent"], con._get_default_user_agent()) - - def test_custom_headers(self): - con = self._get_mock_connection() - req = self._get_request( - con, - "GET", - "/", - headers={ - "content-type": "application/x-ndjson", - "user-agent": "custom-agent/1.2.3", - }, - ) - self.assertEqual(req.headers["content-type"], "application/x-ndjson") - self.assertEqual(req.headers["user-agent"], "custom-agent/1.2.3") - - def test_http_auth(self): - con = RequestsHttpConnection(http_auth="username:secret") - self.assertEqual(("username", "secret"), con.session.auth) - - def test_http_auth_tuple(self): - con = RequestsHttpConnection(http_auth=("username", "secret")) - self.assertEqual(("username", "secret"), con.session.auth) - - def test_http_auth_list(self): - con = RequestsHttpConnection(http_auth=["username", "secret"]) - self.assertEqual(("username", "secret"), con.session.auth) - - def test_repr(self): - con = self._get_mock_connection({"host": "opensearchpy.com", "port": 443}) - self.assertEqual( - "<RequestsHttpConnection: http://opensearchpy.com:443>", repr(con) - ) - - def 
test_conflict_error_is_returned_on_409(self): - con = self._get_mock_connection(status_code=409) - self.assertRaises(ConflictError, con.perform_request, "GET", "/", {}, "") - - def test_not_found_error_is_returned_on_404(self): - con = self._get_mock_connection(status_code=404) - self.assertRaises(NotFoundError, con.perform_request, "GET", "/", {}, "") - - def test_request_error_is_returned_on_400(self): - con = self._get_mock_connection(status_code=400) - self.assertRaises(RequestError, con.perform_request, "GET", "/", {}, "") - - @patch("opensearchpy.connection.base.logger") - def test_head_with_404_doesnt_get_logged(self, logger): - con = self._get_mock_connection(status_code=404) - self.assertRaises(NotFoundError, con.perform_request, "HEAD", "/", {}, "") - self.assertEqual(0, logger.warning.call_count) - - @patch("opensearchpy.connection.base.tracer") - @patch("opensearchpy.connection.base.logger") - def test_failed_request_logs_and_traces(self, logger, tracer): - con = self._get_mock_connection( - response_body=b'{"answer": 42}', status_code=500 - ) - self.assertRaises( - TransportError, - con.perform_request, - "GET", - "/", - {"param": 42}, - "{}".encode("utf-8"), - ) - - # trace request - self.assertEqual(1, tracer.info.call_count) - # trace response - self.assertEqual(1, tracer.debug.call_count) - # log url and duration - self.assertEqual(1, logger.warning.call_count) - self.assertTrue( - re.match( - r"^GET http://localhost:9200/\?param=42 \[status:500 request:0.[0-9]{3}s\]", - logger.warning.call_args[0][0] % logger.warning.call_args[0][1:], - ) - ) - - @patch("opensearchpy.connection.base.tracer") - @patch("opensearchpy.connection.base.logger") - def test_success_logs_and_traces(self, logger, tracer): - con = self._get_mock_connection(response_body=b"""{"answer": "that's it!"}""") - status, headers, data = con.perform_request( - "GET", - "/", - {"param": 42}, - """{"question": "what's that?"}""".encode("utf-8"), - ) - - # trace request - self.assertEqual(1, tracer.info.call_count) - self.assertEqual( - """curl -H 'Content-Type: application/json' -XGET 'http://localhost:9200/?pretty&param=42' -d '{\n "question": "what\\u0027s that?"\n}'""", - tracer.info.call_args[0][0] % tracer.info.call_args[0][1:], - ) - # trace response - self.assertEqual(1, tracer.debug.call_count) - self.assertTrue( - re.match( - r'#\[200\] \(0.[0-9]{3}s\)\n#{\n# "answer": "that\\u0027s it!"\n#}', - tracer.debug.call_args[0][0] % tracer.debug.call_args[0][1:], - ) - ) - - # log url and duration - self.assertEqual(1, logger.info.call_count) - self.assertTrue( - re.match( - r"GET http://localhost:9200/\?param=42 \[status:200 request:0.[0-9]{3}s\]", - logger.info.call_args[0][0] % logger.info.call_args[0][1:], - ) - ) - # log request body and response - self.assertEqual(2, logger.debug.call_count) - req, resp = logger.debug.call_args_list - self.assertEqual('> {"question": "what\'s that?"}', req[0][0] % req[0][1:]) - self.assertEqual('< {"answer": "that\'s it!"}', resp[0][0] % resp[0][1:]) - - @patch("opensearchpy.connection.base.logger") - def test_uncompressed_body_logged(self, logger): - con = self._get_mock_connection(connection_params={"http_compress": True}) - con.perform_request("GET", "/", body=b'{"example": "body"}') - - self.assertEqual(2, logger.debug.call_count) - req, resp = logger.debug.call_args_list - self.assertEqual('> {"example": "body"}', req[0][0] % req[0][1:]) - self.assertEqual("< {}", resp[0][0] % resp[0][1:]) - - con = self._get_mock_connection( - connection_params={"http_compress": 
True}, - status_code=500, - response_body=b'{"hello":"world"}', - ) - with pytest.raises(TransportError): - con.perform_request("GET", "/", body=b'{"example": "body2"}') - - self.assertEqual(4, logger.debug.call_count) - _, _, req, resp = logger.debug.call_args_list - self.assertEqual('> {"example": "body2"}', req[0][0] % req[0][1:]) - self.assertEqual('< {"hello":"world"}', resp[0][0] % resp[0][1:]) - - def test_defaults(self): - con = self._get_mock_connection() - request = self._get_request(con, "GET", "/") - - self.assertEqual("http://localhost:9200/", request.url) - self.assertEqual("GET", request.method) - self.assertEqual(None, request.body) - - def test_params_properly_encoded(self): - con = self._get_mock_connection() - request = self._get_request( - con, "GET", "/", params={"param": "value with spaces"} - ) - - self.assertEqual("http://localhost:9200/?param=value+with+spaces", request.url) - self.assertEqual("GET", request.method) - self.assertEqual(None, request.body) - - def test_body_attached(self): - con = self._get_mock_connection() - request = self._get_request(con, "GET", "/", body='{"answer": 42}') - - self.assertEqual("http://localhost:9200/", request.url) - self.assertEqual("GET", request.method) - self.assertEqual('{"answer": 42}'.encode("utf-8"), request.body) - - def test_http_auth_attached(self): - con = self._get_mock_connection({"http_auth": "username:secret"}) - request = self._get_request(con, "GET", "/") - - self.assertEqual(request.headers["authorization"], "Basic dXNlcm5hbWU6c2VjcmV0") - - @patch("opensearchpy.connection.base.tracer") - def test_url_prefix(self, tracer): - con = self._get_mock_connection({"url_prefix": "/some-prefix/"}) - request = self._get_request( - con, "GET", "/_search", body='{"answer": 42}', timeout=0.1 - ) - - self.assertEqual("http://localhost:9200/some-prefix/_search", request.url) - self.assertEqual("GET", request.method) - self.assertEqual('{"answer": 42}'.encode("utf-8"), request.body) - - # trace request - self.assertEqual(1, tracer.info.call_count) - self.assertEqual( - "curl -H 'Content-Type: application/json' -XGET 'http://localhost:9200/_search?pretty' -d '{\n \"answer\": 42\n}'", - tracer.info.call_args[0][0] % tracer.info.call_args[0][1:], - ) - - def test_surrogatepass_into_bytes(self): - buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" - con = self._get_mock_connection(response_body=buf) - status, headers, data = con.perform_request("GET", "/") - self.assertEqual(u"你好\uda6a", data) # fmt: skip - - @pytest.mark.skipif( - not reraise_exceptions, reason="RecursionError isn't defined in Python <3.5" - ) - def test_recursion_error_reraised(self): - conn = RequestsHttpConnection() - - def send_raise(*_, **__): - raise RecursionError("Wasn't modified!") - - conn.session.send = send_raise - - with pytest.raises(RecursionError) as e: - conn.perform_request("GET", "/") - assert str(e.value) == "Wasn't modified!" 
- - -@pytest.mark.skipif( - sys.version_info < (3, 0), - reason="http_server is only available from python 3.x", -) -class TestConnectionHttpServer: - """Tests the HTTP connection implementations against a live server E2E""" - - @classmethod - def setup_class(cls): - # Start server - cls.server = TestHTTPServer(port=8080) - cls.server.start() - - @classmethod - def teardown_class(cls): - # Stop server - cls.server.stop() - - def httpserver(self, conn, **kwargs): - status, headers, data = conn.perform_request("GET", "/", **kwargs) - data = json.loads(data) - return (status, data) - - def test_urllib3_connection(self): - # Defaults - conn = Urllib3HttpConnection("localhost", port=8080, use_ssl=False, timeout=60) - user_agent = conn._get_default_user_agent() - status, data = self.httpserver(conn) - assert status == 200 - assert data["method"] == "GET" - assert data["headers"] == { - "Accept-Encoding": "identity", - "Content-Type": "application/json", - "Host": "localhost:8080", - "User-Agent": user_agent, - } - - # http_compress=False - conn = Urllib3HttpConnection( - "localhost", port=8080, use_ssl=False, http_compress=False, timeout=60 - ) - status, data = self.httpserver(conn) - assert status == 200 - assert data["method"] == "GET" - assert data["headers"] == { - "Accept-Encoding": "identity", - "Content-Type": "application/json", - "Host": "localhost:8080", - "User-Agent": user_agent, - } - - # http_compress=True - conn = Urllib3HttpConnection( - "localhost", port=8080, use_ssl=False, http_compress=True, timeout=60 - ) - status, data = self.httpserver(conn) - assert status == 200 - assert data["headers"] == { - "Accept-Encoding": "gzip,deflate", - "Content-Type": "application/json", - "Host": "localhost:8080", - "User-Agent": user_agent, - } - - # Headers - conn = Urllib3HttpConnection( - "localhost", - port=8080, - use_ssl=False, - http_compress=True, - headers={"header1": "value1"}, - timeout=60, - ) - status, data = self.httpserver( - conn, headers={"header2": "value2", "header1": "override!"} - ) - assert status == 200 - assert data["headers"] == { - "Accept-Encoding": "gzip,deflate", - "Content-Type": "application/json", - "Host": "localhost:8080", - "Header1": "override!", - "Header2": "value2", - "User-Agent": user_agent, - } - - def test_urllib3_connection_error(self): - conn = Urllib3HttpConnection("not.a.host.name") - with pytest.raises(ConnectionError): - conn.perform_request("GET", "/") - - def test_requests_connection(self): - # Defaults - conn = RequestsHttpConnection("localhost", port=8080, use_ssl=False, timeout=60) - user_agent = conn._get_default_user_agent() - status, data = self.httpserver(conn) - assert status == 200 - assert data["method"] == "GET" - assert data["headers"] == { - "Accept-Encoding": "identity", - "Content-Type": "application/json", - "Host": "localhost:8080", - "User-Agent": user_agent, - } - - # http_compress=False - conn = RequestsHttpConnection( - "localhost", port=8080, use_ssl=False, http_compress=False, timeout=60 - ) - status, data = self.httpserver(conn) - assert status == 200 - assert data["method"] == "GET" - assert data["headers"] == { - "Accept-Encoding": "identity", - "Content-Type": "application/json", - "Host": "localhost:8080", - "User-Agent": user_agent, - } - - # http_compress=True - conn = RequestsHttpConnection( - "localhost", port=8080, use_ssl=False, http_compress=True, timeout=60 - ) - status, data = self.httpserver(conn) - assert status == 200 - assert data["headers"] == { - "Accept-Encoding": "gzip,deflate", - "Content-Type": 
"application/json", - "Host": "localhost:8080", - "User-Agent": user_agent, - } - - # Headers - conn = RequestsHttpConnection( - "localhost", - port=8080, - use_ssl=False, - http_compress=True, - headers={"header1": "value1"}, - timeout=60, - ) - status, data = self.httpserver( - conn, headers={"header2": "value2", "header1": "override!"} - ) - assert status == 200 - assert data["headers"] == { - "Accept-Encoding": "gzip,deflate", - "Content-Type": "application/json", - "Host": "localhost:8080", - "Header1": "override!", - "Header2": "value2", - "User-Agent": user_agent, - } - - def test_requests_connection_error(self): - conn = RequestsHttpConnection("not.a.host.name") - with pytest.raises(ConnectionError): - conn.perform_request("GET", "/") - - -@pytest.mark.skipif( - sys.version_info < (3, 0), - reason="http_server is only available from python 3.x", -) -class TestRequestsConnectionRedirect: - @classmethod - def setup_class(cls): - # Start servers - cls.server1 = TestHTTPServer(port=8080) - cls.server1.start() - cls.server2 = TestHTTPServer(port=8090) - cls.server2.start() - - @classmethod - def teardown_class(cls): - # Stop servers - cls.server2.stop() - cls.server1.stop() - - # allow_redirects = False - def test_redirect_failure_when_allow_redirect_false(self): - conn = RequestsHttpConnection("localhost", port=8080, use_ssl=False, timeout=60) - with pytest.raises(TransportError) as e: - conn.perform_request("GET", "/redirect", allow_redirects=False) - assert e.value.status_code == 302 - - # allow_redirects = True (Default) - def test_redirect_success_when_allow_redirect_true(self): - conn = RequestsHttpConnection("localhost", port=8080, use_ssl=False, timeout=60) - user_agent = conn._get_default_user_agent() - status, headers, data = conn.perform_request("GET", "/redirect") - assert status == 200 - data = json.loads(data) - assert data["headers"] == { - "Host": "localhost:8090", - "Accept-Encoding": "identity", - "User-Agent": user_agent, - } - - -def test_default_connection_is_returned_by_default(): - c = connections.Connections() - - con, con2 = object(), object() - c.add_connection("default", con) - - c.add_connection("not-default", con2) - - assert c.get_connection() is con - - -def test_get_connection_created_connection_if_needed(): - c = connections.Connections() - c.configure(default={"hosts": ["opensearch.com"]}, local={"hosts": ["localhost"]}) - - default = c.get_connection() - local = c.get_connection("local") - - assert isinstance(default, OpenSearch) - assert isinstance(local, OpenSearch) - - assert [{"host": "opensearch.com"}] == default.transport.hosts - assert [{"host": "localhost"}] == local.transport.hosts - - -def test_configure_preserves_unchanged_connections(): - c = connections.Connections() - - c.configure(default={"hosts": ["opensearch.com"]}, local={"hosts": ["localhost"]}) - default = c.get_connection() - local = c.get_connection("local") - - c.configure( - default={"hosts": ["not-opensearch.com"]}, local={"hosts": ["localhost"]} - ) - new_default = c.get_connection() - new_local = c.get_connection("local") - - assert new_local is local - assert new_default is not default - - -def test_remove_connection_removes_both_conn_and_conf(): - c = connections.Connections() - - c.configure(default={"hosts": ["opensearch.com"]}, local={"hosts": ["localhost"]}) - c.add_connection("local2", object()) - - c.remove_connection("default") - c.get_connection("local2") - c.remove_connection("local2") - - with raises(Exception): - c.get_connection("local2") - 
c.get_connection("default") - - -def test_create_connection_constructs_client(): - c = connections.Connections() - c.create_connection("testing", hosts=["opensearch.com"]) - - con = c.get_connection("testing") - assert [{"host": "opensearch.com"}] == con.transport.hosts - - -def test_create_connection_adds_our_serializer(): - c = connections.Connections() - c.create_connection("testing", hosts=["opensearch.com"]) - - assert c.get_connection("testing").transport.serializer is serializer.serializer diff --git a/opensearchpy/helpers/response/hit.pyi b/test_opensearchpy/test_connection/__init__.py similarity index 95% rename from opensearchpy/helpers/response/hit.pyi rename to test_opensearchpy/test_connection/__init__.py index ae3cdf00..392fa5bd 100644 --- a/opensearchpy/helpers/response/hit.pyi +++ b/test_opensearchpy/test_connection/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -23,7 +24,3 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. - -from ..utils import AttrDict - -class Hit(AttrDict): ... diff --git a/test_opensearchpy/test_connection/test_base_connection.py b/test_opensearchpy/test_connection/test_base_connection.py new file mode 100644 index 00000000..45cc46fd --- /dev/null +++ b/test_opensearchpy/test_connection/test_base_connection.py @@ -0,0 +1,214 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. +# +# Licensed to Elasticsearch B.V. under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch B.V. licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ + +import os +import sys +import warnings + +from opensearchpy.connection import Connection + +from ..test_cases import TestCase + +try: + from pytest import MonkeyPatch +except ImportError: # Old version of pytest for 2.7 and 3.5 + from _pytest.monkeypatch import MonkeyPatch + +from pytest import raises + +from opensearchpy import OpenSearch, serializer +from opensearchpy.connection import connections + + +class TestBaseConnection(TestCase): + def test_empty_warnings(self) -> None: + con = Connection() + with warnings.catch_warnings(record=True) as w: + con._raise_warnings(()) + con._raise_warnings([]) + + self.assertEqual(w, []) + + def test_raises_warnings(self) -> None: + con = Connection() + + with warnings.catch_warnings(record=True) as warn: + con._raise_warnings(['299 OpenSearch-7.6.1-aa751 "this is deprecated"']) + + self.assertEqual([str(w.message) for w in warn], ["this is deprecated"]) + + with warnings.catch_warnings(record=True) as warn: + con._raise_warnings( + [ + '299 OpenSearch-7.6.1-aa751 "this is also deprecated"', + '299 OpenSearch-7.6.1-aa751 "this is also deprecated"', + '299 OpenSearch-7.6.1-aa751 "guess what? deprecated"', + ] + ) + + self.assertEqual( + [str(w.message) for w in warn], + ["this is also deprecated", "guess what? deprecated"], + ) + + def test_raises_warnings_when_folded(self) -> None: + con = Connection() + with warnings.catch_warnings(record=True) as warn: + con._raise_warnings( + [ + '299 OpenSearch-7.6.1-aa751 "warning",' + '299 OpenSearch-7.6.1-aa751 "folded"', + ] + ) + + self.assertEqual([str(w.message) for w in warn], ["warning", "folded"]) + + def test_ipv6_host_and_port(self) -> None: + for kwargs, expected_host in [ + ({"host": "::1"}, "http://[::1]:9200"), + ({"host": "::1", "port": 443}, "http://[::1]:443"), + ({"host": "::1", "use_ssl": True}, "https://[::1]:9200"), + ({"host": "127.0.0.1", "port": 1234}, "http://127.0.0.1:1234"), + ({"host": "localhost", "use_ssl": True}, "https://localhost:9200"), + ]: + conn = Connection(**kwargs) # type: ignore + assert conn.host == expected_host + + def test_compatibility_accept_header(self) -> None: + try: + conn = Connection() + assert "accept" not in conn.headers + + os.environ["ELASTIC_CLIENT_APIVERSIONING"] = "0" + + conn = Connection() + assert "accept" not in conn.headers + + os.environ["ELASTIC_CLIENT_APIVERSIONING"] = "1" + + conn = Connection() + assert ( + conn.headers["accept"] + == "application/vnd.elasticsearch+json;compatible-with=7" + ) + finally: + os.environ.pop("ELASTIC_CLIENT_APIVERSIONING") + + def test_ca_certs_ssl_cert_file(self) -> None: + cert = "/path/to/clientcert.pem" + with MonkeyPatch().context() as monkeypatch: + monkeypatch.setenv("SSL_CERT_FILE", cert) + assert Connection.default_ca_certs() == cert + + def test_ca_certs_ssl_cert_dir(self) -> None: + cert = "/path/to/clientcert/dir" + with MonkeyPatch().context() as monkeypatch: + monkeypatch.setenv("SSL_CERT_DIR", cert) + assert Connection.default_ca_certs() == cert + + def test_ca_certs_certifi(self) -> None: + import certifi + + assert Connection.default_ca_certs() == certifi.where() + + def test_no_ca_certs(self) -> None: + with MonkeyPatch().context() as monkeypatch: + monkeypatch.setitem(sys.modules, "certifi", None) + assert Connection.default_ca_certs() is None + + def test_default_connection_is_returned_by_default(self) -> None: + c = connections.Connections() + + con, con2 = object(), object() + c.add_connection("default", con) + + c.add_connection("not-default", con2) + + assert c.get_connection() is con 
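+ # configure() only records connection settings; get_connection() builds the OpenSearch client lazily, which the next tests depend on.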
+ + def test_get_connection_created_connection_if_needed(self) -> None: + c = connections.Connections() + c.configure( + default={"hosts": ["opensearch.com"]}, local={"hosts": ["localhost"]} + ) + + default = c.get_connection() + local = c.get_connection("local") + + assert isinstance(default, OpenSearch) + assert isinstance(local, OpenSearch) + + assert [{"host": "opensearch.com"}] == default.transport.hosts + assert [{"host": "localhost"}] == local.transport.hosts + + def test_configure_preserves_unchanged_connections(self) -> None: + c = connections.Connections() + + c.configure( + default={"hosts": ["opensearch.com"]}, local={"hosts": ["localhost"]} + ) + default = c.get_connection() + local = c.get_connection("local") + + c.configure( + default={"hosts": ["not-opensearch.com"]}, local={"hosts": ["localhost"]} + ) + new_default = c.get_connection() + new_local = c.get_connection("local") + + assert new_local is local + assert new_default is not default + + def test_remove_connection_removes_both_conn_and_conf(self) -> None: + c = connections.Connections() + + c.configure( + default={"hosts": ["opensearch.com"]}, local={"hosts": ["localhost"]} + ) + c.add_connection("local2", object()) + + c.remove_connection("default") + c.get_connection("local2") + c.remove_connection("local2") + + with raises(Exception): + c.get_connection("local2") + c.get_connection("default") + + def test_create_connection_constructs_client(self) -> None: + c = connections.Connections() + c.create_connection("testing", hosts=["opensearch.com"]) + + con = c.get_connection("testing") + assert [{"host": "opensearch.com"}] == con.transport.hosts + + def test_create_connection_adds_our_serializer(self) -> None: + c = connections.Connections() + c.create_connection("testing", hosts=["opensearch.com"]) + + assert c.get_connection("testing").transport.serializer is serializer.serializer diff --git a/test_opensearchpy/test_connection/test_requests_http_connection.py b/test_opensearchpy/test_connection/test_requests_http_connection.py new file mode 100644 index 00000000..7043ec54 --- /dev/null +++ b/test_opensearchpy/test_connection/test_requests_http_connection.py @@ -0,0 +1,547 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. +# +# Licensed to Elasticsearch B.V. under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch B.V. licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
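+# Requests transport tests moved from test_connection.py; _get_mock_connection() stubs session.send, so no live HTTP server is needed.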
+ + +import json +import re +import uuid +import warnings +from typing import Any + +import pytest +from mock import Mock, patch +from requests.auth import AuthBase + +from opensearchpy.connection import Connection, RequestsHttpConnection +from opensearchpy.exceptions import ( + ConflictError, + NotFoundError, + RequestError, + TransportError, +) +from test_opensearchpy.TestHttpServer import TestHTTPServer + +from ..test_cases import TestCase + + +class TestRequestsHttpConnection(TestCase): + def _get_mock_connection( + self, + connection_params: Any = {}, + status_code: int = 200, + response_body: bytes = b"{}", + ) -> Any: + con = RequestsHttpConnection(**connection_params) + + def _dummy_send(*args: Any, **kwargs: Any) -> Any: + dummy_response = Mock() + dummy_response.headers = {} + dummy_response.status_code = status_code + dummy_response.content = response_body + dummy_response.request = args[0] + dummy_response.cookies = {} + _dummy_send.call_args = (args, kwargs) # type: ignore + return dummy_response + + con.session.send = _dummy_send # type: ignore + return con + + def _get_request(self, connection: Any, *args: Any, **kwargs: Any) -> Any: + if "body" in kwargs: + kwargs["body"] = kwargs["body"].encode("utf-8") + + status, headers, data = connection.perform_request(*args, **kwargs) + self.assertEqual(200, status) + self.assertEqual("{}", data) + + timeout = kwargs.pop("timeout", connection.timeout) + args, kwargs = connection.session.send.call_args + self.assertEqual(timeout, kwargs["timeout"]) + self.assertEqual(1, len(args)) + return args[0] + + def test_custom_http_auth_is_allowed(self) -> None: + auth = AuthBase() + c = RequestsHttpConnection(http_auth=auth) + + self.assertEqual(auth, c.session.auth) + + def test_timeout_set(self) -> None: + con = RequestsHttpConnection(timeout=42) + self.assertEqual(42, con.timeout) + + def test_opaque_id(self) -> None: + con = RequestsHttpConnection(opaque_id="app-1") + self.assertEqual(con.headers["x-opaque-id"], "app-1") + + def test_no_http_compression(self) -> None: + con = self._get_mock_connection() + + self.assertFalse(con.http_compress) + self.assertNotIn("content-encoding", con.session.headers) + + con.perform_request("GET", "/") + + req = con.session.send.call_args[0][0] + self.assertNotIn("content-encoding", req.headers) + self.assertNotIn("accept-encoding", req.headers) + + def test_http_compression(self) -> None: + con = self._get_mock_connection( + {"http_compress": True}, + ) + + self.assertTrue(con.http_compress) + + # 'content-encoding' shouldn't be set at a session level. + # Should be applied only if the request is sent with a body. 
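+ # accept-encoding, on the other hand, is advertised on every request once http_compress is enabled.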
+ self.assertNotIn("content-encoding", con.session.headers) + + con.perform_request("GET", "/", body=b"{}") + + req = con.session.send.call_args[0][0] + self.assertEqual(req.headers["content-encoding"], "gzip") + self.assertEqual(req.headers["accept-encoding"], "gzip,deflate") + + con.perform_request("GET", "/") + + req = con.session.send.call_args[0][0] + self.assertNotIn("content-encoding", req.headers) + self.assertEqual(req.headers["accept-encoding"], "gzip,deflate") + + def test_uses_https_if_verify_certs_is_off(self) -> None: + with warnings.catch_warnings(record=True) as w: + con = self._get_mock_connection( + {"use_ssl": True, "url_prefix": "url", "verify_certs": False} + ) + self.assertEqual(1, len(w)) + self.assertEqual( + "Connecting to https://localhost:9200 using SSL with verify_certs=False is insecure.", + str(w[0].message), + ) + + request = self._get_request(con, "GET", "/") + + self.assertEqual("https://localhost:9200/url/", request.url) + self.assertEqual("GET", request.method) + self.assertEqual(None, request.body) + + def test_uses_given_ca_certs(self) -> None: + path = "/path/to/my/ca_certs.pem" + c = RequestsHttpConnection(ca_certs=path) + self.assertEqual(path, c.session.verify) + + def test_uses_default_ca_certs(self) -> None: + c = RequestsHttpConnection() + self.assertEqual(Connection.default_ca_certs(), c.session.verify) + + def test_uses_no_ca_certs(self) -> None: + c = RequestsHttpConnection(verify_certs=False) + self.assertFalse(c.session.verify) + + def test_nowarn_when_uses_https_if_verify_certs_is_off(self) -> None: + with warnings.catch_warnings(record=True) as w: + con = self._get_mock_connection( + { + "use_ssl": True, + "url_prefix": "url", + "verify_certs": False, + "ssl_show_warn": False, + } + ) + self.assertEqual(0, len(w)) + + request = self._get_request(con, "GET", "/") + + self.assertEqual("https://localhost:9200/url/", request.url) + self.assertEqual("GET", request.method) + self.assertEqual(None, request.body) + + def test_merge_headers(self) -> None: + con = self._get_mock_connection( + connection_params={"headers": {"h1": "v1", "h2": "v2"}} + ) + req = self._get_request(con, "GET", "/", headers={"h2": "v2p", "h3": "v3"}) + self.assertEqual(req.headers["h1"], "v1") + self.assertEqual(req.headers["h2"], "v2p") + self.assertEqual(req.headers["h3"], "v3") + + def test_default_headers(self) -> None: + con = self._get_mock_connection() + req = self._get_request(con, "GET", "/") + self.assertEqual(req.headers["content-type"], "application/json") + self.assertEqual(req.headers["user-agent"], con._get_default_user_agent()) + + def test_custom_headers(self) -> None: + con = self._get_mock_connection() + req = self._get_request( + con, + "GET", + "/", + headers={ + "content-type": "application/x-ndjson", + "user-agent": "custom-agent/1.2.3", + }, + ) + self.assertEqual(req.headers["content-type"], "application/x-ndjson") + self.assertEqual(req.headers["user-agent"], "custom-agent/1.2.3") + + def test_http_auth(self) -> None: + con = RequestsHttpConnection(http_auth="username:secret") + self.assertEqual(("username", "secret"), con.session.auth) + + def test_http_auth_tuple(self) -> None: + con = RequestsHttpConnection(http_auth=("username", "secret")) + self.assertEqual(("username", "secret"), con.session.auth) + + def test_http_auth_list(self) -> None: + con = RequestsHttpConnection(http_auth=["username", "secret"]) + self.assertEqual(("username", "secret"), con.session.auth) + + def test_repr(self) -> None: + con = self._get_mock_connection({"host": 
"opensearchpy.com", "port": 443}) + self.assertEqual( + "", repr(con) + ) + + def test_conflict_error_is_returned_on_409(self) -> None: + con = self._get_mock_connection(status_code=409) + self.assertRaises(ConflictError, con.perform_request, "GET", "/", {}, "") + + def test_not_found_error_is_returned_on_404(self) -> None: + con = self._get_mock_connection(status_code=404) + self.assertRaises(NotFoundError, con.perform_request, "GET", "/", {}, "") + + def test_request_error_is_returned_on_400(self) -> None: + con = self._get_mock_connection(status_code=400) + self.assertRaises(RequestError, con.perform_request, "GET", "/", {}, "") + + @patch("opensearchpy.connection.base.logger") + def test_head_with_404_doesnt_get_logged(self, logger: Any) -> None: + con = self._get_mock_connection(status_code=404) + self.assertRaises(NotFoundError, con.perform_request, "HEAD", "/", {}, "") + self.assertEqual(0, logger.warning.call_count) + + @patch("opensearchpy.connection.base.tracer") + @patch("opensearchpy.connection.base.logger") + def test_failed_request_logs_and_traces(self, logger: Any, tracer: Any) -> None: + con = self._get_mock_connection( + response_body=b'{"answer": 42}', status_code=500 + ) + self.assertRaises( + TransportError, + con.perform_request, + "GET", + "/", + {"param": 42}, + "{}".encode("utf-8"), + ) + + # trace request + self.assertEqual(1, tracer.info.call_count) + # trace response + self.assertEqual(1, tracer.debug.call_count) + # log url and duration + self.assertEqual(1, logger.warning.call_count) + self.assertTrue( + re.match( + r"^GET http://localhost:9200/\?param=42 \[status:500 request:0.[0-9]{3}s\]", + logger.warning.call_args[0][0] % logger.warning.call_args[0][1:], + ) + ) + + @patch("opensearchpy.connection.base.tracer") + @patch("opensearchpy.connection.base.logger") + def test_success_logs_and_traces(self, logger: Any, tracer: Any) -> None: + con = self._get_mock_connection(response_body=b"""{"answer": "that's it!"}""") + status, headers, data = con.perform_request( + "GET", + "/", + {"param": 42}, + """{"question": "what's that?"}""".encode("utf-8"), + ) + + # trace request + self.assertEqual(1, tracer.info.call_count) + self.assertEqual( + """curl -H 'Content-Type: application/json' -XGET 'http://localhost:9200/?pretty¶m=42' -d '{\n "question": "what\\u0027s that?"\n}'""", + tracer.info.call_args[0][0] % tracer.info.call_args[0][1:], + ) + # trace response + self.assertEqual(1, tracer.debug.call_count) + self.assertTrue( + re.match( + r'#\[200\] \(0.[0-9]{3}s\)\n#{\n# "answer": "that\\u0027s it!"\n#}', + tracer.debug.call_args[0][0] % tracer.debug.call_args[0][1:], + ) + ) + + # log url and duration + self.assertEqual(1, logger.info.call_count) + self.assertTrue( + re.match( + r"GET http://localhost:9200/\?param=42 \[status:200 request:0.[0-9]{3}s\]", + logger.info.call_args[0][0] % logger.info.call_args[0][1:], + ) + ) + # log request body and response + self.assertEqual(2, logger.debug.call_count) + req, resp = logger.debug.call_args_list + self.assertEqual('> {"question": "what\'s that?"}', req[0][0] % req[0][1:]) + self.assertEqual('< {"answer": "that\'s it!"}', resp[0][0] % resp[0][1:]) + + @patch("opensearchpy.connection.base.logger") + def test_uncompressed_body_logged(self, logger: Any) -> None: + con = self._get_mock_connection(connection_params={"http_compress": True}) + con.perform_request("GET", "/", body=b'{"example": "body"}') + + self.assertEqual(2, logger.debug.call_count) + req, resp = logger.debug.call_args_list + self.assertEqual('> {"example": 
"body"}', req[0][0] % req[0][1:]) + self.assertEqual("< {}", resp[0][0] % resp[0][1:]) + + con = self._get_mock_connection( + connection_params={"http_compress": True}, + status_code=500, + response_body=b'{"hello":"world"}', + ) + with pytest.raises(TransportError): + con.perform_request("GET", "/", body=b'{"example": "body2"}') + + self.assertEqual(4, logger.debug.call_count) + _, _, req, resp = logger.debug.call_args_list + self.assertEqual('> {"example": "body2"}', req[0][0] % req[0][1:]) + self.assertEqual('< {"hello":"world"}', resp[0][0] % resp[0][1:]) + + def test_defaults(self) -> None: + con = self._get_mock_connection() + request = self._get_request(con, "GET", "/") + + self.assertEqual("http://localhost:9200/", request.url) + self.assertEqual("GET", request.method) + self.assertEqual(None, request.body) + + def test_params_properly_encoded(self) -> None: + con = self._get_mock_connection() + request = self._get_request( + con, "GET", "/", params={"param": "value with spaces"} + ) + + self.assertEqual("http://localhost:9200/?param=value+with+spaces", request.url) + self.assertEqual("GET", request.method) + self.assertEqual(None, request.body) + + def test_body_attached(self) -> None: + con = self._get_mock_connection() + request = self._get_request(con, "GET", "/", body='{"answer": 42}') + + self.assertEqual("http://localhost:9200/", request.url) + self.assertEqual("GET", request.method) + self.assertEqual('{"answer": 42}'.encode("utf-8"), request.body) + + def test_http_auth_attached(self) -> None: + con = self._get_mock_connection({"http_auth": "username:secret"}) + request = self._get_request(con, "GET", "/") + + self.assertEqual(request.headers["authorization"], "Basic dXNlcm5hbWU6c2VjcmV0") + + @patch("opensearchpy.connection.base.tracer") + def test_url_prefix(self, tracer: Any) -> None: + con = self._get_mock_connection({"url_prefix": "/some-prefix/"}) + request = self._get_request( + con, "GET", "/_search", body='{"answer": 42}', timeout=0.1 + ) + + self.assertEqual("http://localhost:9200/some-prefix/_search", request.url) + self.assertEqual("GET", request.method) + self.assertEqual('{"answer": 42}'.encode("utf-8"), request.body) + + # trace request + self.assertEqual(1, tracer.info.call_count) + self.assertEqual( + "curl -H 'Content-Type: application/json' -XGET 'http://localhost:9200/_search?pretty' -d '{\n \"answer\": 42\n}'", + tracer.info.call_args[0][0] % tracer.info.call_args[0][1:], + ) + + def test_surrogatepass_into_bytes(self) -> None: + buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" + con = self._get_mock_connection(response_body=buf) + status, headers, data = con.perform_request("GET", "/") + self.assertEqual(u"你好\uda6a", data) # fmt: skip + + def test_recursion_error_reraised(self) -> None: + conn = RequestsHttpConnection() + + def send_raise(*_: Any, **__: Any) -> Any: + raise RecursionError("Wasn't modified!") + + conn.session.send = send_raise # type: ignore + + with pytest.raises(RecursionError) as e: + conn.perform_request("GET", "/") + assert str(e.value) == "Wasn't modified!" 
+ + def mock_session(self) -> Any: + access_key = uuid.uuid4().hex + secret_key = uuid.uuid4().hex + token = uuid.uuid4().hex + dummy_session = Mock() + dummy_session.access_key = access_key + dummy_session.secret_key = secret_key + dummy_session.token = token + del dummy_session.get_frozen_credentials + + return dummy_session + + def test_aws_signer_as_http_auth(self) -> None: + region = "us-west-2" + + import requests + + from opensearchpy.helpers.signer import RequestsAWSV4SignerAuth + + auth = RequestsAWSV4SignerAuth(self.mock_session(), region) + con = RequestsHttpConnection(http_auth=auth) + prepared_request = requests.Request("GET", "http://localhost").prepare() + auth(prepared_request) + self.assertEqual(auth, con.session.auth) + self.assertIn("Authorization", prepared_request.headers) + self.assertIn("X-Amz-Date", prepared_request.headers) + self.assertIn("X-Amz-Security-Token", prepared_request.headers) + self.assertIn("X-Amz-Content-SHA256", prepared_request.headers) + + def test_aws_signer_when_service_is_specified(self) -> None: + region = "us-west-1" + service = "aoss" + + import requests + + from opensearchpy.helpers.signer import RequestsAWSV4SignerAuth + + auth = RequestsAWSV4SignerAuth(self.mock_session(), region, service) + con = RequestsHttpConnection(http_auth=auth) + prepared_request = requests.Request("GET", "http://localhost").prepare() + auth(prepared_request) + self.assertEqual(auth, con.session.auth) + self.assertIn("Authorization", prepared_request.headers) + self.assertIn("X-Amz-Date", prepared_request.headers) + self.assertIn("X-Amz-Security-Token", prepared_request.headers) + + @patch("opensearchpy.helpers.signer.AWSV4Signer.sign") + def test_aws_signer_signs_with_query_string(self, mock_sign: Any) -> None: + region = "us-west-1" + service = "aoss" + + import requests + + from opensearchpy.helpers.signer import RequestsAWSV4SignerAuth + + auth = RequestsAWSV4SignerAuth(self.mock_session(), region, service) + prepared_request = requests.Request( + "GET", "http://localhost", params={"key1": "value1", "key2": "value2"} + ).prepare() + auth(prepared_request) + self.assertEqual(mock_sign.call_count, 1) + self.assertEqual( + mock_sign.call_args[0], + ("GET", "http://localhost/?key1=value1&key2=value2", None), + ) + + +class TestRequestsConnectionRedirect: + server1: TestHTTPServer + server2: TestHTTPServer + + @classmethod + def setup_class(cls) -> None: + # Start servers + cls.server1 = TestHTTPServer(port=8080) + cls.server1.start() + cls.server2 = TestHTTPServer(port=8090) + cls.server2.start() + + @classmethod + def teardown_class(cls) -> None: + # Stop servers + cls.server2.stop() + cls.server1.stop() + + # allow_redirects = False + def test_redirect_failure_when_allow_redirect_false(self) -> None: + conn = RequestsHttpConnection("localhost", port=8080, use_ssl=False, timeout=60) + with pytest.raises(TransportError) as e: + conn.perform_request("GET", "/redirect", allow_redirects=False) + assert e.value.status_code == 302 + + # allow_redirects = True (Default) + def test_redirect_success_when_allow_redirect_true(self) -> None: + conn = RequestsHttpConnection("localhost", port=8080, use_ssl=False, timeout=60) + user_agent = conn._get_default_user_agent() + status, headers, data = conn.perform_request("GET", "/redirect") + assert status == 200 + data = json.loads(data) + assert data["headers"] == { + "Host": "localhost:8090", + "Accept-Encoding": "identity", + "User-Agent": user_agent, + } + + +class TestSignerWithFrozenCredentials(TestRequestsHttpConnection): + 
def mock_session(self) -> Any: + access_key = uuid.uuid4().hex + secret_key = uuid.uuid4().hex + token = uuid.uuid4().hex + dummy_session = Mock() + dummy_session.access_key = access_key + dummy_session.secret_key = secret_key + dummy_session.token = token + dummy_session.get_frozen_credentials = Mock(return_value=dummy_session) + + return dummy_session + + def test_requests_http_connection_aws_signer_frozen_credentials_as_http_auth( + self, + ) -> None: + region = "us-west-2" + + import requests + + from opensearchpy.helpers.signer import RequestsAWSV4SignerAuth + + mock_session = self.mock_session() + + auth = RequestsAWSV4SignerAuth(mock_session, region) + con = RequestsHttpConnection(http_auth=auth) + prepared_request = requests.Request("GET", "http://localhost").prepare() + auth(prepared_request) + self.assertEqual(auth, con.session.auth) + self.assertIn("Authorization", prepared_request.headers) + self.assertIn("X-Amz-Date", prepared_request.headers) + self.assertIn("X-Amz-Security-Token", prepared_request.headers) + self.assertIn("X-Amz-Content-SHA256", prepared_request.headers) + mock_session.get_frozen_credentials.assert_called_once() diff --git a/test_opensearchpy/test_connection/test_urllib3_http_connection.py b/test_opensearchpy/test_connection/test_urllib3_http_connection.py new file mode 100644 index 00000000..9720283b --- /dev/null +++ b/test_opensearchpy/test_connection/test_urllib3_http_connection.py @@ -0,0 +1,389 @@ +# -*- coding: utf-8 -*- +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. +# +# Licensed to Elasticsearch B.V. under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch B.V. licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
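
[Annotation, not part of this diff.] Both this file and the requests-connection one build fake botocore sessions the same way: a bare Mock carrying access_key/secret_key/token, with get_frozen_credentials either deleted (so the code under test takes its non-frozen branch) or stubbed to return the session itself (the frozen-credentials path, whose use the tests verify via assert_called_once). The trick relies on real mock behavior: deleting an attribute on a Mock makes later access raise AttributeError. A small demonstration:

    from mock import Mock

    plain = Mock()
    del plain.get_frozen_credentials  # attribute access now raises AttributeError
    assert not hasattr(plain, "get_frozen_credentials")

    frozen = Mock()
    frozen.get_frozen_credentials = Mock(return_value=frozen)
    assert frozen.get_frozen_credentials() is frozen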
+ + +import ssl +import uuid +import warnings +from gzip import GzipFile +from io import BytesIO +from platform import python_version +from typing import Any + +import pytest +import urllib3 +from mock import Mock, patch +from urllib3._collections import HTTPHeaderDict + +from opensearchpy import __versionstr__ +from opensearchpy.connection import Connection, Urllib3HttpConnection + +from ..test_cases import SkipTest, TestCase + + +class TestUrllib3HttpConnection(TestCase): + def _get_mock_connection( + self, connection_params: Any = {}, response_body: bytes = b"{}" + ) -> Any: + con = Urllib3HttpConnection(**connection_params) + + def _dummy_urlopen(*args: Any, **kwargs: Any) -> Any: + dummy_response = Mock() + dummy_response.headers = HTTPHeaderDict({}) + dummy_response.status = 200 + dummy_response.data = response_body + _dummy_urlopen.call_args = (args, kwargs) # type: ignore + return dummy_response + + con.pool.urlopen = _dummy_urlopen + return con + + def test_ssl_context(self) -> None: + try: + context = ssl.create_default_context() + except AttributeError: + # if create_default_context raises an AttributeError Exception + # it means SSLContext is not available for that version of python + # and we should skip this test. + raise SkipTest( + "Test test_ssl_context is skipped cause SSLContext is not available for this version of python" + ) + + con = Urllib3HttpConnection(use_ssl=True, ssl_context=context) + self.assertEqual(len(con.pool.conn_kw.keys()), 1) + self.assertIsInstance(con.pool.conn_kw["ssl_context"], ssl.SSLContext) + self.assertTrue(con.use_ssl) + + def test_opaque_id(self) -> None: + con = Urllib3HttpConnection(opaque_id="app-1") + self.assertEqual(con.headers["x-opaque-id"], "app-1") + + def test_no_http_compression(self) -> None: + con = self._get_mock_connection() + self.assertFalse(con.http_compress) + self.assertNotIn("accept-encoding", con.headers) + + con.perform_request("GET", "/") + + (_, _, req_body), kwargs = con.pool.urlopen.call_args + + self.assertFalse(req_body) + self.assertNotIn("accept-encoding", kwargs["headers"]) + self.assertNotIn("content-encoding", kwargs["headers"]) + + def test_http_compression(self) -> None: + con = self._get_mock_connection({"http_compress": True}) + self.assertTrue(con.http_compress) + self.assertEqual(con.headers["accept-encoding"], "gzip,deflate") + + # 'content-encoding' shouldn't be set at a connection level. + # Should be applied only if the request is sent with a body. 
+ self.assertNotIn("content-encoding", con.headers) + + con.perform_request("GET", "/", body=b"{}") + + (_, _, req_body), kwargs = con.pool.urlopen.call_args + + buf = GzipFile(fileobj=BytesIO(req_body), mode="rb") + + self.assertEqual(buf.read(), b"{}") + self.assertEqual(kwargs["headers"]["accept-encoding"], "gzip,deflate") + self.assertEqual(kwargs["headers"]["content-encoding"], "gzip") + + con.perform_request("GET", "/") + + (_, _, req_body), kwargs = con.pool.urlopen.call_args + + self.assertFalse(req_body) + self.assertEqual(kwargs["headers"]["accept-encoding"], "gzip,deflate") + self.assertNotIn("content-encoding", kwargs["headers"]) + + def test_default_user_agent(self) -> None: + con = Urllib3HttpConnection() + self.assertEqual( + con._get_default_user_agent(), + "opensearch-py/%s (Python %s)" % (__versionstr__, python_version()), + ) + + def test_timeout_set(self) -> None: + con = Urllib3HttpConnection(timeout=42) + self.assertEqual(42, con.timeout) + + def test_keep_alive_is_on_by_default(self) -> None: + con = Urllib3HttpConnection() + self.assertEqual( + { + "connection": "keep-alive", + "content-type": "application/json", + "user-agent": con._get_default_user_agent(), + }, + con.headers, + ) + + def test_http_auth(self) -> None: + con = Urllib3HttpConnection(http_auth="username:secret") + self.assertEqual( + { + "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", + "connection": "keep-alive", + "content-type": "application/json", + "user-agent": con._get_default_user_agent(), + }, + con.headers, + ) + + def test_http_auth_tuple(self) -> None: + con = Urllib3HttpConnection(http_auth=("username", "secret")) + self.assertEqual( + { + "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", + "content-type": "application/json", + "connection": "keep-alive", + "user-agent": con._get_default_user_agent(), + }, + con.headers, + ) + + def test_http_auth_list(self) -> None: + con = Urllib3HttpConnection(http_auth=["username", "secret"]) + self.assertEqual( + { + "authorization": "Basic dXNlcm5hbWU6c2VjcmV0", + "content-type": "application/json", + "connection": "keep-alive", + "user-agent": con._get_default_user_agent(), + }, + con.headers, + ) + + @patch( + "urllib3.HTTPConnectionPool.urlopen", + return_value=Mock(status=200, headers=HTTPHeaderDict({}), data=b"{}"), + ) + def test_aws_signer_as_http_auth_adds_headers(self, mock_open: Any) -> None: + from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth + + auth = Urllib3AWSV4SignerAuth(self.mock_session(), "us-west-2") + con = Urllib3HttpConnection(http_auth=auth, headers={"x": "y"}) + con.perform_request("GET", "/") + self.assertEqual(mock_open.call_count, 1) + headers = mock_open.call_args[1]["headers"] + self.assertEqual(headers["x"], "y") + self.assertTrue( + headers["Authorization"].startswith("AWS4-HMAC-SHA256 Credential=") + ) + self.assertIn("X-Amz-Date", headers) + self.assertIn("X-Amz-Security-Token", headers) + self.assertIn("X-Amz-Content-SHA256", headers) + + def test_aws_signer_as_http_auth(self) -> None: + region = "us-west-2" + + from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth + + auth = Urllib3AWSV4SignerAuth(self.mock_session(), region) + headers = auth("GET", "http://localhost", None) + self.assertIn("Authorization", headers) + self.assertIn("X-Amz-Date", headers) + self.assertIn("X-Amz-Security-Token", headers) + self.assertIn("X-Amz-Content-SHA256", headers) + + def test_aws_signer_when_region_is_null(self) -> None: + session = self.mock_session() + + from opensearchpy.helpers.signer import 
Urllib3AWSV4SignerAuth + + with pytest.raises(ValueError) as e: + Urllib3AWSV4SignerAuth(session, None) + assert str(e.value) == "Region cannot be empty" + + with pytest.raises(ValueError) as e: + Urllib3AWSV4SignerAuth(session, "") + assert str(e.value) == "Region cannot be empty" + + def test_aws_signer_when_credentials_is_null(self) -> None: + region = "us-west-1" + + from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth + + with pytest.raises(ValueError) as e: + Urllib3AWSV4SignerAuth(None, region) + assert str(e.value) == "Credentials cannot be empty" + + with pytest.raises(ValueError) as e: + Urllib3AWSV4SignerAuth("", region) + assert str(e.value) == "Credentials cannot be empty" + + def test_aws_signer_when_service_is_specified(self) -> None: + region = "us-west-1" + service = "aoss" + + from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth + + auth = Urllib3AWSV4SignerAuth(self.mock_session(), region, service) + headers = auth("GET", "http://localhost", None) + self.assertIn("Authorization", headers) + self.assertIn("X-Amz-Date", headers) + self.assertIn("X-Amz-Security-Token", headers) + + def mock_session(self) -> Any: + access_key = uuid.uuid4().hex + secret_key = uuid.uuid4().hex + token = uuid.uuid4().hex + dummy_session = Mock() + dummy_session.access_key = access_key + dummy_session.secret_key = secret_key + dummy_session.token = token + del dummy_session.get_frozen_credentials + + return dummy_session + + def test_uses_https_if_verify_certs_is_off(self) -> None: + with warnings.catch_warnings(record=True) as w: + con = Urllib3HttpConnection(use_ssl=True, verify_certs=False) + self.assertEqual(1, len(w)) + self.assertEqual( + "Connecting to https://localhost:9200 using SSL with verify_certs=False is insecure.", + str(w[0].message), + ) + + self.assertIsInstance(con.pool, urllib3.HTTPSConnectionPool) + + def test_nowarn_when_uses_https_if_verify_certs_is_off(self) -> None: + with warnings.catch_warnings(record=True) as w: + con = Urllib3HttpConnection( + use_ssl=True, verify_certs=False, ssl_show_warn=False + ) + self.assertEqual(0, len(w)) + + self.assertIsInstance(con.pool, urllib3.HTTPSConnectionPool) + + def test_doesnt_use_https_if_not_specified(self) -> None: + con = Urllib3HttpConnection() + self.assertIsInstance(con.pool, urllib3.HTTPConnectionPool) + + def test_no_warning_when_using_ssl_context(self) -> None: + ctx = ssl.create_default_context() + with warnings.catch_warnings(record=True) as w: + Urllib3HttpConnection(ssl_context=ctx) + self.assertEqual(0, len(w)) + + def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self) -> None: + kwargs: Any + for kwargs in ( + {"ssl_show_warn": False}, + {"ssl_show_warn": True}, + {"verify_certs": True}, + {"verify_certs": False}, + {"ca_certs": "/path/to/certs"}, + {"ssl_show_warn": True, "ca_certs": "/path/to/certs"}, + ): + kwargs["ssl_context"] = ssl.create_default_context() + + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + + Urllib3HttpConnection(**kwargs) + + self.assertEqual(1, len(w)) + self.assertEqual( + "When using `ssl_context`, all other SSL related kwargs are ignored", + str(w[0].message), + ) + + def test_uses_given_ca_certs(self) -> None: + path = "/path/to/my/ca_certs.pem" + c = Urllib3HttpConnection(use_ssl=True, ca_certs=path) + self.assertEqual(path, c.pool.ca_certs) + + def test_uses_default_ca_certs(self) -> None: + c = Urllib3HttpConnection(use_ssl=True) + self.assertEqual(Connection.default_ca_certs(), c.pool.ca_certs) + + def 
test_uses_no_ca_certs(self) -> None: + c = Urllib3HttpConnection(use_ssl=True, verify_certs=False) + self.assertIsNone(c.pool.ca_certs) + + @patch("opensearchpy.connection.base.logger") + def test_uncompressed_body_logged(self, logger: Any) -> None: + con = self._get_mock_connection(connection_params={"http_compress": True}) + con.perform_request("GET", "/", body=b'{"example": "body"}') + + self.assertEqual(2, logger.debug.call_count) + req, resp = logger.debug.call_args_list + + self.assertEqual('> {"example": "body"}', req[0][0] % req[0][1:]) + self.assertEqual("< {}", resp[0][0] % resp[0][1:]) + + def test_surrogatepass_into_bytes(self) -> None: + buf = b"\xe4\xbd\xa0\xe5\xa5\xbd\xed\xa9\xaa" + con = self._get_mock_connection(response_body=buf) + status, headers, data = con.perform_request("GET", "/") + self.assertEqual(u"你好\uda6a", data) # fmt: skip + + def test_recursion_error_reraised(self) -> None: + conn = Urllib3HttpConnection() + + def urlopen_raise(*_: Any, **__: Any) -> Any: + raise RecursionError("Wasn't modified!") + + conn.pool.urlopen = urlopen_raise + + with pytest.raises(RecursionError) as e: + conn.perform_request("GET", "/") + assert str(e.value) == "Wasn't modified!" + + +class TestSignerWithFrozenCredentials(TestUrllib3HttpConnection): + def mock_session(self) -> Any: + access_key = uuid.uuid4().hex + secret_key = uuid.uuid4().hex + token = uuid.uuid4().hex + dummy_session = Mock() + dummy_session.access_key = access_key + dummy_session.secret_key = secret_key + dummy_session.token = token + dummy_session.get_frozen_credentials = Mock(return_value=dummy_session) + + return dummy_session + + def test_urllib3_http_connection_aws_signer_frozen_credentials_as_http_auth( + self, + ) -> None: + region = "us-west-2" + + from opensearchpy.helpers.signer import Urllib3AWSV4SignerAuth + + mock_session = self.mock_session() + + auth = Urllib3AWSV4SignerAuth(mock_session, region) + headers = auth("GET", "http://localhost", None) + self.assertIn("Authorization", headers) + self.assertIn("X-Amz-Date", headers) + self.assertIn("X-Amz-Security-Token", headers) + self.assertIn("X-Amz-Content-SHA256", headers) + mock_session.get_frozen_credentials.assert_called_once() diff --git a/test_opensearchpy/test_connection_pool.py b/test_opensearchpy/test_connection_pool.py index 02686e44..45afd93e 100644 --- a/test_opensearchpy/test_connection_pool.py +++ b/test_opensearchpy/test_connection_pool.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -26,6 +27,7 @@ import time +from typing import Any from opensearchpy.connection import Connection from opensearchpy.connection_pool import ( @@ -39,16 +41,16 @@ class TestConnectionPool(TestCase): - def test_dummy_cp_raises_exception_on_more_connections(self): + def test_dummy_cp_raises_exception_on_more_connections(self) -> None: self.assertRaises(ImproperlyConfigured, DummyConnectionPool, []) self.assertRaises( ImproperlyConfigured, DummyConnectionPool, [object(), object()] ) - def test_raises_exception_when_no_connections_defined(self): + def test_raises_exception_when_no_connections_defined(self) -> None: self.assertRaises(ImproperlyConfigured, ConnectionPool, []) - def test_default_round_robin(self): + def test_default_round_robin(self) -> None: pool = ConnectionPool([(x, {}) for x in range(100)]) connections = set() @@ -56,7 +58,7 @@ def test_default_round_robin(self): connections.add(pool.get_connection()) self.assertEqual(connections, 
set(range(100))) - def test_disable_shuffling(self): + def test_disable_shuffling(self) -> None: pool = ConnectionPool([(x, {}) for x in range(100)], randomize_hosts=False) connections = [] @@ -64,9 +66,9 @@ def test_disable_shuffling(self): connections.append(pool.get_connection()) self.assertEqual(connections, list(range(100))) - def test_selectors_have_access_to_connection_opts(self): + def test_selectors_have_access_to_connection_opts(self) -> None: class MySelector(RoundRobinSelector): - def select(self, connections): + def select(self, connections: Any) -> Any: return self.connection_opts[ super(MySelector, self).select(connections) ]["actual"] @@ -82,7 +84,7 @@ def select(self, connections): connections.append(pool.get_connection()) self.assertEqual(connections, [x * x for x in range(100)]) - def test_dead_nodes_are_removed_from_active_connections(self): + def test_dead_nodes_are_removed_from_active_connections(self) -> None: pool = ConnectionPool([(x, {}) for x in range(100)]) now = time.time() @@ -91,7 +93,7 @@ def test_dead_nodes_are_removed_from_active_connections(self): self.assertEqual(1, pool.dead.qsize()) self.assertEqual((now + 60, 42), pool.dead.get()) - def test_connection_is_skipped_when_dead(self): + def test_connection_is_skipped_when_dead(self) -> None: pool = ConnectionPool([(x, {}) for x in range(2)]) pool.mark_dead(0) @@ -100,7 +102,7 @@ def test_connection_is_skipped_when_dead(self): [pool.get_connection(), pool.get_connection(), pool.get_connection()], ) - def test_new_connection_is_not_marked_dead(self): + def test_new_connection_is_not_marked_dead(self) -> None: # Create 10 connections pool = ConnectionPool([(Connection(), {}) for _ in range(10)]) @@ -111,7 +113,9 @@ def test_new_connection_is_not_marked_dead(self): # Nothing should be marked dead self.assertEqual(0, len(pool.dead_count)) - def test_connection_is_forcibly_resurrected_when_no_live_ones_are_availible(self): + def test_connection_is_forcibly_resurrected_when_no_live_ones_are_availible( + self, + ) -> None: pool = ConnectionPool([(x, {}) for x in range(2)]) pool.dead_count[0] = 1 pool.mark_dead(0) # failed twice, longer timeout @@ -121,7 +125,7 @@ def test_connection_is_forcibly_resurrected_when_no_live_ones_are_availible(self self.assertEqual(1, pool.get_connection()) self.assertEqual([1], pool.connections) - def test_connection_is_resurrected_after_its_timeout(self): + def test_connection_is_resurrected_after_its_timeout(self) -> None: pool = ConnectionPool([(x, {}) for x in range(100)]) now = time.time() @@ -130,7 +134,7 @@ def test_connection_is_resurrected_after_its_timeout(self): self.assertEqual(42, pool.connections[-1]) self.assertEqual(100, len(pool.connections)) - def test_force_resurrect_always_returns_a_connection(self): + def test_force_resurrect_always_returns_a_connection(self) -> None: pool = ConnectionPool([(0, {})]) pool.connections = [] @@ -138,7 +142,7 @@ def test_force_resurrect_always_returns_a_connection(self): self.assertEqual([], pool.connections) self.assertTrue(pool.dead.empty()) - def test_already_failed_connection_has_longer_timeout(self): + def test_already_failed_connection_has_longer_timeout(self) -> None: pool = ConnectionPool([(x, {}) for x in range(100)]) now = time.time() pool.dead_count[42] = 2 @@ -147,7 +151,7 @@ def test_already_failed_connection_has_longer_timeout(self): self.assertEqual(3, pool.dead_count[42]) self.assertEqual((now + 4 * 60, 42), pool.dead.get()) - def test_timeout_for_failed_connections_is_limitted(self): + def 
test_timeout_for_failed_connections_is_limitted(self) -> None: pool = ConnectionPool([(x, {}) for x in range(100)]) now = time.time() pool.dead_count[42] = 245 @@ -156,7 +160,7 @@ def test_timeout_for_failed_connections_is_limitted(self): self.assertEqual(246, pool.dead_count[42]) self.assertEqual((now + 32 * 60, 42), pool.dead.get()) - def test_dead_count_is_wiped_clean_for_connection_if_marked_live(self): + def test_dead_count_is_wiped_clean_for_connection_if_marked_live(self) -> None: pool = ConnectionPool([(x, {}) for x in range(100)]) now = time.time() pool.dead_count[42] = 2 diff --git a/test_opensearchpy/test_exceptions.py b/test_opensearchpy/test_exceptions.py index 77a97a91..26e9e044 100644 --- a/test_opensearchpy/test_exceptions.py +++ b/test_opensearchpy/test_exceptions.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -31,7 +32,7 @@ class TestTransformError(TestCase): - def test_transform_error_parse_with_error_reason(self): + def test_transform_error_parse_with_error_reason(self) -> None: e = TransportError( 500, "InternalServerError", @@ -42,7 +43,7 @@ def test_transform_error_parse_with_error_reason(self): str(e), "TransportError(500, 'InternalServerError', 'error reason')" ) - def test_transform_error_parse_with_error_string(self): + def test_transform_error_parse_with_error_string(self) -> None: e = TransportError( 500, "InternalServerError", {"error": "something error message"} ) diff --git a/test_opensearchpy/test_helpers/__init__.py b/test_opensearchpy/test_helpers/__init__.py index 7e52ae22..392fa5bd 100644 --- a/test_opensearchpy/test_helpers/__init__.py +++ b/test_opensearchpy/test_helpers/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_helpers/conftest.py b/test_opensearchpy/test_helpers/conftest.py index 9c93ccd0..09778000 100644 --- a/test_opensearchpy/test_helpers/conftest.py +++ b/test_opensearchpy/test_helpers/conftest.py @@ -26,24 +26,26 @@ # under the License. 
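
[Annotation, not part of this diff.] The expected values in the ConnectionPool dead-timeout tests a couple of hunks above (60s after a first failure, 4*60s after a third, capped at 32*60s) follow from an exponential backoff on the per-connection failure count. A sketch of the formula, assuming the pool's default 60-second base timeout and a cutoff exponent of 5 (these defaults are an assumption about the pool's internals, not code from this diff):

    def dead_connection_timeout(dead_count: int, base: int = 60, cutoff: int = 5) -> int:
        # Each consecutive failure doubles the timeout, up to base * 2**cutoff.
        return base * 2 ** min(dead_count - 1, cutoff)

    assert dead_connection_timeout(1) == 60         # first failure
    assert dead_connection_timeout(3) == 4 * 60     # had already failed twice
    assert dead_connection_timeout(246) == 32 * 60  # capped at base * 2**5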
+from typing import Any + from mock import Mock from pytest import fixture from opensearchpy.connection.connections import add_connection, connections -@fixture -def mock_client(dummy_response): +@fixture # type: ignore +def mock_client(dummy_response: Any) -> Any: client = Mock() client.search.return_value = dummy_response add_connection("mock", client) yield client - connections._conn = {} + connections._conns = {} connections._kwargs = {} -@fixture -def dummy_response(): +@fixture # type: ignore +def dummy_response() -> Any: return { "_shards": {"failed": 0, "successful": 10, "total": 10}, "hits": { @@ -91,8 +93,8 @@ def dummy_response(): } -@fixture -def aggs_search(): +@fixture # type: ignore +def aggs_search() -> Any: from opensearchpy import Search s = Search(index="flat-git") @@ -106,8 +108,8 @@ def aggs_search(): return s -@fixture -def aggs_data(): +@fixture # type: ignore +def aggs_data() -> Any: return { "took": 4, "timed_out": False, diff --git a/test_opensearchpy/test_helpers/test_actions.py b/test_opensearchpy/test_helpers/test_actions.py index 3538ae28..739e8647 100644 --- a/test_opensearchpy/test_helpers/test_actions.py +++ b/test_opensearchpy/test_helpers/test_actions.py @@ -28,6 +28,7 @@ import threading import time +from typing import Any import mock import pytest @@ -40,19 +41,19 @@ lock_side_effect = threading.Lock() -def mock_process_bulk_chunk(*args, **kwargs): +def mock_process_bulk_chunk(*args: Any, **kwargs: Any) -> Any: """ Threadsafe way of mocking process bulk chunk: https://stackoverflow.com/questions/39332139/thread-safe-version-of-mock-call-count """ with lock_side_effect: - mock_process_bulk_chunk.call_count += 1 + mock_process_bulk_chunk.call_count += 1 # type: ignore time.sleep(0.1) return [] -mock_process_bulk_chunk.call_count = 0 +mock_process_bulk_chunk.call_count = 0 # type: ignore class TestParallelBulk(TestCase): @@ -60,21 +61,21 @@ class TestParallelBulk(TestCase): "opensearchpy.helpers.actions._process_bulk_chunk", side_effect=mock_process_bulk_chunk, ) - def test_all_chunks_sent(self, _process_bulk_chunk): + def test_all_chunks_sent(self, _process_bulk_chunk: Any) -> None: actions = ({"x": i} for i in range(100)) list(helpers.parallel_bulk(OpenSearch(), actions, chunk_size=2)) - self.assertEqual(50, mock_process_bulk_chunk.call_count) + self.assertEqual(50, mock_process_bulk_chunk.call_count) # type: ignore - @pytest.mark.skip + @pytest.mark.skip # type: ignore @mock.patch( "opensearchpy.helpers.actions._process_bulk_chunk", # make sure we spend some time in the thread side_effect=lambda *a: [ - (True, time.sleep(0.001) or threading.current_thread().ident) + (True, time.sleep(0.001) or threading.current_thread().ident) # type: ignore ], ) - def test_chunk_sent_from_different_threads(self, _process_bulk_chunk): + def test_chunk_sent_from_different_threads(self, _process_bulk_chunk: Any) -> None: actions = ({"x": i} for i in range(100)) results = list( helpers.parallel_bulk(OpenSearch(), actions, thread_count=10, chunk_size=2) @@ -83,16 +84,16 @@ def test_chunk_sent_from_different_threads(self, _process_bulk_chunk): class TestChunkActions(TestCase): - def setup_method(self, _): - self.actions = [({"index": {}}, {"some": u"datá", "i": i}) for i in range(100)] # fmt: skip + def setup_method(self, _: Any) -> None: + self.actions: Any = [({"index": {}}, {"some": u"datá", "i": i}) for i in range(100)] # fmt: skip - def test_expand_action(self): + def test_expand_action(self) -> None: self.assertEqual(helpers.expand_action({}), ({"index": {}}, {})) 
self.assertEqual( helpers.expand_action({"key": "val"}), ({"index": {}}, {"key": "val"}) ) - def test_expand_action_actions(self): + def test_expand_action_actions(self) -> None: self.assertEqual( helpers.expand_action( {"_op_type": "delete", "_id": "id", "_index": "index"} @@ -123,7 +124,7 @@ def test_expand_action_actions(self): ({"create": {"_id": "id", "_index": "index"}}, {"key": "val"}), ) - def test_expand_action_options(self): + def test_expand_action_options(self) -> None: for option in ( "_id", "_index", @@ -154,7 +155,7 @@ def test_expand_action_options(self): ({"index": {action_option: 0}}, {"key": "val"}), ) - def test__source_metadata_or_source(self): + def test__source_metadata_or_source(self) -> None: self.assertEqual( helpers.expand_action({"_source": {"key": "val"}}), ({"index": {}}, {"key": "val"}), @@ -182,7 +183,7 @@ def test__source_metadata_or_source(self): ({"update": {}}, {"key2": "val2"}), ) - def test_chunks_are_chopped_by_byte_size(self): + def test_chunks_are_chopped_by_byte_size(self) -> None: self.assertEqual( 100, len( @@ -190,7 +191,7 @@ def test_chunks_are_chopped_by_byte_size(self): ), ) - def test_chunks_are_chopped_by_chunk_size(self): + def test_chunks_are_chopped_by_chunk_size(self) -> None: self.assertEqual( 10, len( @@ -200,7 +201,7 @@ def test_chunks_are_chopped_by_chunk_size(self): ), ) - def test_chunks_are_chopped_by_byte_size_properly(self): + def test_chunks_are_chopped_by_byte_size_properly(self) -> None: max_byte_size = 170 chunks = list( helpers._chunk_actions( @@ -215,7 +216,7 @@ def test_chunks_are_chopped_by_byte_size_properly(self): class TestExpandActions(TestCase): - def test_string_actions_are_marked_as_simple_inserts(self): + def test_string_actions_are_marked_as_simple_inserts(self) -> None: self.assertEqual( ('{"index":{}}', "whatever"), helpers.expand_action("whatever") ) diff --git a/test_opensearchpy/test_helpers/test_aggs.py b/test_opensearchpy/test_helpers/test_aggs.py index 13059ccc..8a23c218 100644 --- a/test_opensearchpy/test_helpers/test_aggs.py +++ b/test_opensearchpy/test_helpers/test_aggs.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -29,14 +30,14 @@ from opensearchpy.helpers import aggs, query -def test_repr(): +def test_repr() -> None: max_score = aggs.Max(field="score") a = aggs.A("terms", field="tags", aggs={"max_score": max_score}) assert "Terms(aggs={'max_score': Max(field='score')}, field='tags')" == repr(a) -def test_meta(): +def test_meta() -> None: max_score = aggs.Max(field="score") a = aggs.A( "terms", field="tags", aggs={"max_score": max_score}, meta={"some": "metadata"} @@ -49,7 +50,7 @@ def test_meta(): } == a.to_dict() -def test_meta_from_dict(): +def test_meta_from_dict() -> None: max_score = aggs.Max(field="score") a = aggs.A( "terms", field="tags", aggs={"max_score": max_score}, meta={"some": "metadata"} @@ -58,14 +59,14 @@ def test_meta_from_dict(): assert aggs.A(a.to_dict()) == a -def test_A_creates_proper_agg(): +def test_A_creates_proper_agg() -> None: a = aggs.A("terms", field="tags") assert isinstance(a, aggs.Terms) assert a._params == {"field": "tags"} -def test_A_handles_nested_aggs_properly(): +def test_A_handles_nested_aggs_properly() -> None: max_score = aggs.Max(field="score") a = aggs.A("terms", field="tags", aggs={"max_score": max_score}) @@ -73,12 +74,12 @@ def test_A_handles_nested_aggs_properly(): assert a._params == {"field": "tags", "aggs": {"max_score": max_score}} -def 
test_A_passes_aggs_through(): +def test_A_passes_aggs_through() -> None: a = aggs.A("terms", field="tags") assert aggs.A(a) is a -def test_A_from_dict(): +def test_A_from_dict() -> None: d = { "terms": {"field": "tags"}, "aggs": {"per_author": {"terms": {"field": "author.raw"}}}, @@ -94,7 +95,7 @@ def test_A_from_dict(): assert a.aggs.per_author == aggs.A("terms", field="author.raw") -def test_A_fails_with_incorrect_dict(): +def test_A_fails_with_incorrect_dict() -> None: correct_d = { "terms": {"field": "tags"}, "aggs": {"per_author": {"terms": {"field": "author.raw"}}}, @@ -114,14 +115,14 @@ def test_A_fails_with_incorrect_dict(): aggs.A(d) -def test_A_fails_with_agg_and_params(): +def test_A_fails_with_agg_and_params() -> None: a = aggs.A("terms", field="tags") with raises(Exception): aggs.A(a, field="score") -def test_buckets_are_nestable(): +def test_buckets_are_nestable() -> None: a = aggs.Terms(field="tags") b = a.bucket("per_author", "terms", field="author.raw") @@ -130,7 +131,7 @@ def test_buckets_are_nestable(): assert a.aggs == {"per_author": b} -def test_metric_inside_buckets(): +def test_metric_inside_buckets() -> None: a = aggs.Terms(field="tags") b = a.metric("max_score", "max", field="score") @@ -139,7 +140,7 @@ def test_metric_inside_buckets(): assert a.aggs["max_score"] == aggs.Max(field="score") -def test_buckets_equals_counts_subaggs(): +def test_buckets_equals_counts_subaggs() -> None: a = aggs.Terms(field="tags") a.bucket("per_author", "terms", field="author.raw") b = aggs.Terms(field="tags") @@ -147,7 +148,7 @@ def test_buckets_equals_counts_subaggs(): assert a != b -def test_buckets_to_dict(): +def test_buckets_to_dict() -> None: a = aggs.Terms(field="tags") a.bucket("per_author", "terms", field="author.raw") @@ -165,7 +166,7 @@ def test_buckets_to_dict(): } == a.to_dict() -def test_nested_buckets_are_reachable_as_getitem(): +def test_nested_buckets_are_reachable_as_getitem() -> None: a = aggs.Terms(field="tags") b = a.bucket("per_author", "terms", field="author.raw") @@ -173,14 +174,14 @@ def test_nested_buckets_are_reachable_as_getitem(): assert a["per_author"] == b -def test_nested_buckets_are_settable_as_getitem(): +def test_nested_buckets_are_settable_as_getitem() -> None: a = aggs.Terms(field="tags") b = a["per_author"] = aggs.A("terms", field="author.raw") assert a.aggs["per_author"] is b -def test_filter_can_be_instantiated_using_positional_args(): +def test_filter_can_be_instantiated_using_positional_args() -> None: a = aggs.Filter(query.Q("term", f=42)) assert {"filter": {"term": {"f": 42}}} == a.to_dict() @@ -188,7 +189,7 @@ def test_filter_can_be_instantiated_using_positional_args(): assert a == aggs.A("filter", query.Q("term", f=42)) -def test_filter_aggregation_as_nested_agg(): +def test_filter_aggregation_as_nested_agg() -> None: a = aggs.Terms(field="tags") a.bucket("filtered", "filter", query.Q("term", f=42)) @@ -198,7 +199,7 @@ def test_filter_aggregation_as_nested_agg(): } == a.to_dict() -def test_filter_aggregation_with_nested_aggs(): +def test_filter_aggregation_with_nested_aggs() -> None: a = aggs.Filter(query.Q("term", f=42)) a.bucket("testing", "terms", field="tags") @@ -208,7 +209,7 @@ def test_filter_aggregation_with_nested_aggs(): } == a.to_dict() -def test_filters_correctly_identifies_the_hash(): +def test_filters_correctly_identifies_the_hash() -> None: a = aggs.A( "filters", filters={ @@ -228,7 +229,7 @@ def test_filters_correctly_identifies_the_hash(): assert a.filters.group_a == query.Q("term", group="a") -def test_bucket_sort_agg(): 
+def test_bucket_sort_agg() -> None: bucket_sort_agg = aggs.BucketSort(sort=[{"total_sales": {"order": "desc"}}], size=3) assert bucket_sort_agg.to_dict() == { "bucket_sort": {"sort": [{"total_sales": {"order": "desc"}}], "size": 3} @@ -253,7 +254,7 @@ def test_bucket_sort_agg(): } == a.to_dict() -def test_bucket_sort_agg_only_trnunc(): +def test_bucket_sort_agg_only_trnunc() -> None: bucket_sort_agg = aggs.BucketSort(**{"from": 1, "size": 1}) assert bucket_sort_agg.to_dict() == {"bucket_sort": {"from": 1, "size": 1}} @@ -265,25 +266,25 @@ def test_bucket_sort_agg_only_trnunc(): } == a.to_dict() -def test_geohash_grid_aggregation(): +def test_geohash_grid_aggregation() -> None: a = aggs.GeohashGrid(**{"field": "centroid", "precision": 3}) assert {"geohash_grid": {"field": "centroid", "precision": 3}} == a.to_dict() -def test_geotile_grid_aggregation(): +def test_geotile_grid_aggregation() -> None: a = aggs.GeotileGrid(**{"field": "centroid", "precision": 3}) assert {"geotile_grid": {"field": "centroid", "precision": 3}} == a.to_dict() -def test_boxplot_aggregation(): +def test_boxplot_aggregation() -> None: a = aggs.Boxplot(field="load_time") assert {"boxplot": {"field": "load_time"}} == a.to_dict() -def test_rare_terms_aggregation(): +def test_rare_terms_aggregation() -> None: a = aggs.RareTerms(field="the-field") a.bucket("total_sales", "sum", field="price") a.bucket( @@ -304,18 +305,18 @@ def test_rare_terms_aggregation(): } == a.to_dict() -def test_variable_width_histogram_aggregation(): +def test_variable_width_histogram_aggregation() -> None: a = aggs.VariableWidthHistogram(field="price", buckets=2) assert {"variable_width_histogram": {"buckets": 2, "field": "price"}} == a.to_dict() -def test_median_absolute_deviation_aggregation(): +def test_median_absolute_deviation_aggregation() -> None: a = aggs.MedianAbsoluteDeviation(field="rating") assert {"median_absolute_deviation": {"field": "rating"}} == a.to_dict() -def test_t_test_aggregation(): +def test_t_test_aggregation() -> None: a = aggs.TTest( a={"field": "startup_time_before"}, b={"field": "startup_time_after"}, @@ -331,14 +332,14 @@ def test_t_test_aggregation(): } == a.to_dict() -def test_inference_aggregation(): +def test_inference_aggregation() -> None: a = aggs.Inference(model_id="model-id", buckets_path={"agg_name": "agg_name"}) assert { "inference": {"buckets_path": {"agg_name": "agg_name"}, "model_id": "model-id"} } == a.to_dict() -def test_moving_percentiles_aggregation(): +def test_moving_percentiles_aggregation() -> None: a = aggs.DateHistogram() a.bucket("the_percentile", "percentiles", field="price", percents=[1.0, 99.0]) a.pipeline( @@ -358,7 +359,7 @@ def test_moving_percentiles_aggregation(): } == a.to_dict() -def test_normalize_aggregation(): +def test_normalize_aggregation() -> None: a = aggs.Normalize(buckets_path="normalized", method="percent_of_sum") assert { "normalize": {"buckets_path": "normalized", "method": "percent_of_sum"} diff --git a/test_opensearchpy/test_helpers/test_analysis.py b/test_opensearchpy/test_helpers/test_analysis.py index 49a1d1fd..0226ee48 100644 --- a/test_opensearchpy/test_helpers/test_analysis.py +++ b/test_opensearchpy/test_helpers/test_analysis.py @@ -30,13 +30,13 @@ from opensearchpy.helpers import analysis -def test_analyzer_serializes_as_name(): +def test_analyzer_serializes_as_name() -> None: a = analysis.analyzer("my_analyzer") assert "my_analyzer" == a.to_dict() -def test_analyzer_has_definition(): +def test_analyzer_has_definition() -> None: a = analysis.CustomAnalyzer( 
"my_analyzer", tokenizer="keyword", filter=["lowercase"] ) @@ -48,7 +48,7 @@ def test_analyzer_has_definition(): } == a.get_definition() -def test_simple_multiplexer_filter(): +def test_simple_multiplexer_filter() -> None: a = analysis.analyzer( "my_analyzer", tokenizer="keyword", @@ -76,7 +76,7 @@ def test_simple_multiplexer_filter(): } == a.get_analysis_definition() -def test_multiplexer_with_custom_filter(): +def test_multiplexer_with_custom_filter() -> None: a = analysis.analyzer( "my_analyzer", tokenizer="keyword", @@ -107,7 +107,7 @@ def test_multiplexer_with_custom_filter(): } == a.get_analysis_definition() -def test_conditional_token_filter(): +def test_conditional_token_filter() -> None: a = analysis.analyzer( "my_cond", tokenizer=analysis.tokenizer("keyword"), @@ -144,7 +144,7 @@ def test_conditional_token_filter(): } == a.get_analysis_definition() -def test_conflicting_nested_filters_cause_error(): +def test_conflicting_nested_filters_cause_error() -> None: a = analysis.analyzer( "my_cond", tokenizer=analysis.tokenizer("keyword"), @@ -166,13 +166,13 @@ def test_conflicting_nested_filters_cause_error(): a.get_analysis_definition() -def test_normalizer_serializes_as_name(): +def test_normalizer_serializes_as_name() -> None: n = analysis.normalizer("my_normalizer") assert "my_normalizer" == n.to_dict() -def test_normalizer_has_definition(): +def test_normalizer_has_definition() -> None: n = analysis.CustomNormalizer( "my_normalizer", filter=["lowercase", "asciifolding"], char_filter=["quote"] ) @@ -184,14 +184,14 @@ def test_normalizer_has_definition(): } == n.get_definition() -def test_tokenizer(): +def test_tokenizer() -> None: t = analysis.tokenizer("trigram", "nGram", min_gram=3, max_gram=3) assert t.to_dict() == "trigram" assert {"type": "nGram", "min_gram": 3, "max_gram": 3} == t.get_definition() -def test_custom_analyzer_can_collect_custom_items(): +def test_custom_analyzer_can_collect_custom_items() -> None: trigram = analysis.tokenizer("trigram", "nGram", min_gram=3, max_gram=3) my_stop = analysis.token_filter("my_stop", "stop", stopwords=["a", "b"]) umlauts = analysis.char_filter("umlauts", "pattern_replace", mappings=["ü=>ue"]) @@ -218,7 +218,7 @@ def test_custom_analyzer_can_collect_custom_items(): } == a.get_analysis_definition() -def test_stemmer_analyzer_can_pass_name(): +def test_stemmer_analyzer_can_pass_name() -> None: t = analysis.token_filter( "my_english_filter", name="minimal_english", type="stemmer" ) diff --git a/test_opensearchpy/test_helpers/test_document.py b/test_opensearchpy/test_helpers/test_document.py index 086bde17..e1b5e5c4 100644 --- a/test_opensearchpy/test_helpers/test_document.py +++ b/test_opensearchpy/test_helpers/test_document.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -31,6 +32,7 @@ import pickle from datetime import datetime from hashlib import sha256 +from typing import Any from pytest import raises @@ -51,7 +53,7 @@ class MyDoc(document.Document): class MySubDoc(MyDoc): - name = field.Keyword() + name: Any = field.Keyword() class Index: name = "default-index" @@ -91,10 +93,10 @@ class Secret(str): class SecretField(field.CustomField): builtin_type = "text" - def _serialize(self, data): + def _serialize(self, data: Any) -> Any: return codecs.encode(data, "rot_13") - def _deserialize(self, data): + def _deserialize(self, data: Any) -> Any: if isinstance(data, Secret): return data return Secret(codecs.decode(data, "rot_13")) @@ -113,6 
+115,8 @@ class NestedSecret(document.Document): class Index: name = "test-nested-secret" + _index: Any + class OptionalObjectWithRequiredField(document.Document): comments = field.Nested(properties={"title": field.Keyword(required=True)}) @@ -120,6 +124,8 @@ class OptionalObjectWithRequiredField(document.Document): class Index: name = "test-required" + _index: Any + class Host(document.Document): ip = field.Ip() @@ -127,12 +133,14 @@ class Host(document.Document): class Index: name = "test-host" + _index: Any -def test_range_serializes_properly(): + +def test_range_serializes_properly() -> None: class D(document.Document): lr = field.LongRange() - d = D(lr=Range(lt=42)) + d: Any = D(lr=Range(lt=42)) assert 40 in d.lr assert 47 not in d.lr assert {"lr": {"lt": 42}} == d.to_dict() @@ -141,30 +149,30 @@ class D(document.Document): assert {"lr": {"lt": 42}} == d.to_dict() -def test_range_deserializes_properly(): +def test_range_deserializes_properly() -> None: class D(document.InnerDoc): lr = field.LongRange() - d = D.from_opensearch({"lr": {"lt": 42}}, True) + d: Any = D.from_opensearch({"lr": {"lt": 42}}, True) assert isinstance(d.lr, Range) assert 40 in d.lr assert 47 not in d.lr -def test_resolve_nested(): +def test_resolve_nested() -> None: nested, field = NestedSecret._index.resolve_nested("secrets.title") assert nested == ["secrets"] assert field is NestedSecret._doc_type.mapping["secrets"]["title"] -def test_conflicting_mapping_raises_error_in_index_to_dict(): +def test_conflicting_mapping_raises_error_in_index_to_dict() -> None: class A(document.Document): name = field.Text() class B(document.Document): name = field.Keyword() - i = Index("i") + i: Any = Index("i") i.document(A) i.document(B) @@ -172,18 +180,18 @@ class B(document.Document): i.to_dict() -def test_ip_address_serializes_properly(): - host = Host(ip=ipaddress.IPv4Address("10.0.0.1")) +def test_ip_address_serializes_properly() -> None: + host: Any = Host(ip=ipaddress.IPv4Address("10.0.0.1")) assert {"ip": "10.0.0.1"} == host.to_dict() -def test_matches_uses_index(): +def test_matches_uses_index() -> None: assert SimpleCommit._matches({"_index": "test-git"}) assert not SimpleCommit._matches({"_index": "not-test-git"}) -def test_matches_with_no_name_always_matches(): +def test_matches_with_no_name_always_matches() -> None: class D(document.Document): pass @@ -191,7 +199,7 @@ class D(document.Document): assert D._matches({"_index": "whatever"}) -def test_matches_accepts_wildcards(): +def test_matches_accepts_wildcards() -> None: class MyDoc(document.Document): class Index: name = "my-*" @@ -200,74 +208,76 @@ class Index: assert not MyDoc._matches({"_index": "not-my-index"}) -def test_assigning_attrlist_to_field(): - sc = SimpleCommit() +def test_assigning_attrlist_to_field() -> None: + sc: Any = SimpleCommit() ls = ["README", "README.rst"] sc.files = utils.AttrList(ls) assert sc.to_dict()["files"] is ls -def test_optional_inner_objects_are_not_validated_if_missing(): - d = OptionalObjectWithRequiredField() +def test_optional_inner_objects_are_not_validated_if_missing() -> None: + d: Any = OptionalObjectWithRequiredField() assert d.full_clean() is None -def test_custom_field(): - s = SecretDoc(title=Secret("Hello")) +def test_custom_field() -> None: + s1: Any = SecretDoc(title=Secret("Hello")) - assert {"title": "Uryyb"} == s.to_dict() - assert s.title == "Hello" + assert {"title": "Uryyb"} == s1.to_dict() + assert s1.title == "Hello" - s = SecretDoc.from_opensearch({"_source": {"title": "Uryyb"}}) - assert s.title == "Hello" 
- assert isinstance(s.title, Secret) + s2: Any = SecretDoc.from_opensearch({"_source": {"title": "Uryyb"}}) + assert s2.title == "Hello" + assert isinstance(s2.title, Secret) -def test_custom_field_mapping(): +def test_custom_field_mapping() -> None: assert { "properties": {"title": {"index": "no", "type": "text"}} } == SecretDoc._doc_type.mapping.to_dict() -def test_custom_field_in_nested(): - s = NestedSecret() +def test_custom_field_in_nested() -> None: + s: Any = NestedSecret() s.secrets.append(SecretDoc(title=Secret("Hello"))) assert {"secrets": [{"title": "Uryyb"}]} == s.to_dict() assert s.secrets[0].title == "Hello" -def test_multi_works_after_doc_has_been_saved(): - c = SimpleCommit() +def test_multi_works_after_doc_has_been_saved() -> None: + c: Any = SimpleCommit() c.full_clean() c.files.append("setup.py") assert c.to_dict() == {"files": ["setup.py"]} -def test_multi_works_in_nested_after_doc_has_been_serialized(): +def test_multi_works_in_nested_after_doc_has_been_serialized() -> None: # Issue #359 - c = DocWithNested(comments=[Comment(title="First!")]) + c: Any = DocWithNested(comments=[Comment(title="First!")]) assert [] == c.comments[0].tags assert {"comments": [{"title": "First!"}]} == c.to_dict() assert [] == c.comments[0].tags -def test_null_value_for_object(): - d = MyDoc(inner=None) +def test_null_value_for_object() -> None: + d: Any = MyDoc(inner=None) assert d.inner is None -def test_inherited_doc_types_can_override_index(): +def test_inherited_doc_types_can_override_index() -> None: class MyDocDifferentIndex(MySubDoc): + _index: Any + class Index: name = "not-default-index" settings = {"number_of_replicas": 0} - aliases = {"a": {}} + aliases: Any = {"a": {}} analyzers = [analyzer("my_analizer", tokenizer="keyword")] assert MyDocDifferentIndex._index._name == "not-default-index" @@ -294,8 +304,8 @@ class Index: } -def test_to_dict_with_meta(): - d = MySubDoc(title="hello") +def test_to_dict_with_meta() -> None: + d: Any = MySubDoc(title="hello") d.meta.routing = "some-parent" assert { @@ -305,29 +315,29 @@ def test_to_dict_with_meta(): } == d.to_dict(True) -def test_to_dict_with_meta_includes_custom_index(): - d = MySubDoc(title="hello") +def test_to_dict_with_meta_includes_custom_index() -> None: + d: Any = MySubDoc(title="hello") d.meta.index = "other-index" assert {"_index": "other-index", "_source": {"title": "hello"}} == d.to_dict(True) -def test_to_dict_without_skip_empty_will_include_empty_fields(): - d = MySubDoc(tags=[], title=None, inner={}) +def test_to_dict_without_skip_empty_will_include_empty_fields() -> None: + d: Any = MySubDoc(tags=[], title=None, inner={}) assert {} == d.to_dict() assert {"tags": [], "title": None, "inner": {}} == d.to_dict(skip_empty=False) -def test_attribute_can_be_removed(): - d = MyDoc(title="hello") +def test_attribute_can_be_removed() -> None: + d: Any = MyDoc(title="hello") del d.title assert "title" not in d._d_ -def test_doc_type_can_be_correctly_pickled(): - d = DocWithNested( +def test_doc_type_can_be_correctly_pickled() -> None: + d: Any = DocWithNested( title="Hello World!", comments=[Comment(title="hellp")], meta={"id": 42} ) s = pickle.dumps(d) @@ -341,15 +351,15 @@ def test_doc_type_can_be_correctly_pickled(): assert isinstance(d2.comments[0], Comment) -def test_meta_is_accessible_even_on_empty_doc(): - d = MyDoc() - d.meta +def test_meta_is_accessible_even_on_empty_doc() -> None: + d1: Any = MyDoc() + d1.meta - d = MyDoc(title="aaa") - d.meta + d2: Any = MyDoc(title="aaa") + d2.meta -def test_meta_field_mapping(): 
+def test_meta_field_mapping() -> None:
     class User(document.Document):
         username = field.Text()
 
@@ -368,33 +378,33 @@ class Meta:
     } == User._doc_type.mapping.to_dict()
 
 
-def test_multi_value_fields():
+def test_multi_value_fields() -> None:
     class Blog(document.Document):
         tags = field.Keyword(multi=True)
 
-    b = Blog()
+    b: Any = Blog()
     assert [] == b.tags
     b.tags.append("search")
     b.tags.append("python")
     assert ["search", "python"] == b.tags
 
 
-def test_docs_with_properties():
+def test_docs_with_properties() -> None:
     class User(document.Document):
-        pwd_hash = field.Text()
+        pwd_hash: Any = field.Text()
 
-        def check_password(self, pwd):
+        def check_password(self, pwd: Any) -> Any:
             return sha256(pwd).hexdigest() == self.pwd_hash
 
         @property
-        def password(self):
+        def password(self) -> Any:
             raise AttributeError("readonly")
 
         @password.setter
-        def password(self, pwd):
+        def password(self, pwd: Any) -> None:
             self.pwd_hash = sha256(pwd).hexdigest()
 
-    u = User(pwd_hash=sha256(b"secret").hexdigest())
+    u: Any = User(pwd_hash=sha256(b"secret").hexdigest())
     assert u.check_password(b"secret")
     assert not u.check_password(b"not-secret")
 
@@ -407,9 +417,9 @@ def password(self, pwd):
         u.password
 
 
-def test_nested_can_be_assigned_to():
-    d1 = DocWithNested(comments=[Comment(title="First!")])
-    d2 = DocWithNested()
+def test_nested_can_be_assigned_to() -> None:
+    d1: Any = DocWithNested(comments=[Comment(title="First!")])
+    d2: Any = DocWithNested()
 
     d2.comments = d1.comments
     assert isinstance(d1.comments[0], Comment)
@@ -418,14 +428,14 @@ def test_nested_can_be_assigned_to():
     assert isinstance(d2.comments[0], Comment)
 
 
-def test_nested_can_be_none():
-    d = DocWithNested(comments=None, title="Hello World!")
+def test_nested_can_be_none() -> None:
+    d: Any = DocWithNested(comments=None, title="Hello World!")
 
     assert {"title": "Hello World!"} == d.to_dict()
 
 
-def test_nested_defaults_to_list_and_can_be_updated():
-    md = DocWithNested()
+def test_nested_defaults_to_list_and_can_be_updated() -> None:
+    md: Any = DocWithNested()
 
     assert [] == md.comments
 
@@ -433,8 +443,8 @@ def test_nested_defaults_to_list_and_can_be_updated():
     assert {"comments": [{"title": "hello World!"}]} == md.to_dict()
 
 
-def test_to_dict_is_recursive_and_can_cope_with_multi_values():
-    md = MyDoc(name=["a", "b", "c"])
+def test_to_dict_is_recursive_and_can_cope_with_multi_values() -> None:
+    md: Any = MyDoc(name=["a", "b", "c"])
     md.inner = [MyInner(old_field="of1"), MyInner(old_field="of2")]
 
     assert isinstance(md.inner[0], MyInner)
@@ -445,13 +455,13 @@ def test_to_dict_is_recursive_and_can_cope_with_multi_values():
     } == md.to_dict()
 
 
-def test_to_dict_ignores_empty_collections():
-    md = MySubDoc(name="", address={}, count=0, valid=False, tags=[])
+def test_to_dict_ignores_empty_collections() -> None:
+    md: Any = MySubDoc(name="", address={}, count=0, valid=False, tags=[])
 
     assert {"name": "", "count": 0, "valid": False} == md.to_dict()
 
 
-def test_declarative_mapping_definition():
+def test_declarative_mapping_definition() -> None:
     assert issubclass(MyDoc, document.Document)
     assert hasattr(MyDoc, "_doc_type")
     assert {
@@ -464,7 +474,7 @@ def test_declarative_mapping_definition():
     } == MyDoc._doc_type.mapping.to_dict()
 
 
-def test_you_can_supply_own_mapping_instance():
+def test_you_can_supply_own_mapping_instance() -> None:
     class MyD(document.Document):
         title = field.Text()
 
@@ -478,9 +488,9 @@ class Meta:
     } == MyD._doc_type.mapping.to_dict()
 
 
-def test_document_can_be_created_dynamically():
+def test_document_can_be_created_dynamically() -> None:
     n = datetime.now()
-    md = MyDoc(title="hello")
+    md: Any = MyDoc(title="hello")
     md.name = "My Fancy Document!"
     md.created_at = n
 
@@ -499,14 +509,14 @@ def test_document_can_be_created_dynamically():
     } == md.to_dict()
 
 
-def test_invalid_date_will_raise_exception():
-    md = MyDoc()
+def test_invalid_date_will_raise_exception() -> None:
+    md: Any = MyDoc()
     md.created_at = "not-a-date"
     with raises(ValidationException):
         md.full_clean()
 
 
-def test_document_inheritance():
+def test_document_inheritance() -> None:
     assert issubclass(MySubDoc, MyDoc)
     assert issubclass(MySubDoc, document.Document)
     assert hasattr(MySubDoc, "_doc_type")
@@ -520,7 +530,7 @@ def test_document_inheritance():
     } == MySubDoc._doc_type.mapping.to_dict()
 
 
-def test_child_class_can_override_parent():
+def test_child_class_can_override_parent() -> None:
     class A(document.Document):
         o = field.Object(dynamic=False, properties={"a": field.Text()})
 
@@ -538,8 +548,8 @@ class B(A):
     } == B._doc_type.mapping.to_dict()
 
 
-def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict():
-    md = MySubDoc(meta={"id": 42}, name="My First doc!")
+def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict() -> None:
+    md: Any = MySubDoc(meta={"id": 42}, name="My First doc!")
     md.meta.index = "my-index"
 
     assert md.meta.index == "my-index"
@@ -548,7 +558,7 @@ def test_meta_fields_are_stored_in_meta_and_ignored_by_to_dict():
     assert {"id": 42, "index": "my-index"} == md.meta.to_dict()
 
 
-def test_index_inheritance():
+def test_index_inheritance() -> None:
     assert issubclass(MyMultiSubDoc, MySubDoc)
     assert issubclass(MyMultiSubDoc, MyDoc2)
     assert issubclass(MyMultiSubDoc, document.Document)
@@ -565,33 +575,33 @@ def test_index_inheritance():
     } == MyMultiSubDoc._doc_type.mapping.to_dict()
 
 
-def test_meta_fields_can_be_set_directly_in_init():
+def test_meta_fields_can_be_set_directly_in_init() -> None:
     p = object()
-    md = MyDoc(_id=p, title="Hello World!")
+    md: Any = MyDoc(_id=p, title="Hello World!")
 
     assert md.meta.id is p
 
 
-def test_save_no_index(mock_client):
-    md = MyDoc()
+def test_save_no_index(mock_client: Any) -> None:
+    md: Any = MyDoc()
     with raises(ValidationException):
         md.save(using="mock")
 
 
-def test_delete_no_index(mock_client):
-    md = MyDoc()
+def test_delete_no_index(mock_client: Any) -> None:
+    md: Any = MyDoc()
     with raises(ValidationException):
         md.delete(using="mock")
 
 
-def test_update_no_fields():
-    md = MyDoc()
+def test_update_no_fields() -> None:
+    md: Any = MyDoc()
     with raises(IllegalOperation):
         md.update()
 
 
-def test_search_with_custom_alias_and_index(mock_client):
-    search_object = MyDoc.search(
+def test_search_with_custom_alias_and_index(mock_client: Any) -> None:
+    search_object: Any = MyDoc.search(
         using="staging", index=["custom_index1", "custom_index2"]
     )
 
@@ -599,7 +609,7 @@ def test_search_with_custom_alias_and_index(mock_client):
     assert search_object._index == ["custom_index1", "custom_index2"]
 
 
-def test_from_opensearch_respects_underscored_non_meta_fields():
+def test_from_opensearch_respects_underscored_non_meta_fields() -> None:
     doc = {
         "_index": "test-index",
         "_id": "opensearch",
@@ -616,18 +626,18 @@ class Company(document.Document):
         class Index:
             name = "test-company"
 
-    c = Company.from_opensearch(doc)
+    c: Any = Company.from_opensearch(doc)
     assert c.meta.fields._tags == ["search"]
     assert c.meta.fields._routing == "opensearch"
     assert c._tagline == "You know, for search"
 
 
-def test_nested_and_object_inner_doc():
+def test_nested_and_object_inner_doc() -> None:
     class MySubDocWithNested(MyDoc):
         nested_inner = field.Nested(MyInner)
 
-    props = MySubDocWithNested._doc_type.mapping.to_dict()["properties"]
+    props: Any = MySubDocWithNested._doc_type.mapping.to_dict()["properties"]
     assert props == {
         "created_at": {"type": "date"},
         "inner": {"properties": {"old_field": {"type": "text"}}, "type": "object"},
diff --git a/test_opensearchpy/test_helpers/test_faceted_search.py b/test_opensearchpy/test_helpers/test_faceted_search.py
index 066fc9d4..528cd485 100644
--- a/test_opensearchpy/test_helpers/test_faceted_search.py
+++ b/test_opensearchpy/test_helpers/test_faceted_search.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
@@ -25,6 +26,7 @@
 # under the License.
 
 from datetime import datetime
+from typing import Any
 
 import pytest
 
@@ -48,7 +50,7 @@ class BlogSearch(FacetedSearch):
     }
 
 
-def test_query_is_created_properly():
+def test_query_is_created_properly() -> None:
     bs = BlogSearch("python search")
     s = bs.build_search()
 
@@ -71,7 +73,7 @@ def test_query_is_created_properly():
     } == s.to_dict()
 
 
-def test_query_is_created_properly_with_sort_tuple():
+def test_query_is_created_properly_with_sort_tuple() -> None:
     bs = BlogSearch("python search", sort=("category", "-title"))
     s = bs.build_search()
 
@@ -95,7 +97,7 @@ def test_query_is_created_properly_with_sort_tuple():
     } == s.to_dict()
 
 
-def test_filter_is_applied_to_search_but_not_relevant_facet():
+def test_filter_is_applied_to_search_but_not_relevant_facet() -> None:
     bs = BlogSearch("python search", filters={"category": "opensearch"})
     s = bs.build_search()
 
@@ -118,7 +120,7 @@ def test_filter_is_applied_to_search_but_not_relevant_facet():
     } == s.to_dict()
 
 
-def test_filters_are_applied_to_search_ant_relevant_facets():
+def test_filters_are_applied_to_search_ant_relevant_facets() -> None:
     bs = BlogSearch(
         "python search",
         filters={"category": "opensearch", "tags": ["python", "django"]},
@@ -152,13 +154,13 @@ def test_filters_are_applied_to_search_ant_relevant_facets():
     } == d
 
 
-def test_date_histogram_facet_with_1970_01_01_date():
+def test_date_histogram_facet_with_1970_01_01_date() -> None:
     dhf = DateHistogramFacet()
     assert dhf.get_value({"key": None}) == datetime(1970, 1, 1, 0, 0)
     assert dhf.get_value({"key": 0}) == datetime(1970, 1, 1, 0, 0)
 
 
-@pytest.mark.parametrize(
+@pytest.mark.parametrize(  # type: ignore
     ["interval_type", "interval"],
     [
         ("interval", "year"),
@@ -185,7 +187,7 @@ def test_date_histogram_facet_with_1970_01_01_date():
         ("fixed_interval", "1h"),
     ],
 )
-def test_date_histogram_interval_types(interval_type, interval):
+def test_date_histogram_interval_types(interval_type: Any, interval: Any) -> None:
     dhf = DateHistogramFacet(field="@timestamp", **{interval_type: interval})
     assert dhf.get_aggregation().to_dict() == {
         "date_histogram": {
@@ -197,7 +199,7 @@ def test_date_histogram_interval_types(interval_type, interval):
     dhf.get_value_filter(datetime.now())
 
 
-def test_date_histogram_no_interval_keyerror():
+def test_date_histogram_no_interval_keyerror() -> None:
     dhf = DateHistogramFacet(field="@timestamp")
     with pytest.raises(KeyError) as e:
         dhf.get_value_filter(datetime.now())
diff --git a/test_opensearchpy/test_helpers/test_field.py b/test_opensearchpy/test_helpers/test_field.py
index 15b51c52..ce818b50 100644
--- a/test_opensearchpy/test_helpers/test_field.py
+++ b/test_opensearchpy/test_helpers/test_field.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
@@ -25,9 +26,9 @@
 # under the License.
 
 import base64
-import sys
 from datetime import datetime
 from ipaddress import ip_address
+from typing import Any
 
 import pytest
 from dateutil import tz
@@ -36,7 +37,7 @@
 from opensearchpy.helpers import field
 
 
-def test_date_range_deserialization():
+def test_date_range_deserialization() -> None:
     data = {"lt": "2018-01-01T00:30:10"}
 
     r = field.DateRange().deserialize(data)
@@ -45,7 +46,7 @@ def test_date_range_deserialization():
     assert r.lt == datetime(2018, 1, 1, 0, 30, 10)
 
 
-def test_boolean_deserialization():
+def test_boolean_deserialization() -> None:
     bf = field.Boolean()
 
     assert not bf.deserialize("false")
@@ -58,8 +59,8 @@ def test_boolean_deserialization():
     assert bf.deserialize(1)
 
 
-def test_date_field_can_have_default_tz():
-    f = field.Date(default_timezone="UTC")
+def test_date_field_can_have_default_tz() -> None:
+    f: Any = field.Date(default_timezone="UTC")
     now = datetime.now()
 
     now_with_tz = f._deserialize(now)
@@ -73,10 +74,10 @@ def test_date_field_can_have_default_tz():
     assert now.isoformat() + "+00:00" == now_with_tz.isoformat()
 
 
-def test_custom_field_car_wrap_other_field():
+def test_custom_field_car_wrap_other_field() -> None:
     class MyField(field.CustomField):
         @property
-        def builtin_type(self):
+        def builtin_type(self) -> Any:
             return field.Text(**self._params)
 
     assert {"type": "text", "index": "not_analyzed"} == MyField(
@@ -84,14 +85,14 @@ def builtin_type(self):
     ).to_dict()
 
 
-def test_field_from_dict():
+def test_field_from_dict() -> None:
     f = field.construct_field({"type": "text", "index": "not_analyzed"})
 
     assert isinstance(f, field.Text)
     assert {"type": "text", "index": "not_analyzed"} == f.to_dict()
 
 
-def test_multi_fields_are_accepted_and_parsed():
+def test_multi_fields_are_accepted_and_parsed() -> None:
     f = field.construct_field(
         "text",
         fields={"raw": {"type": "keyword"}, "eng": field.Text(analyzer="english")},
@@ -107,14 +108,14 @@ def test_multi_fields_are_accepted_and_parsed():
     } == f.to_dict()
 
 
-def test_nested_provides_direct_access_to_its_fields():
+def test_nested_provides_direct_access_to_its_fields() -> None:
     f = field.Nested(properties={"name": {"type": "text", "index": "not_analyzed"}})
 
     assert "name" in f
     assert f["name"] == field.Text(index="not_analyzed")
 
 
-def test_field_supports_multiple_analyzers():
+def test_field_supports_multiple_analyzers() -> None:
     f = field.Text(analyzer="snowball", search_analyzer="keyword")
     assert {
         "analyzer": "snowball",
@@ -123,7 +124,7 @@ def test_field_supports_multiple_analyzers():
     } == f.to_dict()
 
 
-def test_multifield_supports_multiple_analyzers():
+def test_multifield_supports_multiple_analyzers() -> None:
     f = field.Text(
         fields={
             "f1": field.Text(search_analyzer="keyword", analyzer="snowball"),
@@ -143,15 +144,14 @@ def test_multifield_supports_multiple_analyzers():
     } == f.to_dict()
 
 
-def test_scaled_float():
+def test_scaled_float() -> None:
     with pytest.raises(TypeError):
-        field.ScaledFloat()
-    f = field.ScaledFloat(123)
+        field.ScaledFloat()  # type: ignore
+    f: Any = field.ScaledFloat(scaling_factor=123)
     assert f.to_dict() == {"scaling_factor": 123, "type": "scaled_float"}
 
 
-@pytest.mark.skipif(sys.version_info < (3, 6), reason="requires python3.6 or higher")
-def test_ipaddress():
+def test_ipaddress() -> None:
     f = field.Ip()
     assert f.deserialize("127.0.0.1") == ip_address("127.0.0.1")
     assert f.deserialize("::1") == ip_address("::1")
@@ -161,7 +161,7 @@ def test_ipaddress():
     assert f.deserialize("not_an_ipaddress")
 
 
-def test_float():
+def test_float() -> None:
     f = field.Float()
     assert f.deserialize("42") == 42.0
     assert f.deserialize(None) is None
@@ -169,7 +169,7 @@ def test_float():
     assert f.deserialize("not_a_float")
 
 
-def test_integer():
+def test_integer() -> None:
     f = field.Integer()
     assert f.deserialize("42") == 42
     assert f.deserialize(None) is None
@@ -177,35 +177,35 @@ def test_integer():
     assert f.deserialize("not_an_integer")
 
 
-def test_binary():
+def test_binary() -> None:
     f = field.Binary()
     assert f.deserialize(base64.b64encode(b"42")) == b"42"
     assert f.deserialize(f.serialize(b"42")) == b"42"
     assert f.deserialize(None) is None
 
 
-def test_constant_keyword():
+def test_constant_keyword() -> None:
     f = field.ConstantKeyword()
     assert f.to_dict() == {"type": "constant_keyword"}
 
 
-def test_rank_features():
+def test_rank_features() -> None:
     f = field.RankFeatures()
     assert f.to_dict() == {"type": "rank_features"}
 
 
-def test_object_dynamic_values():
+def test_object_dynamic_values() -> None:
     for dynamic in True, False, "strict":
         f = field.Object(dynamic=dynamic)
         assert f.to_dict()["dynamic"] == dynamic
 
 
-def test_object_disabled():
+def test_object_disabled() -> None:
     f = field.Object(enabled=False)
     assert f.to_dict() == {"type": "object", "enabled": False}
 
 
-def test_object_constructor():
+def test_object_constructor() -> None:
     expected = {"type": "object", "properties": {"inner_int": {"type": "integer"}}}
 
     class Inner(InnerDoc):
diff --git a/test_opensearchpy/test_helpers/test_index.py b/test_opensearchpy/test_helpers/test_index.py
index 40048bc6..59c3e28e 100644
--- a/test_opensearchpy/test_helpers/test_index.py
+++ b/test_opensearchpy/test_helpers/test_index.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
@@ -26,6 +27,7 @@
 
 import string
 from random import choice
+from typing import Any
 
 from pytest import raises
 
@@ -37,7 +39,7 @@ class Post(Document):
     published_from = Date()
 
 
-def test_multiple_doc_types_will_combine_mappings():
+def test_multiple_doc_types_will_combine_mappings() -> None:
     class User(Document):
         username = Text()
 
@@ -55,16 +57,16 @@ class User(Document):
     } == i.to_dict()
 
 
-def test_search_is_limited_to_index_name():
+def test_search_is_limited_to_index_name() -> None:
     i = Index("my-index")
     s = i.search()
 
     assert s._index == ["my-index"]
 
 
-def test_cloned_index_has_copied_settings_and_using():
+def test_cloned_index_has_copied_settings_and_using() -> None:
     client = object()
-    i = Index("my-index", using=client)
+    i: Any = Index("my-index", using=client)
     i.settings(number_of_shards=1)
 
     i2 = i.clone("my-other-index")
@@ -75,13 +77,13 @@ def test_cloned_index_has_copied_settings_and_using():
     assert i._settings is not i2._settings
 
 
-def test_cloned_index_has_analysis_attribute():
+def test_cloned_index_has_analysis_attribute() -> None:
     """
     Regression test for Issue #582 in which `Index.clone()` was not copying
     over the `_analysis` attribute.
""" client = object() - i = Index("my-index", using=client) + i: Any = Index("my-index", using=client) random_analyzer_name = "".join((choice(string.ascii_letters) for _ in range(100))) random_analyzer = analyzer( @@ -95,16 +97,16 @@ def test_cloned_index_has_analysis_attribute(): assert i.to_dict()["settings"]["analysis"] == i2.to_dict()["settings"]["analysis"] -def test_settings_are_saved(): - i = Index("i") +def test_settings_are_saved() -> None: + i: Any = Index("i") i.settings(number_of_replicas=0) i.settings(number_of_shards=1) assert {"settings": {"number_of_shards": 1, "number_of_replicas": 0}} == i.to_dict() -def test_registered_doc_type_included_in_to_dict(): - i = Index("i", using="alias") +def test_registered_doc_type_included_in_to_dict() -> None: + i: Any = Index("i", using="alias") i.document(Post) assert { @@ -117,8 +119,8 @@ def test_registered_doc_type_included_in_to_dict(): } == i.to_dict() -def test_registered_doc_type_included_in_search(): - i = Index("i", using="alias") +def test_registered_doc_type_included_in_search() -> None: + i: Any = Index("i", using="alias") i.document(Post) s = i.search() @@ -126,33 +128,33 @@ def test_registered_doc_type_included_in_search(): assert s._doc_type == [Post] -def test_aliases_add_to_object(): +def test_aliases_add_to_object() -> None: random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) - alias_dict = {random_alias: {}} + alias_dict: Any = {random_alias: {}} - index = Index("i", using="alias") + index: Any = Index("i", using="alias") index.aliases(**alias_dict) assert index._aliases == alias_dict -def test_aliases_returned_from_to_dict(): +def test_aliases_returned_from_to_dict() -> None: random_alias = "".join((choice(string.ascii_letters) for _ in range(100))) - alias_dict = {random_alias: {}} + alias_dict: Any = {random_alias: {}} - index = Index("i", using="alias") + index: Any = Index("i", using="alias") index.aliases(**alias_dict) assert index._aliases == index.to_dict()["aliases"] == alias_dict -def test_analyzers_added_to_object(): +def test_analyzers_added_to_object() -> None: random_analyzer_name = "".join((choice(string.ascii_letters) for _ in range(100))) random_analyzer = analyzer( random_analyzer_name, tokenizer="standard", filter="standard" ) - index = Index("i", using="alias") + index: Any = Index("i", using="alias") index.analyzer(random_analyzer) assert index._analysis["analyzer"][random_analyzer_name] == { @@ -162,12 +164,12 @@ def test_analyzers_added_to_object(): } -def test_analyzers_returned_from_to_dict(): +def test_analyzers_returned_from_to_dict() -> None: random_analyzer_name = "".join((choice(string.ascii_letters) for _ in range(100))) random_analyzer = analyzer( random_analyzer_name, tokenizer="standard", filter="standard" ) - index = Index("i", using="alias") + index: Any = Index("i", using="alias") index.analyzer(random_analyzer) assert index.to_dict()["settings"]["analysis"]["analyzer"][ @@ -175,22 +177,22 @@ def test_analyzers_returned_from_to_dict(): ] == {"filter": ["standard"], "type": "custom", "tokenizer": "standard"} -def test_conflicting_analyzer_raises_error(): - i = Index("i") +def test_conflicting_analyzer_raises_error() -> None: + i: Any = Index("i") i.analyzer("my_analyzer", tokenizer="whitespace", filter=["lowercase", "stop"]) with raises(ValueError): i.analyzer("my_analyzer", tokenizer="keyword", filter=["lowercase", "stop"]) -def test_index_template_can_have_order(): - i = Index("i-*") +def test_index_template_can_have_order() -> None: + i: Any = Index("i-*") 
     it = i.as_template("i", order=2)
 
     assert {"index_patterns": ["i-*"], "order": 2} == it.to_dict()
 
 
-def test_index_template_save_result(mock_client):
-    it = IndexTemplate("test-template", "test-*")
+def test_index_template_save_result(mock_client: Any) -> None:
+    it: Any = IndexTemplate("test-template", "test-*")
 
     assert it.save(using="mock") == mock_client.indices.put_template()
diff --git a/test_opensearchpy/test_helpers/test_mapping.py b/test_opensearchpy/test_helpers/test_mapping.py
index 822440a4..2006b66f 100644
--- a/test_opensearchpy/test_helpers/test_mapping.py
+++ b/test_opensearchpy/test_helpers/test_mapping.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
@@ -30,7 +31,7 @@
 from opensearchpy.helpers import analysis, mapping
 
 
-def test_mapping_can_has_fields():
+def test_mapping_can_has_fields() -> None:
     m = mapping.Mapping()
     m.field("name", "text").field("tags", "keyword")
 
@@ -39,7 +40,7 @@ def test_mapping_can_has_fields():
     } == m.to_dict()
 
 
-def test_mapping_update_is_recursive():
+def test_mapping_update_is_recursive() -> None:
     m1 = mapping.Mapping()
     m1.field("title", "text")
     m1.field("author", "object")
@@ -72,7 +73,7 @@ def test_mapping_update_is_recursive():
     } == m1.to_dict()
 
 
-def test_properties_can_iterate_over_all_the_fields():
+def test_properties_can_iterate_over_all_the_fields() -> None:
     m = mapping.Mapping()
     m.field("f1", "text", test_attr="f1", fields={"f2": Keyword(test_attr="f2")})
     m.field("f3", Nested(test_attr="f3", properties={"f4": Text(test_attr="f4")}))
@@ -82,7 +83,7 @@ def test_properties_can_iterate_over_all_the_fields():
     }
 
 
-def test_mapping_can_collect_all_analyzers_and_normalizers():
+def test_mapping_can_collect_all_analyzers_and_normalizers() -> None:
     a1 = analysis.analyzer(
         "my_analyzer1",
         tokenizer="keyword",
@@ -155,7 +156,7 @@ def test_mapping_can_collect_all_analyzers_and_normalizers():
     assert json.loads(json.dumps(m.to_dict())) == m.to_dict()
 
 
-def test_mapping_can_collect_multiple_analyzers():
+def test_mapping_can_collect_multiple_analyzers() -> None:
     a1 = analysis.analyzer(
         "my_analyzer1",
         tokenizer="keyword",
@@ -201,7 +202,7 @@ def test_mapping_can_collect_multiple_analyzers():
     } == m._collect_analysis()
 
 
-def test_even_non_custom_analyzers_can_have_params():
+def test_even_non_custom_analyzers_can_have_params() -> None:
     a1 = analysis.analyzer("whitespace", type="pattern", pattern=r"\\s+")
     m = mapping.Mapping()
     m.field("title", "text", analyzer=a1)
@@ -211,14 +212,14 @@ def test_even_non_custom_analyzers_can_have_params():
     } == m._collect_analysis()
 
 
-def test_resolve_field_can_resolve_multifields():
+def test_resolve_field_can_resolve_multifields() -> None:
     m = mapping.Mapping()
     m.field("title", "text", fields={"keyword": Keyword()})
 
     assert isinstance(m.resolve_field("title.keyword"), Keyword)
 
 
-def test_resolve_nested():
+def test_resolve_nested() -> None:
     m = mapping.Mapping()
     m.field("n1", "nested", properties={"n2": Nested(properties={"k1": Keyword()})})
     m.field("k2", "keyword")
diff --git a/test_opensearchpy/test_helpers/test_query.py b/test_opensearchpy/test_helpers/test_query.py
index 46707f2c..27790748 100644
--- a/test_opensearchpy/test_helpers/test_query.py
+++ b/test_opensearchpy/test_helpers/test_query.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
@@ -24,66 +25,68 @@
 # specific language governing permissions and limitations
 # under the License.
 
+from typing import Any
+
 from pytest import raises
 
 from opensearchpy.helpers import function, query
 
 
-def test_empty_Q_is_match_all():
+def test_empty_Q_is_match_all() -> None:
     q = query.Q()
 
     assert isinstance(q, query.MatchAll)
     assert query.MatchAll() == q
 
 
-def test_match_to_dict():
+def test_match_to_dict() -> None:
     assert {"match": {"f": "value"}} == query.Match(f="value").to_dict()
 
 
-def test_match_to_dict_extra():
+def test_match_to_dict_extra() -> None:
     assert {"match": {"f": "value", "boost": 2}} == query.Match(
         f="value", boost=2
     ).to_dict()
 
 
-def test_fuzzy_to_dict():
+def test_fuzzy_to_dict() -> None:
     assert {"fuzzy": {"f": "value"}} == query.Fuzzy(f="value").to_dict()
 
 
-def test_prefix_to_dict():
+def test_prefix_to_dict() -> None:
     assert {"prefix": {"f": "value"}} == query.Prefix(f="value").to_dict()
 
 
-def test_term_to_dict():
+def test_term_to_dict() -> None:
     assert {"term": {"_type": "article"}} == query.Term(_type="article").to_dict()
 
 
-def test_bool_to_dict():
+def test_bool_to_dict() -> None:
     bool = query.Bool(must=[query.Match(f="value")], should=[])
 
     assert {"bool": {"must": [{"match": {"f": "value"}}]}} == bool.to_dict()
 
 
-def test_dismax_to_dict():
+def test_dismax_to_dict() -> None:
     assert {"dis_max": {"queries": [{"term": {"_type": "article"}}]}} == query.DisMax(
         queries=[query.Term(_type="article")]
     ).to_dict()
 
 
-def test_bool_from_dict_issue_318():
+def test_bool_from_dict_issue_318() -> None:
     d = {"bool": {"must_not": {"match": {"field": "value"}}}}
     q = query.Q(d)
 
     assert q == ~query.Match(field="value")
 
 
-def test_repr():
+def test_repr() -> None:
     bool = query.Bool(must=[query.Match(f="value")], should=[])
 
     assert "Bool(must=[Match(f='value')])" == repr(bool)
 
 
-def test_query_clone():
+def test_query_clone() -> None:
     bool = query.Bool(
         must=[query.Match(x=42)],
         should=[query.Match(g="v2")],
@@ -95,14 +98,14 @@ def test_query_clone():
     assert bool is not bool_clone
 
 
-def test_bool_converts_its_init_args_to_queries():
+def test_bool_converts_its_init_args_to_queries() -> None:
     q = query.Bool(must=[{"match": {"f": "value"}}])
 
     assert len(q.must) == 1
     assert q.must[0] == query.Match(f="value")
 
 
-def test_two_queries_make_a_bool():
+def test_two_queries_make_a_bool() -> None:
     q1 = query.Match(f="value1")
     q2 = query.Match(message={"query": "this is a test", "opeartor": "and"})
     q = q1 & q2
@@ -111,7 +114,7 @@ def test_two_queries_make_a_bool():
     assert [q1, q2] == q.must
 
 
-def test_other_and_bool_appends_other_to_must():
+def test_other_and_bool_appends_other_to_must() -> None:
     q1 = query.Match(f="value1")
     qb = query.Bool()
 
@@ -120,16 +123,16 @@ def test_other_and_bool_appends_other_to_must():
     assert q.must[0] == q1
 
 
-def test_bool_and_other_appends_other_to_must():
-    q1 = query.Match(f="value1")
-    qb = query.Bool()
+def test_bool_and_other_appends_other_to_must() -> None:
+    q1: Any = query.Match(f="value1")
+    qb: Any = query.Bool()
 
     q = qb & q1
     assert q is not qb
     assert q.must[0] == q1
 
 
-def test_bool_and_other_sets_min_should_match_if_needed():
+def test_bool_and_other_sets_min_should_match_if_needed() -> None:
     q1 = query.Q("term", category=1)
     q2 = query.Q(
         "bool", should=[query.Q("term", name="aaa"), query.Q("term", name="bbb")]
@@ -143,7 +146,7 @@ def test_bool_and_other_sets_min_should_match_if_needed():
     )
 
 
-def test_bool_with_different_minimum_should_match_should_not_be_combined():
+def test_bool_with_different_minimum_should_match_should_not_be_combined() -> None:
     q1 = query.Q(
         "bool",
         minimum_should_match=2,
@@ -182,11 +185,11 @@ def test_bool_with_different_minimum_should_match_should_not_be_combined():
     assert q5 == query.Bool(should=[q1, q2, q3])
 
 
-def test_empty_bool_has_min_should_match_0():
+def test_empty_bool_has_min_should_match_0() -> None:
     assert 0 == query.Bool()._min_should_match
 
 
-def test_query_and_query_creates_bool():
+def test_query_and_query_creates_bool() -> None:
     q1 = query.Match(f=42)
     q2 = query.Match(g=47)
 
@@ -195,7 +198,7 @@ def test_query_and_query_creates_bool():
     assert q.must == [q1, q2]
 
 
-def test_match_all_and_query_equals_other():
+def test_match_all_and_query_equals_other() -> None:
     q1 = query.Match(f=42)
     q2 = query.MatchAll()
 
@@ -203,39 +206,39 @@ def test_match_all_and_query_equals_other():
     assert q1 == q
 
 
-def test_not_match_all_is_match_none():
+def test_not_match_all_is_match_none() -> None:
     q = query.MatchAll()
 
     assert ~q == query.MatchNone()
 
 
-def test_not_match_none_is_match_all():
+def test_not_match_none_is_match_all() -> None:
     q = query.MatchNone()
 
     assert ~q == query.MatchAll()
 
 
-def test_invert_empty_bool_is_match_none():
+def test_invert_empty_bool_is_match_none() -> None:
     q = query.Bool()
 
     assert ~q == query.MatchNone()
 
 
-def test_match_none_or_query_equals_query():
+def test_match_none_or_query_equals_query() -> None:
     q1 = query.Match(f=42)
     q2 = query.MatchNone()
 
     assert q1 | q2 == query.Match(f=42)
 
 
-def test_match_none_and_query_equals_match_none():
+def test_match_none_and_query_equals_match_none() -> None:
     q1 = query.Match(f=42)
     q2 = query.MatchNone()
 
     assert q1 & q2 == query.MatchNone()
 
 
-def test_bool_and_bool():
+def test_bool_and_bool() -> None:
     qt1, qt2, qt3 = query.Match(f=1), query.Match(f=2), query.Match(f=3)
 
     q1 = query.Bool(must=[qt1], should=[qt2])
@@ -251,7 +254,7 @@ def test_bool_and_bool():
     )
 
 
-def test_bool_and_bool_with_min_should_match():
+def test_bool_and_bool_with_min_should_match() -> None:
     qt1, qt2 = query.Match(f=1), query.Match(f=2)
     q1 = query.Q("bool", minimum_should_match=1, should=[qt1])
     q2 = query.Q("bool", minimum_should_match=1, should=[qt2])
@@ -259,19 +262,19 @@ def test_bool_and_bool_with_min_should_match():
     assert query.Q("bool", must=[qt1, qt2]) == q1 & q2
 
 
-def test_inverted_query_becomes_bool_with_must_not():
+def test_inverted_query_becomes_bool_with_must_not() -> None:
     q = query.Match(f=42)
 
     assert ~q == query.Bool(must_not=[query.Match(f=42)])
 
 
-def test_inverted_query_with_must_not_become_should():
+def test_inverted_query_with_must_not_become_should() -> None:
     q = query.Q("bool", must_not=[query.Q("match", f=1), query.Q("match", f=2)])
 
     assert ~q == query.Q("bool", should=[query.Q("match", f=1), query.Q("match", f=2)])
 
 
-def test_inverted_query_with_must_and_must_not():
+def test_inverted_query_with_must_and_must_not() -> None:
     q = query.Q(
         "bool",
         must=[query.Q("match", f=3), query.Q("match", f=4)],
@@ -291,13 +294,13 @@ def test_inverted_query_with_must_and_must_not():
     )
 
 
-def test_double_invert_returns_original_query():
+def test_double_invert_returns_original_query() -> None:
     q = query.Match(f=42)
 
     assert q == ~~q
 
 
-def test_bool_query_gets_inverted_internally():
+def test_bool_query_gets_inverted_internally() -> None:
     q = query.Bool(must_not=[query.Match(f=42)], must=[query.Match(g="v")])
 
     assert ~q == query.Bool(
@@ -310,7 +313,7 @@ def test_bool_query_gets_inverted_internally():
     )
 
 
-def test_match_all_or_something_is_match_all():
+def test_match_all_or_something_is_match_all() -> None:
     q1 = query.MatchAll()
     q2 = query.Match(f=42)
 
@@ -318,7 +321,7 @@ def test_match_all_or_something_is_match_all():
     assert (q2 | q1) == query.MatchAll()
 
 
-def test_or_produces_bool_with_should():
+def test_or_produces_bool_with_should() -> None:
     q1 = query.Match(f=42)
     q2 = query.Match(g="v")
 
@@ -326,7 +329,7 @@ def test_or_produces_bool_with_should():
     assert q == query.Bool(should=[q1, q2])
 
 
-def test_or_bool_doesnt_loop_infinitely_issue_37():
+def test_or_bool_doesnt_loop_infinitely_issue_37() -> None:
     q = query.Match(f=42) | ~query.Match(f=47)
 
     assert q == query.Bool(
@@ -334,7 +337,7 @@ def test_or_bool_doesnt_loop_infinitely_issue_37():
     )
 
 
-def test_or_bool_doesnt_loop_infinitely_issue_96():
+def test_or_bool_doesnt_loop_infinitely_issue_96() -> None:
     q = ~query.Match(f=42) | ~query.Match(f=47)
 
     assert q == query.Bool(
@@ -345,14 +348,14 @@ def test_or_bool_doesnt_loop_infinitely_issue_96():
     )
 
 
-def test_bool_will_append_another_query_with_or():
+def test_bool_will_append_another_query_with_or() -> None:
     qb = query.Bool(should=[query.Match(f="v"), query.Match(f="v2")])
     q = query.Match(g=42)
 
     assert (q | qb) == query.Bool(should=[query.Match(f="v"), query.Match(f="v2"), q])
 
 
-def test_bool_queries_with_only_should_get_concatenated():
+def test_bool_queries_with_only_should_get_concatenated() -> None:
     q1 = query.Bool(should=[query.Match(f=1), query.Match(f=2)])
     q2 = query.Bool(should=[query.Match(f=3), query.Match(f=4)])
 
@@ -361,7 +364,7 @@ def test_bool_queries_with_only_should_get_concatenated():
     )
 
 
-def test_two_bool_queries_append_one_to_should_if_possible():
+def test_two_bool_queries_append_one_to_should_if_possible() -> None:
     q1 = query.Bool(should=[query.Match(f="v")])
     q2 = query.Bool(must=[query.Match(f="v")])
 
@@ -373,12 +376,12 @@ def test_two_bool_queries_append_one_to_should_if_possible():
     )
 
 
-def test_queries_are_registered():
+def test_queries_are_registered() -> None:
     assert "match" in query.Query._classes
     assert query.Query._classes["match"] is query.Match
 
 
-def test_defining_query_registers_it():
+def test_defining_query_registers_it() -> None:
     class MyQuery(query.Query):
         name = "my_query"
 
@@ -386,62 +389,62 @@ class MyQuery(query.Query):
     assert query.Query._classes["my_query"] is MyQuery
 
 
-def test_Q_passes_query_through():
+def test_Q_passes_query_through() -> None:
     q = query.Match(f="value1")
 
     assert query.Q(q) is q
 
 
-def test_Q_constructs_query_by_name():
+def test_Q_constructs_query_by_name() -> None:
     q = query.Q("match", f="value")
 
     assert isinstance(q, query.Match)
     assert {"f": "value"} == q._params
 
 
-def test_Q_translates_double_underscore_to_dots_in_param_names():
+def test_Q_translates_double_underscore_to_dots_in_param_names() -> None:
     q = query.Q("match", comment__author="honza")
 
     assert {"comment.author": "honza"} == q._params
 
 
-def test_Q_doesn_translate_double_underscore_to_dots_in_param_names():
+def test_Q_doesn_translate_double_underscore_to_dots_in_param_names() -> None:
     q = query.Q("match", comment__author="honza", _expand__to_dot=False)
 
     assert {"comment__author": "honza"} == q._params
 
 
-def test_Q_constructs_simple_query_from_dict():
+def test_Q_constructs_simple_query_from_dict() -> None:
     q = query.Q({"match": {"f": "value"}})
 
     assert isinstance(q, query.Match)
     assert {"f": "value"} == q._params
 
 
-def test_Q_constructs_compound_query_from_dict():
+def test_Q_constructs_compound_query_from_dict() -> None:
     q = query.Q({"bool": {"must": [{"match": {"f": "value"}}]}})
 
     assert q == query.Bool(must=[query.Match(f="value")])
 
 
-def test_Q_raises_error_when_passed_in_dict_and_params():
+def test_Q_raises_error_when_passed_in_dict_and_params() -> None:
     with raises(Exception):
         query.Q({"match": {"f": "value"}}, f="value")
 
 
-def test_Q_raises_error_when_passed_in_query_and_params():
+def test_Q_raises_error_when_passed_in_query_and_params() -> None:
     q = query.Match(f="value1")
 
     with raises(Exception):
         query.Q(q, f="value")
 
 
-def test_Q_raises_error_on_unknown_query():
+def test_Q_raises_error_on_unknown_query() -> None:
     with raises(Exception):
         query.Q("not a query", f="value")
 
 
-def test_match_all_and_anything_is_anything():
+def test_match_all_and_anything_is_anything() -> None:
     q = query.MatchAll()
 
     s = query.Match(f=42)
@@ -449,7 +452,7 @@ def test_match_all_and_anything_is_anything():
     assert s & q == s
 
 
-def test_function_score_with_functions():
+def test_function_score_with_functions() -> None:
     q = query.Q(
         "function_score",
         functions=[query.SF("script_score", script="doc['comment_count'] * _score")],
@@ -462,7 +465,7 @@ def test_function_score_with_functions():
     } == q.to_dict()
 
 
-def test_function_score_with_no_function_is_boost_factor():
+def test_function_score_with_no_function_is_boost_factor() -> None:
     q = query.Q(
         "function_score",
         functions=[query.SF({"weight": 20, "filter": query.Q("term", f=42)})],
@@ -473,7 +476,7 @@ def test_function_score_with_no_function_is_boost_factor():
     } == q.to_dict()
 
 
-def test_function_score_to_dict():
+def test_function_score_to_dict() -> None:
     q = query.Q(
         "function_score",
         query=query.Q("match", title="python"),
@@ -502,7 +505,7 @@ def test_function_score_to_dict():
     assert d == q.to_dict()
 
 
-def test_function_score_with_single_function():
+def test_function_score_with_single_function() -> None:
     d = {
         "function_score": {
             "filter": {"term": {"tags": "python"}},
@@ -520,7 +523,7 @@ def test_function_score_with_single_function():
     assert "doc['comment_count'] * _score" == sf.script
 
 
-def test_function_score_from_dict():
+def test_function_score_from_dict() -> None:
     d = {
         "function_score": {
             "filter": {"term": {"tags": "python"}},
@@ -549,7 +552,7 @@ def test_function_score_from_dict():
     assert {"boost_factor": 6} == sf.to_dict()
 
 
-def test_script_score():
+def test_script_score() -> None:
     d = {
         "script_score": {
             "query": {"match_all": {}},
diff --git a/test_opensearchpy/test_helpers/test_result.py b/test_opensearchpy/test_helpers/test_result.py
index f07c633b..296553f3 100644
--- a/test_opensearchpy/test_helpers/test_result.py
+++ b/test_opensearchpy/test_helpers/test_result.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
@@ -26,6 +27,7 @@
 
 import pickle
 from datetime import date
+from typing import Any
 
 from pytest import fixture, raises
 
@@ -35,12 +37,12 @@
 from opensearchpy.helpers.response.aggs import AggResponse, Bucket, BucketData
 
 
-@fixture
-def agg_response(aggs_search, aggs_data):
+@fixture  # type: ignore
+def agg_response(aggs_search: Any, aggs_data: Any) -> Any:
     return response.Response(aggs_search, aggs_data)
 
 
-def test_agg_response_is_pickleable(agg_response):
+def test_agg_response_is_pickleable(agg_response: Any) -> None:
     agg_response.hits
     r = pickle.loads(pickle.dumps(agg_response))
 
@@ -49,7 +51,7 @@ def test_agg_response_is_pickleable(agg_response):
     assert r.hits == agg_response.hits
 
 
-def test_response_is_pickleable(dummy_response):
+def test_response_is_pickleable(dummy_response: Any) -> None:
     res = response.Response(Search(), dummy_response)
     res.hits
     r = pickle.loads(pickle.dumps(res))
@@ -59,7 +61,7 @@ def test_response_is_pickleable(dummy_response):
     assert r.hits == res.hits
 
 
-def test_hit_is_pickleable(dummy_response):
+def test_hit_is_pickleable(dummy_response: Any) -> None:
     res = response.Response(Search(), dummy_response)
     hits = pickle.loads(pickle.dumps(res.hits))
 
@@ -67,14 +69,14 @@ def test_hit_is_pickleable(dummy_response):
     assert hits[0].meta == res.hits[0].meta
 
 
-def test_response_stores_search(dummy_response):
+def test_response_stores_search(dummy_response: Any) -> None:
     s = Search()
     r = response.Response(s, dummy_response)
 
     assert r._search is s
 
 
-def test_interactive_helpers(dummy_response):
+def test_interactive_helpers(dummy_response: Any) -> None:
     res = response.Response(Search(), dummy_response)
     hits = res.hits
     h = hits[0]
@@ -97,19 +99,19 @@ def test_interactive_helpers(dummy_response):
     ] == repr(h)
 
 
-def test_empty_response_is_false(dummy_response):
+def test_empty_response_is_false(dummy_response: Any) -> None:
     dummy_response["hits"]["hits"] = []
     res = response.Response(Search(), dummy_response)
 
     assert not res
 
 
-def test_len_response(dummy_response):
+def test_len_response(dummy_response: Any) -> None:
     res = response.Response(Search(), dummy_response)
     assert len(res) == 4
 
 
-def test_iterating_over_response_gives_you_hits(dummy_response):
+def test_iterating_over_response_gives_you_hits(dummy_response: Any) -> None:
     res = response.Response(Search(), dummy_response)
     hits = list(h for h in res)
 
@@ -126,7 +128,7 @@ def test_iterating_over_response_gives_you_hits(dummy_response):
     assert hits[1].meta.routing == "opensearch"
 
 
-def test_hits_get_wrapped_to_contain_additional_attrs(dummy_response):
+def test_hits_get_wrapped_to_contain_additional_attrs(dummy_response: Any) -> None:
     res = response.Response(Search(), dummy_response)
     hits = res.hits
 
@@ -134,7 +136,7 @@ def test_hits_get_wrapped_to_contain_additional_attrs(dummy_response):
     assert 12.0 == hits.max_score
 
 
-def test_hits_provide_dot_and_bracket_access_to_attrs(dummy_response):
+def test_hits_provide_dot_and_bracket_access_to_attrs(dummy_response: Any) -> None:
     res = response.Response(Search(), dummy_response)
     h = res.hits[0]
 
@@ -150,30 +152,32 @@ def test_hits_provide_dot_and_bracket_access_to_attrs(dummy_response):
         h.not_there
 
 
-def test_slicing_on_response_slices_on_hits(dummy_response):
+def test_slicing_on_response_slices_on_hits(dummy_response: Any) -> None:
     res = response.Response(Search(), dummy_response)
 
     assert res[0] is res.hits[0]
     assert res[::-1] == res.hits[::-1]
 
 
-def test_aggregation_base(agg_response):
+def test_aggregation_base(agg_response: Any) -> None:
     assert agg_response.aggs is agg_response.aggregations
     assert isinstance(agg_response.aggs, response.AggResponse)
 
 
-def test_metric_agg_works(agg_response):
+def test_metric_agg_works(agg_response: Any) -> None:
     assert 25052.0 == agg_response.aggs.sum_lines.value
 
 
-def test_aggregations_can_be_iterated_over(agg_response):
+def test_aggregations_can_be_iterated_over(agg_response: Any) -> None:
     aggs = [a for a in agg_response.aggs]
 
     assert len(aggs) == 3
     assert all(map(lambda a: isinstance(a, AggResponse), aggs))
 
 
-def test_aggregations_can_be_retrieved_by_name(agg_response, aggs_search):
+def test_aggregations_can_be_retrieved_by_name(
+    agg_response: Any, aggs_search: Any
+) -> None:
     a = agg_response.aggs["popular_files"]
 
     assert isinstance(a, BucketData)
@@ -181,7 +185,7 @@ def test_aggregations_can_be_retrieved_by_name(agg_response, aggs_search):
     assert a._meta["aggs"] is aggs_search.aggs.aggs["popular_files"]
 
 
-def test_bucket_response_can_be_iterated_over(agg_response):
+def test_bucket_response_can_be_iterated_over(agg_response: Any) -> None:
     popular_files = agg_response.aggregations.popular_files
 
     buckets = [b for b in popular_files]
@@ -189,7 +193,7 @@ def test_bucket_response_can_be_iterated_over(agg_response):
     assert buckets == popular_files.buckets
 
 
-def test_bucket_keys_get_deserialized(aggs_data, aggs_search):
+def test_bucket_keys_get_deserialized(aggs_data: Any, aggs_search: Any) -> None:
     class Commit(Document):
         info = Object(properties={"committed_date": Date()})
diff --git a/test_opensearchpy/test_helpers/test_search.py b/test_opensearchpy/test_helpers/test_search.py
index 91c7a709..b44d5dd5 100644
--- a/test_opensearchpy/test_helpers/test_search.py
+++ b/test_opensearchpy/test_helpers/test_search.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
@@ -25,6 +26,7 @@
 # under the License.
 
 from copy import deepcopy
+from typing import Any
 
 from pytest import raises
 
@@ -33,51 +35,51 @@
 from opensearchpy.helpers import query, search
 
 
-def test_expand__to_dot_is_respected():
+def test_expand__to_dot_is_respected() -> None:
     s = search.Search().query("match", a__b=42, _expand__to_dot=False)
 
     assert {"query": {"match": {"a__b": 42}}} == s.to_dict()
 
 
-def test_execute_uses_cache():
-    s = search.Search()
-    r = object()
+def test_execute_uses_cache() -> None:
+    s: Any = search.Search()
+    r: Any = object()
     s._response = r
 
     assert r is s.execute()
 
 
-def test_cache_can_be_ignored(mock_client):
-    s = search.Search(using="mock")
-    r = object()
+def test_cache_can_be_ignored(mock_client: Any) -> None:
+    s: Any = search.Search(using="mock")
+    r: Any = object()
     s._response = r
     s.execute(ignore_cache=True)
 
     mock_client.search.assert_called_once_with(index=None, body={})
 
 
-def test_iter_iterates_over_hits():
-    s = search.Search()
+def test_iter_iterates_over_hits() -> None:
+    s: Any = search.Search()
     s._response = [1, 2, 3]
 
     assert [1, 2, 3] == list(s)
 
 
-def test_cache_isnt_cloned():
-    s = search.Search()
+def test_cache_isnt_cloned() -> None:
+    s: Any = search.Search()
     s._response = object()
 
     assert not hasattr(s._clone(), "_response")
 
 
-def test_search_starts_with_no_query():
-    s = search.Search()
+def test_search_starts_with_no_query() -> None:
+    s: Any = search.Search()
 
     assert s.query._proxied is None
 
 
-def test_search_query_combines_query():
-    s = search.Search()
+def test_search_query_combines_query() -> None:
+    s: Any = search.Search()
 
     s2 = s.query("match", f=42)
     assert s2.query._proxied == query.Match(f=42)
@@ -88,8 +90,8 @@ def test_search_query_combines_query():
     assert s3.query._proxied == query.Bool(must=[query.Match(f=42), query.Match(f=43)])
 
 
-def test_query_can_be_assigned_to():
-    s = search.Search()
+def test_query_can_be_assigned_to() -> None:
+    s: Any = search.Search()
 
     q = Q("match", title="python")
     s.query = q
@@ -97,8 +99,8 @@ def test_query_can_be_assigned_to():
     assert s.query._proxied is q
 
 
-def test_query_can_be_wrapped():
-    s = search.Search().query("match", title="python")
+def test_query_can_be_wrapped() -> None:
+    s: Any = search.Search().query("match", title="python")
 
     s.query = Q("function_score", query=s.query, field_value_factor={"field": "rating"})
 
@@ -112,54 +114,54 @@ def test_query_can_be_wrapped():
     } == s.to_dict()
 
 
-def test_using():
-    o = object()
-    o2 = object()
-    s = search.Search(using=o)
+def test_using() -> None:
+    o: Any = object()
+    o2: Any = object()
+    s: Any = search.Search(using=o)
     assert s._using is o
     s2 = s.using(o2)
     assert s._using is o
     assert s2._using is o2
 
 
-def test_methods_are_proxied_to_the_query():
-    s = search.Search().query("match_all")
+def test_methods_are_proxied_to_the_query() -> None:
+    s: Any = search.Search().query("match_all")
 
     assert s.query.to_dict() == {"match_all": {}}
 
 
-def test_query_always_returns_search():
-    s = search.Search()
+def test_query_always_returns_search() -> None:
+    s: Any = search.Search()
 
     assert isinstance(s.query("match", f=42), search.Search)
 
 
-def test_source_copied_on_clone():
-    s = search.Search().source(False)
+def test_source_copied_on_clone() -> None:
+    s: Any = search.Search().source(False)
     assert s._clone()._source == s._source
     assert s._clone()._source is False
 
-    s2 = search.Search().source([])
+    s2: Any = search.Search().source([])
     assert s2._clone()._source == s2._source
     assert s2._source == []
 
-    s3 = search.Search().source(["some", "fields"])
+    s3: Any = search.Search().source(["some", "fields"])
     assert s3._clone()._source == s3._source
     assert s3._clone()._source == ["some", "fields"]
 
 
-def test_copy_clones():
+def test_copy_clones() -> None:
     from copy import copy
 
-    s1 = search.Search().source(["some", "fields"])
-    s2 = copy(s1)
+    s1: Any = search.Search().source(["some", "fields"])
+    s2: Any = copy(s1)
 
     assert s1 == s2
     assert s1 is not s2
 
 
-def test_aggs_allow_two_metric():
-    s = search.Search()
+def test_aggs_allow_two_metric() -> None:
+    s: Any = search.Search()
 
     s.aggs.metric("a", "max", field="a").metric("b", "max", field="b")
 
@@ -168,8 +170,8 @@ def test_aggs_allow_two_metric():
     }
 
 
-def test_aggs_get_copied_on_change():
-    s = search.Search().query("match_all")
+def test_aggs_get_copied_on_change() -> None:
+    s: Any = search.Search().query("match_all")
     s.aggs.bucket("per_tag", "terms", field="f").metric(
         "max_score", "max", field="score"
     )
@@ -181,7 +183,7 @@ def test_aggs_get_copied_on_change():
     s4 = s3._clone()
     s4.aggs.metric("max_score", "max", field="score")
 
-    d = {
+    d: Any = {
         "query": {"match_all": {}},
         "aggs": {
             "per_tag": {
@@ -200,7 +202,7 @@ def test_aggs_get_copied_on_change():
     assert d == s4.to_dict()
 
 
-def test_search_index():
+def test_search_index() -> None:
     s = search.Search(index="i")
     assert s._index == ["i"]
     s = s.index("i2")
@@ -231,7 +233,7 @@ def test_search_index():
     assert s2._index == ["i", "i2", "i3", "i4", "i5"]
 
 
-def test_doc_type_document_class():
+def test_doc_type_document_class() -> None:
     class MyDocument(Document):
         pass
 
@@ -244,7 +246,7 @@ class MyDocument(Document):
     assert s._doc_type_map == {}
 
 
-def test_sort():
+def test_sort() -> None:
     s = search.Search()
     s = s.sort("fielda", "-fieldb")
 
@@ -256,7 +258,7 @@ def test_sort():
     assert search.Search().to_dict() == s.to_dict()
 
 
-def test_sort_by_score():
+def test_sort_by_score() -> None:
     s = search.Search()
     s = s.sort("_score")
     assert {"sort": ["_score"]} == s.to_dict()
@@ -266,7 +268,7 @@ def test_sort_by_score():
         s.sort("-_score")
 
 
-def test_collapse():
+def test_collapse() -> None:
     s = search.Search()
 
     inner_hits = {"name": "most_recent", "size": 5, "sort": [{"@timestamp": "desc"}]}
@@ -300,7 +302,7 @@ def test_collapse():
     assert search.Search().to_dict() == s.to_dict()
 
 
-def test_slice():
+def test_slice() -> None:
     s = search.Search()
     assert {"from": 3, "size": 7} == s[3:10].to_dict()
     assert {"from": 0, "size": 5} == s[:5].to_dict()
@@ -309,12 +311,12 @@ def test_slice():
     assert {"from": 20, "size": 0} == s[20:0].to_dict()
 
 
-def test_index():
+def test_index() -> None:
     s = search.Search()
     assert {"from": 3, "size": 1} == s[3].to_dict()
 
 
-def test_search_to_dict():
+def test_search_to_dict() -> None:
     s = search.Search()
     assert {} == s.to_dict()
 
@@ -343,7 +345,7 @@ def test_search_to_dict():
     assert {"size": 5, "from": 42} == s.to_dict()
 
 
-def test_complex_example():
+def test_complex_example() -> None:
     s = search.Search()
     s = (
         s.query("match", title="python")
@@ -394,7 +396,7 @@ def test_complex_example():
     } == s.to_dict()
 
 
-def test_reverse():
+def test_reverse() -> None:
     d = {
         "query": {
             "filtered": {
@@ -444,13 +446,13 @@ def test_reverse():
     assert d == s.to_dict()
 
 
-def test_from_dict_doesnt_need_query():
+def test_from_dict_doesnt_need_query() -> None:
     s = search.Search.from_dict({"size": 5})
 
     assert {"size": 5} == s.to_dict()
 
 
-def test_params_being_passed_to_search(mock_client):
+def test_params_being_passed_to_search(mock_client: Any) -> None:
     s = search.Search(using="mock")
     s = s.params(routing="42")
     s.execute()
@@ -458,7 +460,7 @@ def test_params_being_passed_to_search(mock_client):
     mock_client.search.assert_called_once_with(index=None, body={}, routing="42")
 
 
-def test_source():
+def test_source() -> None:
     assert {} == search.Search().source().to_dict()
 
     assert {
@@ -472,7 +474,7 @@ def test_source():
     ).source(["f1", "f2"]).to_dict()
 
 
-def test_source_on_clone():
+def test_source_on_clone() -> None:
     assert {
         "_source": {"includes": ["foo.bar.*"], "excludes": ["foo.one"]},
         "query": {"bool": {"filter": [{"term": {"title": "python"}}]}},
@@ -487,7 +489,7 @@ def test_source_on_clone():
     } == search.Search().source(False).filter("term", title="python").to_dict()
 
 
-def test_source_on_clear():
+def test_source_on_clear() -> None:
     assert (
         {}
         == search.Search()
@@ -497,7 +499,7 @@ def test_source_on_clear():
     )
 
 
-def test_suggest_accepts_global_text():
+def test_suggest_accepts_global_text() -> None:
     s = search.Search.from_dict(
         {
             "suggest": {
@@ -519,7 +521,7 @@ def test_suggest_accepts_global_text():
     } == s.to_dict()
 
 
-def test_suggest():
+def test_suggest() -> None:
     s = search.Search()
     s = s.suggest("my_suggestion", "pyhton", term={"field": "title"})
 
@@ -528,7 +530,7 @@ def test_suggest():
     } == s.to_dict()
 
 
-def test_exclude():
+def test_exclude() -> None:
     s = search.Search()
     s = s.exclude("match", title="python")
 
@@ -541,7 +543,7 @@ def test_exclude():
     } == s.to_dict()
 
 
-def test_delete_by_query(mock_client):
+def test_delete_by_query(mock_client: Any) -> None:
     s = search.Search(using="mock").query("match", lang="java")
     s.delete()
 
@@ -550,7 +552,7 @@ def test_delete_by_query(mock_client):
     )
 
 
-def test_update_from_dict():
+def test_update_from_dict() -> None:
     s = search.Search()
     s.update_from_dict({"indices_boost": [{"important-documents": 2}]})
     s.update_from_dict({"_source": ["id", "name"]})
@@ -561,7 +563,7 @@ def test_update_from_dict():
     } == s.to_dict()
 
 
-def test_rescore_query_to_dict():
+def test_rescore_query_to_dict() -> None:
     s = search.Search(index="index-name")
 
     positive_query = Q(
diff --git a/test_opensearchpy/test_helpers/test_update_by_query.py b/test_opensearchpy/test_helpers/test_update_by_query.py
index d298a0a0..90e7aa78 100644
--- a/test_opensearchpy/test_helpers/test_update_by_query.py
+++ b/test_opensearchpy/test_helpers/test_update_by_query.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
@@ -25,18 +26,19 @@
 # under the License.
 
 from copy import deepcopy
+from typing import Any
 
 from opensearchpy import Q, UpdateByQuery
 from opensearchpy.helpers.response import UpdateByQueryResponse
 
 
-def test_ubq_starts_with_no_query():
+def test_ubq_starts_with_no_query() -> None:
     ubq = UpdateByQuery()
 
     assert ubq.query._proxied is None
 
 
-def test_ubq_to_dict():
+def test_ubq_to_dict() -> None:
     ubq = UpdateByQuery()
     assert {} == ubq.to_dict()
 
@@ -52,7 +54,7 @@ def test_ubq_to_dict():
     assert {"extra_q": {"term": {"category": "conference"}}} == ubq.to_dict()
 
 
-def test_complex_example():
+def test_complex_example() -> None:
     ubq = UpdateByQuery()
     ubq = (
         ubq.query("match", title="python")
@@ -90,7 +92,7 @@ def test_complex_example():
     } == ubq.to_dict()
 
 
-def test_exclude():
+def test_exclude() -> None:
     ubq = UpdateByQuery()
     ubq = ubq.exclude("match", title="python")
 
@@ -103,7 +105,7 @@ def test_exclude():
     } == ubq.to_dict()
 
 
-def test_reverse():
+def test_reverse() -> None:
     d = {
         "query": {
             "filtered": {
@@ -139,13 +141,13 @@ def test_reverse():
     assert d == ubq.to_dict()
 
 
-def test_from_dict_doesnt_need_query():
+def test_from_dict_doesnt_need_query() -> None:
     ubq = UpdateByQuery.from_dict({"script": {"source": "test"}})
 
     assert {"script": {"source": "test"}} == ubq.to_dict()
 
 
-def test_params_being_passed_to_search(mock_client):
+def test_params_being_passed_to_search(mock_client: Any) -> None:
     ubq = UpdateByQuery(using="mock")
     ubq = ubq.params(routing="42")
     ubq.execute()
@@ -155,7 +157,7 @@ def test_params_being_passed_to_search(mock_client):
     )
 
 
-def test_overwrite_script():
+def test_overwrite_script() -> None:
     ubq = UpdateByQuery()
     ubq = ubq.script(
         source="ctx._source.likes += params.f", lang="painless", params={"f": 3}
@@ -171,7 +173,7 @@ def test_overwrite_script():
     assert {"script": {"source": "ctx._source.likes++"}} == ubq.to_dict()
 
 
-def test_update_by_query_response_success():
+def test_update_by_query_response_success() -> None:
     ubqr = UpdateByQueryResponse({}, {"timed_out": False, "failures": []})
     assert ubqr.success()
diff --git a/test_opensearchpy/test_helpers/test_utils.py b/test_opensearchpy/test_helpers/test_utils.py
index 7a620736..b6949833 100644
--- a/test_opensearchpy/test_helpers/test_utils.py
+++ b/test_opensearchpy/test_helpers/test_utils.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
@@ -32,21 +33,21 @@
 from opensearchpy.helpers import utils
 
 
-def test_attrdict_pickle():
+def test_attrdict_pickle() -> None:
     ad = utils.AttrDict({})
 
     pickled_ad = pickle.dumps(ad)
     assert ad == pickle.loads(pickled_ad)
 
 
-def test_attrlist_pickle():
+def test_attrlist_pickle() -> None:
     al = utils.AttrList([])
 
     pickled_al = pickle.dumps(al)
     assert al == pickle.loads(pickled_al)
 
 
-def test_attrlist_slice():
+def test_attrlist_slice() -> None:
     class MyAttrDict(utils.AttrDict):
         pass
 
@@ -54,7 +55,7 @@ class MyAttrDict(utils.AttrDict):
     assert isinstance(ls[:][0], MyAttrDict)
 
 
-def test_merge():
+def test_merge() -> None:
     a = utils.AttrDict({"a": {"b": 42, "c": 47}})
     b = {"a": {"b": 123, "d": -12}, "e": [1, 2, 3]}
 
@@ -63,7 +64,7 @@ def test_merge():
     assert a == {"a": {"b": 123, "c": 47, "d": -12}, "e": [1, 2, 3]}
 
 
-def test_merge_conflict():
+def test_merge_conflict() -> None:
     for d in (
         {"a": 42},
         {"a": {"b": 47}},
@@ -73,7 +74,7 @@ def test_merge_conflict():
         utils.merge({"a": {"b": 42}}, d, True)
 
 
-def test_attrdict_bool():
+def test_attrdict_bool() -> None:
     d = utils.AttrDict({})
 
     assert not d
@@ -81,7 +82,7 @@ def test_attrdict_bool():
     assert d
 
 
-def test_attrlist_items_get_wrapped_during_iteration():
+def test_attrlist_items_get_wrapped_during_iteration() -> None:
     al = utils.AttrList([1, object(), [1], {}])
 
     ls = list(iter(al))
@@ -90,7 +91,7 @@ def test_attrlist_items_get_wrapped_during_iteration():
     assert isinstance(ls[3], utils.AttrDict)
 
 
-def test_serializer_deals_with_Attr_versions():
+def test_serializer_deals_with_Attr_versions() -> None:
     d = utils.AttrDict({"key": utils.AttrList([1, 2, 3])})
 
     assert serializer.serializer.dumps(d) == serializer.serializer.dumps(
@@ -98,21 +99,21 @@ def test_serializer_deals_with_Attr_versions():
     )
 
 
-def test_serializer_deals_with_objects_with_to_dict():
+def test_serializer_deals_with_objects_with_to_dict() -> None:
     class MyClass(object):
-        def to_dict(self):
+        def to_dict(self) -> int:
             return 42
 
     assert serializer.serializer.dumps(MyClass()) == "42"
 
 
-def test_recursive_to_dict():
+def test_recursive_to_dict() -> None:
     assert utils.recursive_to_dict({"k": [1, (1.0, {"v": Q("match", key="val")})]}) == {
         "k": [1, (1.0, {"v": {"match": {"key": "val"}}})]
     }
 
 
-def test_attrdict_get():
+def test_attrdict_get() -> None:
     a = utils.AttrDict({"a": {"b": 42, "c": 47}})
     assert a.get("a", {}).get("b", 0) == 42
     assert a.get("a", {}).get("e", 0) == 0
diff --git a/test_opensearchpy/test_helpers/test_validation.py b/test_opensearchpy/test_helpers/test_validation.py
index b86f8002..6841f604 100644
--- a/test_opensearchpy/test_helpers/test_validation.py
+++ b/test_opensearchpy/test_helpers/test_validation.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
@@ -25,6 +26,7 @@
 # under the License.
 
 from datetime import datetime
+from typing import Any
 
 from pytest import raises
 
@@ -42,10 +44,10 @@
 class Author(InnerDoc):
-    name = Text(required=True)
-    email = Text(required=True)
+    name: Any = Text(required=True)
+    email: Any = Text(required=True)
 
-    def clean(self):
+    def clean(self) -> None:
         print(self, type(self), self.name)
         if self.name.lower() not in self.email:
             raise ValidationException("Invalid email!")
@@ -62,7 +64,7 @@ class BlogPostWithStatus(Document):
 
 
 class AutoNowDate(Date):
-    def clean(self, data):
+    def clean(self, data: Any) -> Any:
         if data is None:
             data = datetime.now()
         return super(AutoNowDate, self).clean(data)
@@ -73,15 +75,15 @@ class Log(Document):
     data = Text()
 
 
-def test_required_int_can_be_0():
+def test_required_int_can_be_0() -> None:
     class DT(Document):
         i = Integer(required=True)
 
-    dt = DT(i=0)
+    dt: Any = DT(i=0)
     assert dt.full_clean() is None
 
 
-def test_required_field_cannot_be_empty_list():
+def test_required_field_cannot_be_empty_list() -> None:
     class DT(Document):
         i = Integer(required=True)
 
@@ -90,49 +92,49 @@ class DT(Document):
         dt.full_clean()
 
 
-def test_validation_works_for_lists_of_values():
+def test_validation_works_for_lists_of_values() -> None:
     class DT(Document):
         i = Date(required=True)
 
-    dt = DT(i=[datetime.now(), "not date"])
+    dt1: Any = DT(i=[datetime.now(), "not date"])
     with raises(ValidationException):
-        dt.full_clean()
+        dt1.full_clean()
 
-    dt = DT(i=[datetime.now(), datetime.now()])
-    assert None is dt.full_clean()
+    dt2: Any = DT(i=[datetime.now(), datetime.now()])
+    assert None is dt2.full_clean()
 
 
-def test_field_with_custom_clean():
+def test_field_with_custom_clean() -> None:
     ls = Log()
     ls.full_clean()
 
     assert isinstance(ls.timestamp, datetime)
 
 
-def test_empty_object():
-    d = BlogPost(authors=[{"name": "Guian", "email": "guiang@bitquilltech.com"}])
+def test_empty_object() -> None:
+    d: Any = BlogPost(authors=[{"name": "Guian", "email": "guiang@bitquilltech.com"}])
     d.inner = {}
 
     d.full_clean()
 
 
-def test_missing_required_field_raises_validation_exception():
-    d = BlogPost()
+def test_missing_required_field_raises_validation_exception() -> None:
+    d1: Any = BlogPost()
     with raises(ValidationException):
-        d.full_clean()
+        d1.full_clean()
 
-    d = BlogPost()
-    d.authors.append({"name": "Guian"})
+    d2: Any = BlogPost()
+    d2.authors.append({"name": "Guian"})
     with raises(ValidationException):
-        d.full_clean()
+        d2.full_clean()
 
-    d = BlogPost()
-    d.authors.append({"name": "Guian", "email": "guiang@bitquilltech.com"})
-    d.full_clean()
+    d3: Any = BlogPost()
+    d3.authors.append({"name": "Guian", "email": "guiang@bitquilltech.com"})
+    d3.full_clean()
 
 
-def test_boolean_doesnt_treat_false_as_empty():
-    d = BlogPostWithStatus()
+def test_boolean_doesnt_treat_false_as_empty() -> None:
+    d: Any = BlogPostWithStatus()
     with raises(ValidationException):
         d.full_clean()
     d.published = False
@@ -141,8 +143,10 @@ def test_boolean_doesnt_treat_false_as_empty():
     d.full_clean()
 
 
-def test_custom_validation_on_nested_gets_run():
-    d = BlogPost(authors=[Author(name="Guian", email="king@example.com")], created=None)
+def test_custom_validation_on_nested_gets_run() -> None:
+    d: Any = BlogPost(
+        authors=[Author(name="Guian", email="king@example.com")], created=None
+    )
 
     assert isinstance(d.authors[0], Author)
 
@@ -150,8 +154,8 @@ def test_custom_validation_on_nested_gets_run():
         d.full_clean()
 
 
-def test_accessing_known_fields_returns_empty_value():
-    d = BlogPost()
+def test_accessing_known_fields_returns_empty_value() -> None:
+    d: Any = BlogPost()
 
     assert [] == d.authors
 
@@ -160,8 +164,8 @@ def test_accessing_known_fields_returns_empty_value():
     assert None is d.authors[0].email
 
 
-def test_empty_values_are_not_serialized():
-    d = BlogPost(
+def test_empty_values_are_not_serialized() -> None:
+    d: Any = BlogPost(
         authors=[{"name": "Guian", "email": "guiang@bitquilltech.com"}], created=None
     )
diff --git a/test_opensearchpy/test_helpers/test_wrappers.py b/test_opensearchpy/test_helpers/test_wrappers.py
index c05b9fc3..37ea76b8 100644
--- a/test_opensearchpy/test_helpers/test_wrappers.py
+++ b/test_opensearchpy/test_helpers/test_wrappers.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # SPDX-License-Identifier: Apache-2.0
 #
 # The OpenSearch Contributors require contributions made to
@@ -25,13 +26,14 @@
 # under the License.
from datetime import datetime, timedelta +from typing import Any import pytest from opensearchpy import Range -@pytest.mark.parametrize( +@pytest.mark.parametrize( # type: ignore "kwargs, item", [ ({}, 1), @@ -43,11 +45,11 @@ ({"gt": datetime.now() - timedelta(seconds=10)}, datetime.now()), ], ) -def test_range_contains(kwargs, item): +def test_range_contains(kwargs: Any, item: Any) -> None: assert item in Range(**kwargs) -@pytest.mark.parametrize( +@pytest.mark.parametrize( # type: ignore "kwargs, item", [ ({"gt": -1}, -1), @@ -57,11 +59,11 @@ def test_range_contains(kwargs, item): ({"lte": datetime.now() - timedelta(seconds=10)}, datetime.now()), ], ) -def test_range_not_contains(kwargs, item): +def test_range_not_contains(kwargs: Any, item: Any) -> None: assert item not in Range(**kwargs) -@pytest.mark.parametrize( +@pytest.mark.parametrize( # type: ignore "args,kwargs", [ (({},), {"lt": 42}), @@ -71,12 +73,12 @@ def test_range_not_contains(kwargs, item): ((), {"gt": 1, "gte": 1}), ], ) -def test_range_raises_value_error_on_wrong_params(args, kwargs): +def test_range_raises_value_error_on_wrong_params(args: Any, kwargs: Any) -> None: with pytest.raises(ValueError): Range(*args, **kwargs) -@pytest.mark.parametrize( +@pytest.mark.parametrize( # type: ignore "range,lower,inclusive", [ (Range(gt=1), 1, False), @@ -85,11 +87,11 @@ def test_range_raises_value_error_on_wrong_params(args, kwargs): (Range(lt=42), None, False), ], ) -def test_range_lower(range, lower, inclusive): +def test_range_lower(range: Any, lower: Any, inclusive: Any) -> None: assert (lower, inclusive) == range.lower -@pytest.mark.parametrize( +@pytest.mark.parametrize( # type: ignore "range,upper,inclusive", [ (Range(lt=1), 1, False), @@ -98,5 +100,5 @@ def test_range_lower(range, lower, inclusive): (Range(gt=42), None, False), ], ) -def test_range_upper(range, upper, inclusive): +def test_range_upper(range: Any, upper: Any, inclusive: Any) -> None: assert (upper, inclusive) == range.upper diff --git a/test_opensearchpy/test_serializer.py b/test_opensearchpy/test_serializer.py index b324b53c..d425fabf 100644 --- a/test_opensearchpy/test_serializer.py +++ b/test_opensearchpy/test_serializer.py @@ -30,6 +30,7 @@ import uuid from datetime import datetime from decimal import Decimal +from typing import Any try: import numpy as np @@ -48,26 +49,26 @@ from .test_cases import SkipTest, TestCase -def requires_numpy_and_pandas(): +def requires_numpy_and_pandas() -> None: if np is None or pd is None: raise SkipTest("Test requires numpy or pandas to be available") class TestJSONSerializer(TestCase): - def test_datetime_serialization(self): + def test_datetime_serialization(self) -> None: self.assertEqual( '{"d":"2010-10-01T02:30:00"}', JSONSerializer().dumps({"d": datetime(2010, 10, 1, 2, 30)}), ) - def test_decimal_serialization(self): + def test_decimal_serialization(self) -> None: requires_numpy_and_pandas() if sys.version_info[:2] == (2, 6): raise SkipTest("Float rounding is broken in 2.6.") self.assertEqual('{"d":3.8}', JSONSerializer().dumps({"d": Decimal("3.8")})) - def test_uuid_serialization(self): + def test_uuid_serialization(self) -> None: self.assertEqual( '{"d":"00000000-0000-0000-0000-000000000003"}', JSONSerializer().dumps( @@ -75,12 +76,12 @@ def test_uuid_serialization(self): ), ) - def test_serializes_numpy_bool(self): + def test_serializes_numpy_bool(self) -> None: requires_numpy_and_pandas() self.assertEqual('{"d":true}', JSONSerializer().dumps({"d": np.bool_(True)})) - def test_serializes_numpy_integers(self): + 
def test_serializes_numpy_integers(self) -> None: requires_numpy_and_pandas() ser = JSONSerializer() @@ -101,7 +102,7 @@ def test_serializes_numpy_integers(self): ): self.assertEqual(ser.dumps({"d": np_type(1)}), '{"d":1}') - def test_serializes_numpy_floats(self): + def test_serializes_numpy_floats(self) -> None: requires_numpy_and_pandas() ser = JSONSerializer() @@ -114,7 +115,7 @@ def test_serializes_numpy_floats(self): ser.dumps({"d": np_type(1.2)}), r'^\{"d":1\.2[\d]*}$' ) - def test_serializes_numpy_datetime(self): + def test_serializes_numpy_datetime(self) -> None: requires_numpy_and_pandas() self.assertEqual( @@ -122,7 +123,7 @@ def test_serializes_numpy_datetime(self): JSONSerializer().dumps({"d": np.datetime64("2010-10-01T02:30:00")}), ) - def test_serializes_numpy_ndarray(self): + def test_serializes_numpy_ndarray(self) -> None: requires_numpy_and_pandas() self.assertEqual( @@ -135,7 +136,7 @@ def test_serializes_numpy_ndarray(self): JSONSerializer().dumps({"d": np.zeros((2, 2), dtype=np.uint8)}), ) - def test_serializes_numpy_nan_to_nan(self): + def test_serializes_numpy_nan_to_nan(self) -> None: requires_numpy_and_pandas() self.assertEqual( @@ -143,7 +144,7 @@ def test_serializes_numpy_nan_to_nan(self): JSONSerializer().dumps({"d": np.nan}), ) - def test_serializes_pandas_timestamp(self): + def test_serializes_pandas_timestamp(self) -> None: requires_numpy_and_pandas() self.assertEqual( @@ -151,7 +152,7 @@ def test_serializes_pandas_timestamp(self): JSONSerializer().dumps({"d": pd.Timestamp("2010-10-01T02:30:00")}), ) - def test_serializes_pandas_series(self): + def test_serializes_pandas_series(self) -> None: requires_numpy_and_pandas() self.assertEqual( @@ -159,7 +160,7 @@ def test_serializes_pandas_series(self): JSONSerializer().dumps({"d": pd.Series(["a", "b", "c", "d"])}), ) - def test_serializes_pandas_na(self): + def test_serializes_pandas_na(self) -> None: requires_numpy_and_pandas() if not hasattr(pd, "NA"): # pandas.NA added in v1 @@ -169,14 +170,14 @@ def test_serializes_pandas_na(self): JSONSerializer().dumps({"d": pd.NA}), ) - def test_raises_serialization_error_pandas_nat(self): + def test_raises_serialization_error_pandas_nat(self) -> None: requires_numpy_and_pandas() if not hasattr(pd, "NaT"): raise SkipTest("pandas.NaT required") self.assertRaises(SerializationError, JSONSerializer().dumps, {"d": pd.NaT}) - def test_serializes_pandas_category(self): + def test_serializes_pandas_category(self) -> None: requires_numpy_and_pandas() cat = pd.Categorical(["a", "c", "b", "a"], categories=["a", "b", "c"]) @@ -191,34 +192,34 @@ def test_serializes_pandas_category(self): JSONSerializer().dumps({"d": cat}), ) - def test_raises_serialization_error_on_dump_error(self): + def test_raises_serialization_error_on_dump_error(self) -> None: self.assertRaises(SerializationError, JSONSerializer().dumps, object()) - def test_raises_serialization_error_on_load_error(self): + def test_raises_serialization_error_on_load_error(self) -> None: self.assertRaises(SerializationError, JSONSerializer().loads, object()) self.assertRaises(SerializationError, JSONSerializer().loads, "") self.assertRaises(SerializationError, JSONSerializer().loads, "{{") - def test_strings_are_left_untouched(self): + def test_strings_are_left_untouched(self) -> None: self.assertEqual("你好", JSONSerializer().dumps("你好")) class TestTextSerializer(TestCase): - def test_strings_are_left_untouched(self): + def test_strings_are_left_untouched(self) -> None: self.assertEqual("你好", TextSerializer().dumps("你好")) - def 
test_raises_serialization_error_on_dump_error(self): + def test_raises_serialization_error_on_dump_error(self) -> None: self.assertRaises(SerializationError, TextSerializer().dumps, {}) class TestDeserializer(TestCase): - def setup_method(self, _): + def setup_method(self, _: Any) -> None: self.de = Deserializer(DEFAULT_SERIALIZERS) - def test_deserializes_json_by_default(self): + def test_deserializes_json_by_default(self) -> None: self.assertEqual({"some": "data"}, self.de.loads('{"some":"data"}')) - def test_deserializes_text_with_correct_ct(self): + def test_deserializes_text_with_correct_ct(self) -> None: self.assertEqual( '{"some":"data"}', self.de.loads('{"some":"data"}', "text/plain") ) @@ -227,10 +228,10 @@ def test_deserializes_text_with_correct_ct(self): self.de.loads('{"some":"data"}', "text/plain; charset=whatever"), ) - def test_raises_serialization_error_on_unknown_mimetype(self): + def test_raises_serialization_error_on_unknown_mimetype(self) -> None: self.assertRaises(SerializationError, self.de.loads, "{}", "text/html") def test_raises_improperly_configured_when_default_mimetype_cannot_be_deserialized( self, - ): + ) -> None: self.assertRaises(ImproperlyConfigured, Deserializer, {}) diff --git a/test_opensearchpy/test_server/__init__.py b/test_opensearchpy/test_server/__init__.py index 78d29958..f6856bc0 100644 --- a/test_opensearchpy/test_server/__init__.py +++ b/test_opensearchpy/test_server/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,6 +26,7 @@ # under the License. +from typing import Any from unittest import SkipTest from opensearchpy.helpers import test @@ -33,7 +35,7 @@ client = None -def get_client(**kwargs): +def get_client(**kwargs: Any) -> Any: global client if client is False: raise SkipTest("No client is available") @@ -59,11 +61,11 @@ def get_client(**kwargs): return new_client -def setup_module(): +def setup_module() -> None: get_client() class OpenSearchTestCase(BaseTestCase): @staticmethod - def _get_client(**kwargs): + def _get_client(**kwargs: Any) -> Any: return get_client(**kwargs) diff --git a/test_opensearchpy/test_server/conftest.py b/test_opensearchpy/test_server/conftest.py index 03306fcf..7acd581b 100644 --- a/test_opensearchpy/test_server/conftest.py +++ b/test_opensearchpy/test_server/conftest.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -27,6 +28,7 @@ import os import time +from typing import Any import pytest @@ -39,11 +41,11 @@ # Used for OPENSEARCH_VERSION = "" OPENSEARCH_BUILD_HASH = "" -OPENSEARCH_REST_API_TESTS = [] +OPENSEARCH_REST_API_TESTS: Any = [] -@pytest.fixture(scope="session") -def sync_client_factory(): +@pytest.fixture(scope="session") # type: ignore +def sync_client_factory() -> Any: client = None try: # Configure the client optionally with an HTTP conn class @@ -62,7 +64,7 @@ def sync_client_factory(): # We do this little dance with the URL to force # Requests to respect 'headers: None' within rest API spec tests. 
client = opensearchpy.OpenSearch( - OPENSEARCH_URL.replace("elastic:changeme@", ""), **kw + OPENSEARCH_URL.replace("elastic:changeme@", ""), **kw # type: ignore ) # Wait for the cluster to report a status of 'yellow' @@ -82,8 +84,8 @@ def sync_client_factory(): client.close() -@pytest.fixture(scope="function") -def sync_client(sync_client_factory): +@pytest.fixture(scope="function") # type: ignore +def sync_client(sync_client_factory: Any) -> Any: try: yield sync_client_factory finally: diff --git a/test_opensearchpy/test_server/test_clients.py b/test_opensearchpy/test_server/test_clients.py index 2d5c4155..32550a03 100644 --- a/test_opensearchpy/test_server/test_clients.py +++ b/test_opensearchpy/test_server/test_clients.py @@ -32,19 +32,19 @@ class TestUnicode(OpenSearchTestCase): - def test_indices_analyze(self): + def test_indices_analyze(self) -> None: self.client.indices.analyze(body='{"text": "привет"}') class TestBulk(OpenSearchTestCase): - def test_bulk_works_with_string_body(self): + def test_bulk_works_with_string_body(self) -> None: docs = '{ "index" : { "_index" : "bulk_test_index", "_id" : "1" } }\n{"answer": 42}' response = self.client.bulk(body=docs) self.assertFalse(response["errors"]) self.assertEqual(1, len(response["items"])) - def test_bulk_works_with_bytestring_body(self): + def test_bulk_works_with_bytestring_body(self) -> None: docs = b'{ "index" : { "_index" : "bulk_test_index", "_id" : "2" } }\n{"answer": 42}' response = self.client.bulk(body=docs) diff --git a/test_opensearchpy/test_server/test_helpers/__init__.py b/test_opensearchpy/test_server/test_helpers/__init__.py index 7e52ae22..392fa5bd 100644 --- a/test_opensearchpy/test_server/test_helpers/__init__.py +++ b/test_opensearchpy/test_server/test_helpers/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_helpers/conftest.py b/test_opensearchpy/test_server/test_helpers/conftest.py index 4e167d34..35c92e14 100644 --- a/test_opensearchpy/test_server/test_helpers/conftest.py +++ b/test_opensearchpy/test_server/test_helpers/conftest.py @@ -27,6 +27,7 @@ import re from datetime import datetime +from typing import Any from pytest import fixture @@ -44,32 +45,32 @@ from .test_document import Comment, History, PullRequest, User -@fixture(scope="session") -def client(): +@fixture(scope="session") # type: ignore +def client() -> Any: client = get_test_client(verify_certs=False, http_auth=("admin", "admin")) add_connection("default", client) return client -@fixture(scope="session") -def opensearch_version(client): +@fixture(scope="session") # type: ignore +def opensearch_version(client: Any) -> Any: info = client.info() print(info) yield tuple( int(x) - for x in re.match(r"^([0-9.]+)", info["version"]["number"]).group(1).split(".") + for x in re.match(r"^([0-9.]+)", info["version"]["number"]).group(1).split(".") # type: ignore ) -@fixture -def write_client(client): +@fixture # type: ignore +def write_client(client: Any) -> Any: yield client client.indices.delete("test-*", ignore=404) client.indices.delete_template("test-template", ignore=404) -@fixture(scope="session") -def data_client(client): +@fixture(scope="session") # type: ignore +def data_client(client: Any) -> Any: # create mappings create_git_index(client, "git") create_flat_git_index(client, "flat-git") @@ -81,8 +82,8 @@ def data_client(client): client.indices.delete("flat-git", ignore=404) -@fixture -def 
pull_request(write_client): +@fixture # type: ignore +def pull_request(write_client: Any) -> Any: PullRequest.init() pr = PullRequest( _id=42, @@ -105,8 +106,8 @@ def pull_request(write_client): return pr -@fixture -def setup_ubq_tests(client): +@fixture # type: ignore +def setup_ubq_tests(client: Any) -> str: index = "test-git" create_git_index(client, index) bulk(client, TEST_GIT_DATA, raise_on_error=True, refresh=True) diff --git a/test_opensearchpy/test_server/test_helpers/test_actions.py b/test_opensearchpy/test_server/test_helpers/test_actions.py index 2230edb0..ab5f66e2 100644 --- a/test_opensearchpy/test_server/test_helpers/test_actions.py +++ b/test_opensearchpy/test_server/test_helpers/test_actions.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,6 +26,8 @@ # under the License. +from typing import Any + from mock import patch from opensearchpy import TransportError, helpers @@ -36,15 +39,18 @@ class FailingBulkClient(object): def __init__( - self, client, fail_at=(2,), fail_with=TransportError(599, "Error!", {}) - ): + self, + client: Any, + fail_at: Any = (2,), + fail_with: Any = TransportError(599, "Error!", {}), + ) -> None: self.client = client self._called = 0 self._fail_at = fail_at self.transport = client.transport self._fail_with = fail_with - def bulk(self, *args, **kwargs): + def bulk(self, *args: Any, **kwargs: Any) -> Any: self._called += 1 if self._called in self._fail_at: raise self._fail_with @@ -52,7 +58,7 @@ def bulk(self, *args, **kwargs): class TestStreamingBulk(OpenSearchTestCase): - def test_actions_remain_unchanged(self): + def test_actions_remain_unchanged(self) -> None: actions = [{"_id": 1}, {"_id": 2}] for ok, item in helpers.streaming_bulk( self.client, actions, index="test-index" @@ -60,7 +66,7 @@ def test_actions_remain_unchanged(self): self.assertTrue(ok) self.assertEqual([{"_id": 1}, {"_id": 2}], actions) - def test_all_documents_get_inserted(self): + def test_all_documents_get_inserted(self) -> None: docs = [{"answer": x, "_id": x} for x in range(100)] for ok, item in helpers.streaming_bulk( self.client, docs, index="test-index", refresh=True @@ -72,7 +78,7 @@ def test_all_documents_get_inserted(self): {"answer": 42}, self.client.get(index="test-index", id=42)["_source"] ) - def test_all_errors_from_chunk_are_raised_on_failure(self): + def test_all_errors_from_chunk_are_raised_on_failure(self) -> None: self.client.indices.create( "i", { @@ -92,7 +98,7 @@ def test_all_errors_from_chunk_are_raised_on_failure(self): else: assert False, "exception should have been raised" - def test_different_op_types(self): + def test_different_op_types(self) -> Any: if self.opensearch_version() < (0, 90, 1): raise SkipTest("update supported since 0.90.1") self.client.index(index="i", id=45, body={}) @@ -114,7 +120,7 @@ def test_different_op_types(self): self.assertEqual({"answer": 42}, self.client.get(index="i", id=42)["_source"]) self.assertEqual({"f": "v"}, self.client.get(index="i", id=47)["_source"]) - def test_transport_error_can_becaught(self): + def test_transport_error_can_becaught(self) -> None: failing_client = FailingBulkClient(self.client) docs = [ {"_index": "i", "_id": 47, "f": "v"}, @@ -150,7 +156,7 @@ def test_transport_error_can_becaught(self): results[1][1], ) - def test_rejected_documents_are_retried(self): + def test_rejected_documents_are_retried(self) -> None: failing_client = FailingBulkClient( self.client, fail_with=TransportError(429, 
"Rejected!", {}) ) @@ -177,7 +183,7 @@ def test_rejected_documents_are_retried(self): self.assertEqual({"value": 3, "relation": "eq"}, res["hits"]["total"]) self.assertEqual(4, failing_client._called) - def test_rejected_documents_are_retried_at_most_max_retries_times(self): + def test_rejected_documents_are_retried_at_most_max_retries_times(self) -> None: failing_client = FailingBulkClient( self.client, fail_at=(1, 2), fail_with=TransportError(429, "Rejected!", {}) ) @@ -205,14 +211,14 @@ def test_rejected_documents_are_retried_at_most_max_retries_times(self): self.assertEqual({"value": 2, "relation": "eq"}, res["hits"]["total"]) self.assertEqual(4, failing_client._called) - def test_transport_error_is_raised_with_max_retries(self): + def test_transport_error_is_raised_with_max_retries(self) -> None: failing_client = FailingBulkClient( self.client, fail_at=(1, 2, 3, 4), fail_with=TransportError(429, "Rejected!", {}), ) - def streaming_bulk(): + def streaming_bulk() -> Any: results = list( helpers.streaming_bulk( failing_client, @@ -229,7 +235,7 @@ def streaming_bulk(): class TestBulk(OpenSearchTestCase): - def test_bulk_works_with_single_item(self): + def test_bulk_works_with_single_item(self) -> None: docs = [{"answer": 42, "_id": 1}] success, failed = helpers.bulk( self.client, docs, index="test-index", refresh=True @@ -242,7 +248,7 @@ def test_bulk_works_with_single_item(self): {"answer": 42}, self.client.get(index="test-index", id=1)["_source"] ) - def test_all_documents_get_inserted(self): + def test_all_documents_get_inserted(self) -> None: docs = [{"answer": x, "_id": x} for x in range(100)] success, failed = helpers.bulk( self.client, docs, index="test-index", refresh=True @@ -255,7 +261,7 @@ def test_all_documents_get_inserted(self): {"answer": 42}, self.client.get(index="test-index", id=42)["_source"] ) - def test_stats_only_reports_numbers(self): + def test_stats_only_reports_numbers(self) -> None: docs = [{"answer": x} for x in range(100)] success, failed = helpers.bulk( self.client, docs, index="test-index", refresh=True, stats_only=True @@ -265,7 +271,7 @@ def test_stats_only_reports_numbers(self): self.assertEqual(0, failed) self.assertEqual(100, self.client.count(index="test-index")["count"]) - def test_errors_are_reported_correctly(self): + def test_errors_are_reported_correctly(self) -> None: self.client.indices.create( "i", { @@ -292,7 +298,7 @@ def test_errors_are_reported_correctly(self): or "mapper_parsing_exception" in repr(error["index"]["error"]) ) - def test_error_is_raised(self): + def test_error_is_raised(self) -> None: self.client.indices.create( "i", { @@ -310,7 +316,7 @@ def test_error_is_raised(self): index="i", ) - def test_ignore_error_if_raised(self): + def test_ignore_error_if_raised(self) -> None: # ignore the status code 400 in tuple helpers.bulk( self.client, [{"a": 42}, {"a": "c"}], index="i", ignore_status=(400,) @@ -343,7 +349,7 @@ def test_ignore_error_if_raised(self): failing_client = FailingBulkClient(self.client) helpers.bulk(failing_client, [{"a": 42}], index="i", ignore_status=(599,)) - def test_errors_are_collected_properly(self): + def test_errors_are_collected_properly(self) -> None: self.client.indices.create( "i", { @@ -378,12 +384,12 @@ class TestScan(OpenSearchTestCase): }, ] - def teardown_method(self, m): + def teardown_method(self, m: Any) -> None: self.client.transport.perform_request("DELETE", "/_search/scroll/_all") super(TestScan, self).teardown_method(m) - def test_order_can_be_preserved(self): - bulk = [] + def 
test_order_can_be_preserved(self) -> None: + bulk: Any = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_id": x}}) bulk.append({"answer": x, "correct": x == 42}) @@ -402,8 +408,8 @@ def test_order_can_be_preserved(self): self.assertEqual(list(map(str, range(100))), list(d["_id"] for d in docs)) self.assertEqual(list(range(100)), list(d["_source"]["answer"] for d in docs)) - def test_all_documents_are_read(self): - bulk = [] + def test_all_documents_are_read(self) -> None: + bulk: Any = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_id": x}}) bulk.append({"answer": x, "correct": x == 42}) @@ -415,8 +421,8 @@ def test_all_documents_are_read(self): self.assertEqual(set(map(str, range(100))), set(d["_id"] for d in docs)) self.assertEqual(set(range(100)), set(d["_source"]["answer"] for d in docs)) - def test_scroll_error(self): - bulk = [] + def test_scroll_error(self) -> None: + bulk: Any = [] for x in range(4): bulk.append({"index": {"_index": "test_index"}}) bulk.append({"value": x}) @@ -450,7 +456,7 @@ def test_scroll_error(self): self.assertEqual(len(data), 3) self.assertEqual(data[-1], {"scroll_data": 42}) - def test_initial_search_error(self): + def test_initial_search_error(self) -> None: with patch.object(self, "client") as client_mock: client_mock.search.return_value = { "_scroll_id": "dummy_id", @@ -476,7 +482,7 @@ def test_initial_search_error(self): self.assertEqual(data, [{"search_data": 1}]) client_mock.scroll.assert_not_called() - def test_no_scroll_id_fast_route(self): + def test_no_scroll_id_fast_route(self) -> None: with patch.object(self, "client") as client_mock: client_mock.search.return_value = {"no": "_scroll_id"} data = list(helpers.scan(self.client, index="test_index")) @@ -485,7 +491,7 @@ def test_no_scroll_id_fast_route(self): client_mock.scroll.assert_not_called() client_mock.clear_scroll.assert_not_called() - def test_scan_auth_kwargs_forwarded(self): + def test_scan_auth_kwargs_forwarded(self) -> None: for key, val in { "api_key": ("name", "value"), "http_auth": ("username", "password"), @@ -504,7 +510,7 @@ def test_scan_auth_kwargs_forwarded(self): } client_mock.clear_scroll.return_value = {} - data = list(helpers.scan(self.client, index="test_index", **{key: val})) + data = list(helpers.scan(self.client, index="test_index", **{key: val})) # type: ignore self.assertEqual(data, [{"search_data": 1}]) @@ -517,7 +523,7 @@ def test_scan_auth_kwargs_forwarded(self): ): self.assertEqual(api_mock.call_args[1][key], val) - def test_scan_auth_kwargs_favor_scroll_kwargs_option(self): + def test_scan_auth_kwargs_favor_scroll_kwargs_option(self) -> None: with patch.object(self, "client") as client_mock: client_mock.search.return_value = { "_scroll_id": "scroll_id", @@ -549,8 +555,8 @@ def test_scan_auth_kwargs_favor_scroll_kwargs_option(self): self.assertEqual(client_mock.scroll.call_args[1]["sort"], "asc") @patch("opensearchpy.helpers.actions.logger") - def test_logger(self, logger_mock): - bulk = [] + def test_logger(self, logger_mock: Any) -> None: + bulk: Any = [] for x in range(4): bulk.append({"index": {"_index": "test_index"}}) bulk.append({"value": x}) @@ -584,8 +590,8 @@ def test_logger(self, logger_mock): pass logger_mock.warning.assert_called() - def test_clear_scroll(self): - bulk = [] + def test_clear_scroll(self) -> None: + bulk: Any = [] for x in range(4): bulk.append({"index": {"_index": "test_index"}}) bulk.append({"value": x}) @@ -611,7 +617,7 @@ def test_clear_scroll(self): ) spy.assert_not_called() - def 
test_shards_no_skipped_field(self): + def test_shards_no_skipped_field(self) -> None: with patch.object(self, "client") as client_mock: client_mock.search.return_value = { "_scroll_id": "dummy_id", @@ -640,8 +646,8 @@ def test_shards_no_skipped_field(self): class TestReindex(OpenSearchTestCase): - def setup_method(self, _): - bulk = [] + def setup_method(self, _: Any) -> None: + bulk: Any = [] for x in range(100): bulk.append({"index": {"_index": "test_index", "_id": x}}) bulk.append( @@ -653,7 +659,7 @@ def setup_method(self, _): ) self.client.bulk(bulk, refresh=True) - def test_reindex_passes_kwargs_to_scan_and_bulk(self): + def test_reindex_passes_kwargs_to_scan_and_bulk(self) -> None: helpers.reindex( self.client, "test_index", @@ -672,7 +678,7 @@ def test_reindex_passes_kwargs_to_scan_and_bulk(self): self.client.get(index="prod_index", id=42)["_source"], ) - def test_reindex_accepts_a_query(self): + def test_reindex_accepts_a_query(self) -> None: helpers.reindex( self.client, "test_index", @@ -691,7 +697,7 @@ def test_reindex_accepts_a_query(self): self.client.get(index="prod_index", id=42)["_source"], ) - def test_all_documents_get_moved(self): + def test_all_documents_get_moved(self) -> None: helpers.reindex(self.client, "test_index", "prod_index") self.client.indices.refresh() @@ -710,7 +716,7 @@ def test_all_documents_get_moved(self): class TestParentChildReindex(OpenSearchTestCase): - def setup_method(self, _): + def setup_method(self, _: Any) -> None: body = { "settings": {"number_of_shards": 1, "number_of_replicas": 0}, "mappings": { @@ -736,7 +742,7 @@ def setup_method(self, _): ) self.client.indices.refresh(index="test-index") - def test_children_are_reindexed_correctly(self): + def test_children_are_reindexed_correctly(self) -> None: helpers.reindex(self.client, "test-index", "real-index") self.assertEqual( diff --git a/test_opensearchpy/test_server/test_helpers/test_analysis.py b/test_opensearchpy/test_server/test_helpers/test_analysis.py index 9b4f5849..e965e05b 100644 --- a/test_opensearchpy/test_server/test_helpers/test_analysis.py +++ b/test_opensearchpy/test_server/test_helpers/test_analysis.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -24,10 +25,12 @@ # specific language governing permissions and limitations # under the License. +from typing import Any + from opensearchpy import analyzer, token_filter, tokenizer -def test_simulate_with_just__builtin_tokenizer(client): +def test_simulate_with_just__builtin_tokenizer(client: Any) -> None: a = analyzer("my-analyzer", tokenizer="keyword") tokens = a.simulate("Hello World!", using=client).tokens @@ -35,7 +38,7 @@ def test_simulate_with_just__builtin_tokenizer(client): assert tokens[0].token == "Hello World!" 
-def test_simulate_complex(client): +def test_simulate_complex(client: Any) -> None: a = analyzer( "my-analyzer", tokenizer=tokenizer("split_words", "simple_pattern_split", pattern=":"), @@ -48,7 +51,7 @@ def test_simulate_complex(client): assert ["this", "works"] == [t.token for t in tokens] -def test_simulate_builtin(client): +def test_simulate_builtin(client: Any) -> None: a = analyzer("my-analyzer", "english") tokens = a.simulate("fixes running").tokens diff --git a/test_opensearchpy/test_server/test_helpers/test_count.py b/test_opensearchpy/test_server/test_helpers/test_count.py index f8aa612a..65f424d1 100644 --- a/test_opensearchpy/test_server/test_helpers/test_count.py +++ b/test_opensearchpy/test_server/test_helpers/test_count.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -24,15 +25,17 @@ # specific language governing permissions and limitations # under the License. +from typing import Any + from opensearchpy.helpers.search import Q, Search -def test_count_all(data_client): +def test_count_all(data_client: Any) -> None: s = Search(using=data_client).index("git") assert 53 == s.count() -def test_count_prefetch(data_client, mocker): +def test_count_prefetch(data_client: Any, mocker: Any) -> None: mocker.spy(data_client, "count") search = Search(using=data_client).index("git") @@ -45,7 +48,7 @@ def test_count_prefetch(data_client, mocker): assert data_client.count.call_count == 1 -def test_count_filter(data_client): +def test_count_filter(data_client: Any) -> None: s = Search(using=data_client).index("git").filter(~Q("exists", field="parent_shas")) # initial commit + repo document assert 2 == s.count() diff --git a/test_opensearchpy/test_server/test_helpers/test_data.py b/test_opensearchpy/test_server/test_helpers/test_data.py index 20b63e39..11ad915f 100644 --- a/test_opensearchpy/test_server/test_helpers/test_data.py +++ b/test_opensearchpy/test_server/test_helpers/test_data.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -26,8 +27,10 @@ from __future__ import unicode_literals +from typing import Any, Dict -def create_flat_git_index(client, index): + +def create_flat_git_index(client: Any, index: Any) -> None: # we will use user on several places user_mapping = { "properties": {"name": {"type": "text", "fields": {"raw": {"type": "keyword"}}}} @@ -70,7 +73,7 @@ def create_flat_git_index(client, index): ) -def create_git_index(client, index): +def create_git_index(client: Any, index: Any) -> None: # we will use user on several places user_mapping = { "properties": {"name": {"type": "text", "fields": {"raw": {"type": "keyword"}}}} @@ -964,7 +967,7 @@ def create_git_index(client, index): ], "committer": {"name": "Honza Kr\xe1l", "email": "honza.kral@gmail.com"}, "stats": {"deletions": 0, "insertions": 53, "lines": 53, "files": 2}, - "description": "From_dict, Q(dict) and bool query parses it's subqueries", + "description": "From_dict, Q(dict) and bool query parses its subqueries", "author": {"name": "Honza Kr\xe1l", "email": "honza.kral@gmail.com"}, "parent_shas": ["d407f99d1959b7b862a541c066d9fd737ce913f3"], "committed_date": "2014-03-06T20:24:30", @@ -1092,7 +1095,7 @@ def create_git_index(client, index): ] -def flatten_doc(d): +def flatten_doc(d: Any) -> Dict[str, Any]: src = d["_source"].copy() del src["commit_repo"] return {"_index": "flat-git", "_id": d["_id"], "_source": src} @@ 
-1101,7 +1104,7 @@ def flatten_doc(d): FLAT_DATA = [flatten_doc(d) for d in DATA if "routing" in d] -def create_test_git_data(d): +def create_test_git_data(d: Any) -> Dict[str, Any]: src = d["_source"].copy() return { "_index": "test-git", diff --git a/test_opensearchpy/test_server/test_helpers/test_document.py b/test_opensearchpy/test_server/test_helpers/test_document.py index 1bb6ce12..ad0bf289 100644 --- a/test_opensearchpy/test_server/test_helpers/test_document.py +++ b/test_opensearchpy/test_server/test_helpers/test_document.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -26,6 +27,7 @@ from datetime import datetime from ipaddress import ip_address +from typing import Any import pytest from pytest import raises @@ -77,7 +79,7 @@ class Repository(Document): tags = Keyword() @classmethod - def search(cls): + def search(cls, using: Any = None, index: Any = None) -> Any: return super(Repository, cls).search().filter("term", commit_repo="repo") class Index: @@ -130,7 +132,7 @@ class Index: name = "test-serialization" -def test_serialization(write_client): +def test_serialization(write_client: Any) -> None: SerializationDoc.init() write_client.index( index="test-serialization", @@ -160,7 +162,7 @@ def test_serialization(write_client): } -def test_nested_inner_hits_are_wrapped_properly(pull_request): +def test_nested_inner_hits_are_wrapped_properly(pull_request: Any) -> None: history_query = Q( "nested", path="comments.history", @@ -188,7 +190,7 @@ def test_nested_inner_hits_are_wrapped_properly(pull_request): assert "score" in history.meta -def test_nested_inner_hits_are_deserialized_properly(pull_request): +def test_nested_inner_hits_are_deserialized_properly(pull_request: Any) -> None: s = PullRequest.search().query( "nested", inner_hits={}, @@ -203,7 +205,7 @@ def test_nested_inner_hits_are_deserialized_properly(pull_request): assert isinstance(pr.comments[0].created_at, datetime) -def test_nested_top_hits_are_wrapped_properly(pull_request): +def test_nested_top_hits_are_wrapped_properly(pull_request: Any) -> None: s = PullRequest.search() s.aggs.bucket("comments", "nested", path="comments").metric( "hits", "top_hits", size=1 @@ -215,7 +217,7 @@ def test_nested_top_hits_are_wrapped_properly(pull_request): assert isinstance(r.aggregations.comments.hits.hits[0], Comment) -def test_update_object_field(write_client): +def test_update_object_field(write_client: Any) -> None: Wiki.init() w = Wiki( owner=User(name="Honza Kral"), @@ -235,7 +237,7 @@ def test_update_object_field(write_client): assert w.ranked == {"test1": 0.1, "topic2": 0.2} -def test_update_script(write_client): +def test_update_script(write_client: Any) -> None: Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) w.save() @@ -245,7 +247,7 @@ def test_update_script(write_client): assert w.views == 47 -def test_update_retry_on_conflict(write_client): +def test_update_retry_on_conflict(write_client: Any) -> None: Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) w.save() @@ -259,8 +261,8 @@ def test_update_retry_on_conflict(write_client): assert w.views == 52 -@pytest.mark.parametrize("retry_on_conflict", [None, 0]) -def test_update_conflicting_version(write_client, retry_on_conflict): +@pytest.mark.parametrize("retry_on_conflict", [None, 0]) # type: ignore +def test_update_conflicting_version(write_client: Any, retry_on_conflict: Any) -> None: Wiki.init() w = 
Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) w.save() @@ -277,7 +279,7 @@ def test_update_conflicting_version(write_client, retry_on_conflict): ) -def test_save_and_update_return_doc_meta(write_client): +def test_save_and_update_return_doc_meta(write_client: Any) -> None: Wiki.init() w = Wiki(owner=User(name="Honza Kral"), _id="opensearch-py", views=42) resp = w.save(return_doc_meta=True) @@ -301,31 +303,33 @@ def test_save_and_update_return_doc_meta(write_client): assert resp.keys().__contains__("_version") -def test_init(write_client): +def test_init(write_client: Any) -> None: Repository.init(index="test-git") assert write_client.indices.exists(index="test-git") -def test_get_raises_404_on_index_missing(data_client): +def test_get_raises_404_on_index_missing(data_client: Any) -> None: with raises(NotFoundError): Repository.get("opensearch-dsl-php", index="not-there") -def test_get_raises_404_on_non_existent_id(data_client): +def test_get_raises_404_on_non_existent_id(data_client: Any) -> None: with raises(NotFoundError): Repository.get("opensearch-dsl-php") -def test_get_returns_none_if_404_ignored(data_client): +def test_get_returns_none_if_404_ignored(data_client: Any) -> None: assert None is Repository.get("opensearch-dsl-php", ignore=404) -def test_get_returns_none_if_404_ignored_and_index_doesnt_exist(data_client): +def test_get_returns_none_if_404_ignored_and_index_doesnt_exist( + data_client: Any, +) -> None: assert None is Repository.get("42", index="not-there", ignore=404) -def test_get(data_client): +def test_get(data_client: Any) -> None: opensearch_repo = Repository.get("opensearch-py") assert isinstance(opensearch_repo, Repository) @@ -333,15 +337,15 @@ def test_get(data_client): assert datetime(2014, 3, 3) == opensearch_repo.created_at -def test_exists_return_true(data_client): +def test_exists_return_true(data_client: Any) -> None: assert Repository.exists("opensearch-py") -def test_exists_false(data_client): +def test_exists_false(data_client: Any) -> None: assert not Repository.exists("opensearch-dsl-php") -def test_get_with_tz_date(data_client): +def test_get_with_tz_date(data_client: Any) -> None: first_commit = Commit.get( id="3ca6e1e73a071a705b4babd2f581c91a2a3e5037", routing="opensearch-py" ) @@ -353,7 +357,7 @@ def test_get_with_tz_date(data_client): ) -def test_save_with_tz_date(data_client): +def test_save_with_tz_date(data_client: Any) -> None: tzinfo = timezone("Europe/Prague") first_commit = Commit.get( id="3ca6e1e73a071a705b4babd2f581c91a2a3e5037", routing="opensearch-py" @@ -380,7 +384,7 @@ def test_save_with_tz_date(data_client): ] -def test_mget(data_client): +def test_mget(data_client: Any) -> None: commits = Commit.mget(COMMIT_DOCS_WITH_MISSING) assert commits[0] is None assert commits[1].meta.id == "3ca6e1e73a071a705b4babd2f581c91a2a3e5037" @@ -388,23 +392,23 @@ def test_mget(data_client): assert commits[3].meta.id == "eb3e543323f189fd7b698e66295427204fff5755" -def test_mget_raises_exception_when_missing_param_is_invalid(data_client): +def test_mget_raises_exception_when_missing_param_is_invalid(data_client: Any) -> None: with raises(ValueError): Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="raj") -def test_mget_raises_404_when_missing_param_is_raise(data_client): +def test_mget_raises_404_when_missing_param_is_raise(data_client: Any) -> None: with raises(NotFoundError): Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="raise") -def test_mget_ignores_missing_docs_when_missing_param_is_skip(data_client): +def 
test_mget_ignores_missing_docs_when_missing_param_is_skip(data_client: Any) -> None: commits = Commit.mget(COMMIT_DOCS_WITH_MISSING, missing="skip") assert commits[0].meta.id == "3ca6e1e73a071a705b4babd2f581c91a2a3e5037" assert commits[1].meta.id == "eb3e543323f189fd7b698e66295427204fff5755" -def test_update_works_from_search_response(data_client): +def test_update_works_from_search_response(data_client: Any) -> None: opensearch_repo = Repository.search().execute()[0] opensearch_repo.update(owner={"other_name": "opensearchpy"}) @@ -415,7 +419,7 @@ def test_update_works_from_search_response(data_client): assert "opensearch" == new_version.owner.name -def test_update(data_client): +def test_update(data_client: Any) -> None: opensearch_repo = Repository.get("opensearch-py") v = opensearch_repo.meta.version @@ -439,7 +443,7 @@ def test_update(data_client): assert "primary_term" in new_version.meta -def test_save_updates_existing_doc(data_client): +def test_save_updates_existing_doc(data_client: Any) -> None: opensearch_repo = Repository.get("opensearch-py") opensearch_repo.new_field = "testing-save" @@ -452,7 +456,7 @@ def test_save_updates_existing_doc(data_client): assert new_repo["_seq_no"] == opensearch_repo.meta.seq_no -def test_save_automatically_uses_seq_no_and_primary_term(data_client): +def test_save_automatically_uses_seq_no_and_primary_term(data_client: Any) -> None: opensearch_repo = Repository.get("opensearch-py") opensearch_repo.meta.seq_no += 1 @@ -460,7 +464,7 @@ def test_save_automatically_uses_seq_no_and_primary_term(data_client): opensearch_repo.save() -def test_delete_automatically_uses_seq_no_and_primary_term(data_client): +def test_delete_automatically_uses_seq_no_and_primary_term(data_client: Any) -> None: opensearch_repo = Repository.get("opensearch-py") opensearch_repo.meta.seq_no += 1 @@ -468,13 +472,13 @@ def test_delete_automatically_uses_seq_no_and_primary_term(data_client): opensearch_repo.delete() -def assert_doc_equals(expected, actual): +def assert_doc_equals(expected: Any, actual: Any) -> None: for f in expected: assert f in actual assert actual[f] == expected[f] -def test_can_save_to_different_index(write_client): +def test_can_save_to_different_index(write_client: Any) -> None: test_repo = Repository(description="testing", meta={"id": 42}) assert test_repo.save(index="test-document") @@ -489,7 +493,7 @@ def test_can_save_to_different_index(write_client): ) -def test_save_without_skip_empty_will_include_empty_fields(write_client): +def test_save_without_skip_empty_will_include_empty_fields(write_client: Any) -> None: test_repo = Repository(field_1=[], field_2=None, field_3={}, meta={"id": 42}) assert test_repo.save(index="test-document", skip_empty=False) @@ -504,7 +508,7 @@ def test_save_without_skip_empty_will_include_empty_fields(write_client): ) -def test_delete(write_client): +def test_delete(write_client: Any) -> None: write_client.create( index="test-document", id="opensearch-py", @@ -525,11 +529,11 @@ def test_delete(write_client): ) -def test_search(data_client): +def test_search(data_client: Any) -> None: assert Repository.search().count() == 1 -def test_search_returns_proper_doc_classes(data_client): +def test_search_returns_proper_doc_classes(data_client: Any) -> None: result = Repository.search().execute() opensearch_repo = result.hits[0] @@ -538,11 +542,13 @@ def test_search_returns_proper_doc_classes(data_client): assert opensearch_repo.owner.name == "opensearch" -def test_refresh_mapping(data_client): +def test_refresh_mapping(data_client: 
Any) -> None: class Commit(Document): class Index: name = "git" + _index: Any + Commit._index.load_mappings() assert "stats" in Commit._index._mapping @@ -552,7 +558,7 @@ class Index: assert isinstance(Commit._index._mapping["committed_date"], Date) -def test_highlight_in_meta(data_client): +def test_highlight_in_meta(data_client: Any) -> None: commit = ( Commit.search() .query("match", description="inverting") diff --git a/test_opensearchpy/test_server/test_helpers/test_faceted_search.py b/test_opensearchpy/test_server/test_helpers/test_faceted_search.py index 6b9ee50c..38dd40cd 100644 --- a/test_opensearchpy/test_server/test_helpers/test_faceted_search.py +++ b/test_opensearchpy/test_server/test_helpers/test_faceted_search.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -25,6 +26,7 @@ # under the License. from datetime import datetime +from typing import Any import pytest @@ -65,8 +67,8 @@ class MetricSearch(FacetedSearch): } -@pytest.fixture(scope="session") -def commit_search_cls(opensearch_version): +@pytest.fixture(scope="session") # type: ignore +def commit_search_cls(opensearch_version: Any) -> Any: interval_kwargs = {"fixed_interval": "1d"} class CommitSearch(FacetedSearch): @@ -90,8 +92,8 @@ class CommitSearch(FacetedSearch): return CommitSearch -@pytest.fixture(scope="session") -def repo_search_cls(opensearch_version): +@pytest.fixture(scope="session") # type: ignore +def repo_search_cls(opensearch_version: Any) -> Any: interval_type = "calendar_interval" class RepoSearch(FacetedSearch): @@ -104,15 +106,15 @@ class RepoSearch(FacetedSearch): ), } - def search(self): + def search(self) -> Any: s = super(RepoSearch, self).search() return s.filter("term", commit_repo="repo") return RepoSearch -@pytest.fixture(scope="session") -def pr_search_cls(opensearch_version): +@pytest.fixture(scope="session") # type: ignore +def pr_search_cls(opensearch_version: Any) -> Any: interval_type = "calendar_interval" class PRSearch(FacetedSearch): @@ -130,7 +132,7 @@ class PRSearch(FacetedSearch): return PRSearch -def test_facet_with_custom_metric(data_client): +def test_facet_with_custom_metric(data_client: Any) -> None: ms = MetricSearch() r = ms.execute() @@ -139,7 +141,7 @@ def test_facet_with_custom_metric(data_client): assert dates[0] == 1399038439000 -def test_nested_facet(pull_request, pr_search_cls): +def test_nested_facet(pull_request: Any, pr_search_cls: Any) -> None: prs = pr_search_cls() r = prs.execute() @@ -147,7 +149,7 @@ def test_nested_facet(pull_request, pr_search_cls): assert [(datetime(2018, 1, 1, 0, 0), 1, False)] == r.facets.comments -def test_nested_facet_with_filter(pull_request, pr_search_cls): +def test_nested_facet_with_filter(pull_request: Any, pr_search_cls: Any) -> None: prs = pr_search_cls(filters={"comments": datetime(2018, 1, 1, 0, 0)}) r = prs.execute() @@ -159,7 +161,7 @@ def test_nested_facet_with_filter(pull_request, pr_search_cls): assert not r.hits -def test_datehistogram_facet(data_client, repo_search_cls): +def test_datehistogram_facet(data_client: Any, repo_search_cls: Any) -> None: rs = repo_search_cls() r = rs.execute() @@ -167,7 +169,7 @@ def test_datehistogram_facet(data_client, repo_search_cls): assert [(datetime(2014, 3, 1, 0, 0), 1, False)] == r.facets.created -def test_boolean_facet(data_client, repo_search_cls): +def test_boolean_facet(data_client: Any, repo_search_cls: Any) -> None: rs = repo_search_cls() r = rs.execute() @@ -178,8 +180,8 @@ def 
test_boolean_facet(data_client, repo_search_cls): def test_empty_search_finds_everything( - data_client, opensearch_version, commit_search_cls -): + data_client: Any, opensearch_version: Any, commit_search_cls: Any +) -> None: cs = commit_search_cls() r = cs.execute() assert r.hits.total.value == 52 @@ -224,8 +226,8 @@ def test_empty_search_finds_everything( def test_term_filters_are_shown_as_selected_and_data_is_filtered( - data_client, commit_search_cls -): + data_client: Any, commit_search_cls: Any +) -> None: cs = commit_search_cls(filters={"files": "test_opensearchpy/test_dsl"}) r = cs.execute() @@ -270,8 +272,8 @@ def test_term_filters_are_shown_as_selected_and_data_is_filtered( def test_range_filters_are_shown_as_selected_and_data_is_filtered( - data_client, commit_search_cls -): + data_client: Any, commit_search_cls: Any +) -> None: cs = commit_search_cls(filters={"deletions": "better"}) r = cs.execute() @@ -279,7 +281,7 @@ def test_range_filters_are_shown_as_selected_and_data_is_filtered( assert 19 == r.hits.total.value -def test_pagination(data_client, commit_search_cls): +def test_pagination(data_client: Any, commit_search_cls: Any) -> None: cs = commit_search_cls() cs = cs[0:20] diff --git a/test_opensearchpy/test_server/test_helpers/test_index.py b/test_opensearchpy/test_server/test_helpers/test_index.py index 7df4a737..71f0501a 100644 --- a/test_opensearchpy/test_server/test_helpers/test_index.py +++ b/test_opensearchpy/test_server/test_helpers/test_index.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -24,6 +25,8 @@ # specific language governing permissions and limitations # under the License. +from typing import Any + from opensearchpy import Date, Document, Index, IndexTemplate, Text from opensearchpy.helpers import analysis @@ -33,7 +36,7 @@ class Post(Document): published_from = Date() -def test_index_template_works(write_client): +def test_index_template_works(write_client: Any) -> None: it = IndexTemplate("test-template", "test-*") it.document(Post) it.settings(number_of_replicas=0, number_of_shards=1) @@ -54,7 +57,7 @@ def test_index_template_works(write_client): } == write_client.indices.get_mapping(index="test-blog") -def test_index_can_be_saved_even_with_settings(write_client): +def test_index_can_be_saved_even_with_settings(write_client: Any) -> None: i = Index("test-blog", using=write_client) i.settings(number_of_shards=3, number_of_replicas=0) i.save() @@ -66,12 +69,12 @@ def test_index_can_be_saved_even_with_settings(write_client): ) -def test_index_exists(data_client): +def test_index_exists(data_client: Any) -> None: assert Index("git").exists() assert not Index("not-there").exists() -def test_index_can_be_created_with_settings_and_mappings(write_client): +def test_index_can_be_created_with_settings_and_mappings(write_client: Any) -> None: i = Index("test-blog", using=write_client) i.document(Post) i.settings(number_of_replicas=0, number_of_shards=1) @@ -96,7 +99,7 @@ def test_index_can_be_created_with_settings_and_mappings(write_client): } -def test_delete(write_client): +def test_delete(write_client: Any) -> None: write_client.indices.create( index="test-index", body={"settings": {"number_of_replicas": 0, "number_of_shards": 1}}, @@ -107,7 +110,7 @@ def test_delete(write_client): assert not write_client.indices.exists(index="test-index") -def test_multiple_indices_with_same_doc_type_work(write_client): +def 
test_multiple_indices_with_same_doc_type_work(write_client: Any) -> None: i1 = Index("test-index-1", using=write_client) i2 = Index("test-index-2", using=write_client) @@ -115,8 +118,8 @@ def test_multiple_indices_with_same_doc_type_work(write_client): i.document(Post) i.create() - for i in ("test-index-1", "test-index-2"): - settings = write_client.indices.get_settings(index=i) - assert settings[i]["settings"]["index"]["analysis"] == { + for j in ("test-index-1", "test-index-2"): + settings = write_client.indices.get_settings(index=j) + assert settings[j]["settings"]["index"]["analysis"] == { "analyzer": {"my_analyzer": {"type": "custom", "tokenizer": "keyword"}} } diff --git a/test_opensearchpy/test_server/test_helpers/test_mapping.py b/test_opensearchpy/test_server/test_helpers/test_mapping.py index d5d84469..722a249e 100644 --- a/test_opensearchpy/test_server/test_helpers/test_mapping.py +++ b/test_opensearchpy/test_server/test_helpers/test_mapping.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -24,13 +25,15 @@ # specific language governing permissions and limitations # under the License. +from typing import Any + from pytest import raises from opensearchpy import exceptions from opensearchpy.helpers import analysis, mapping -def test_mapping_saved_into_opensearch(write_client): +def test_mapping_saved_into_opensearch(write_client: Any) -> None: m = mapping.Mapping() m.field( "name", "text", analyzer=analysis.analyzer("my_analyzer", tokenizer="keyword") @@ -50,7 +53,9 @@ def test_mapping_saved_into_opensearch(write_client): } == write_client.indices.get_mapping(index="test-mapping") -def test_mapping_saved_into_opensearch_when_index_already_exists_closed(write_client): +def test_mapping_saved_into_opensearch_when_index_already_exists_closed( + write_client: Any, +) -> None: m = mapping.Mapping() m.field( "name", "text", analyzer=analysis.analyzer("my_analyzer", tokenizer="keyword") @@ -74,8 +79,8 @@ def test_mapping_saved_into_opensearch_when_index_already_exists_closed(write_cl def test_mapping_saved_into_opensearch_when_index_already_exists_with_analysis( - write_client, -): + write_client: Any, +) -> None: m = mapping.Mapping() analyzer = analysis.analyzer("my_analyzer", tokenizer="keyword") m.field("name", "text", analyzer=analyzer) @@ -104,7 +109,7 @@ def test_mapping_saved_into_opensearch_when_index_already_exists_with_analysis( } == write_client.indices.get_mapping(index="test-mapping") -def test_mapping_gets_updated_from_opensearch(write_client): +def test_mapping_gets_updated_from_opensearch(write_client: Any) -> None: write_client.indices.create( index="test-mapping", body={ diff --git a/test_opensearchpy/test_server/test_helpers/test_search.py b/test_opensearchpy/test_server/test_helpers/test_search.py index 90aabbc0..4fb00597 100644 --- a/test_opensearchpy/test_server/test_helpers/test_search.py +++ b/test_opensearchpy/test_server/test_helpers/test_search.py @@ -27,6 +27,8 @@ from __future__ import unicode_literals +from typing import Any + from pytest import raises from opensearchpy import ( @@ -50,7 +52,7 @@ class Repository(Document): tags = Keyword() @classmethod - def search(cls): + def search(cls, using: Any = None, index: Any = None) -> Any: return super(Repository, cls).search().filter("term", commit_repo="repo") class Index: @@ -62,7 +64,7 @@ class Index: name = "flat-git" -def test_filters_aggregation_buckets_are_accessible(data_client): +def 
test_filters_aggregation_buckets_are_accessible(data_client: Any) -> None: has_tests_query = Q("term", files="test_opensearchpy/test_dsl") s = Commit.search()[0:0] s.aggs.bucket("top_authors", "terms", field="author.name.raw").bucket( @@ -83,7 +85,7 @@ ) -def test_top_hits_are_wrapped_in_response(data_client): +def test_top_hits_are_wrapped_in_response(data_client: Any) -> None: s = Commit.search()[0:0] s.aggs.bucket("top_authors", "terms", field="author.name.raw").metric( "top_commits", "top_hits", size=5 @@ -99,7 +101,7 @@ assert isinstance(hits[0], Commit) -def test_inner_hits_are_wrapped_in_response(data_client): +def test_inner_hits_are_wrapped_in_response(data_client: Any) -> None: s = Search(index="git")[0:1].query( "has_parent", parent_type="repo", inner_hits={}, query=Q("match_all") ) @@ -110,7 +112,7 @@ assert repr(commit.meta.inner_hits.repo[0]).startswith("<Hit(git/opensearch-py): ") -def test_scan_respects_doc_types(data_client): +def test_scan_respects_doc_types(data_client: Any) -> None: repos = list(Repository.search().scan()) assert 1 == len(repos) @@ -118,7 +120,7 @@ assert repos[0].organization == "opensearch" -def test_scan_iterates_through_all_docs(data_client): +def test_scan_iterates_through_all_docs(data_client: Any) -> None: s = Search(index="flat-git") commits = list(s.scan()) @@ -127,7 +129,7 @@ assert {d["_id"] for d in FLAT_DATA} == {c.meta.id for c in commits} -def test_response_is_cached(data_client): +def test_response_is_cached(data_client: Any) -> None: s = Repository.search() repos = list(s) @@ -135,7 +137,7 @@ assert s._response.hits == repos -def test_multi_search(data_client): +def test_multi_search(data_client: Any) -> None: s1 = Repository.search() s2 = Search(index="flat-git") @@ -152,7 +154,7 @@ assert r2._search is s2 -def test_multi_missing(data_client): +def test_multi_missing(data_client: Any) -> None: s1 = Repository.search() s2 = Search(index="flat-git") s3 = Search(index="does_not_exist") @@ -175,7 +177,7 @@ assert r3 is None -def test_raw_subfield_can_be_used_in_aggs(data_client): +def test_raw_subfield_can_be_used_in_aggs(data_client: Any) -> None: s = Search(index="git")[0:0] s.aggs.bucket("authors", "terms", field="author.name.raw", size=1) diff --git a/test_opensearchpy/test_server/test_helpers/test_update_by_query.py b/test_opensearchpy/test_server/test_helpers/test_update_by_query.py index b22db642..dfc4d250 100644 --- a/test_opensearchpy/test_server/test_helpers/test_update_by_query.py +++ b/test_opensearchpy/test_server/test_helpers/test_update_by_query.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -24,11 +25,13 @@ # specific language governing permissions and limitations # under the License.
+from typing import Any + from opensearchpy.helpers.search import Q from opensearchpy.helpers.update_by_query import UpdateByQuery -def test_update_by_query_no_script(write_client, setup_ubq_tests): +def test_update_by_query_no_script(write_client: Any, setup_ubq_tests: Any) -> None: index = setup_ubq_tests ubq = ( @@ -47,7 +50,7 @@ def test_update_by_query_no_script(write_client, setup_ubq_tests): assert response.success() -def test_update_by_query_with_script(write_client, setup_ubq_tests): +def test_update_by_query_with_script(write_client: Any, setup_ubq_tests: Any) -> None: index = setup_ubq_tests ubq = ( @@ -64,7 +67,7 @@ def test_update_by_query_with_script(write_client, setup_ubq_tests): assert response.version_conflicts == 0 -def test_delete_by_query_with_script(write_client, setup_ubq_tests): +def test_delete_by_query_with_script(write_client: Any, setup_ubq_tests: Any) -> None: index = setup_ubq_tests ubq = ( diff --git a/test_opensearchpy/test_server/test_plugins/__init__.py b/test_opensearchpy/test_server/test_plugins/__init__.py index 7e52ae22..392fa5bd 100644 --- a/test_opensearchpy/test_server/test_plugins/__init__.py +++ b/test_opensearchpy/test_server/test_plugins/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server/test_plugins/test_alerting.py b/test_opensearchpy/test_server/test_plugins/test_alerting.py index 406bd71f..aa1eaf6c 100644 --- a/test_opensearchpy/test_server/test_plugins/test_alerting.py +++ b/test_opensearchpy/test_server/test_plugins/test_alerting.py @@ -23,7 +23,7 @@ class TestAlertingPlugin(OpenSearchTestCase): (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)), "Plugin not supported for opensearch version", ) - def test_create_destination(self): + def test_create_destination(self) -> None: # Test to create alert destination dummy_destination = { "name": "my-destination", @@ -39,7 +39,7 @@ def test_create_destination(self): (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)), "Plugin not supported for opensearch version", ) - def test_get_destination(self): + def test_get_destination(self) -> None: # Create a dummy destination self.test_create_destination() @@ -54,7 +54,7 @@ def test_get_destination(self): (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)), "Plugin not supported for opensearch version", ) - def test_create_monitor(self): + def test_create_monitor(self) -> None: # Create a dummy destination self.test_create_destination() @@ -119,11 +119,11 @@ def test_create_monitor(self): (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)), "Plugin not supported for opensearch version", ) - def test_search_monitor(self): + def test_search_monitor(self) -> None: # Create a dummy monitor self.test_create_monitor() - # Create a monitor search query by it's name + # Create a monitor search query by its name query = {"query": {"match": {"monitor.name": "test-monitor"}}} # Perform the search with the above query @@ -137,11 +137,11 @@ def test_search_monitor(self): (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)), "Plugin not supported for opensearch version", ) - def test_get_monitor(self): + def test_get_monitor(self) -> None: # Create a dummy monitor self.test_create_monitor() - # Create a monitor search query by it's name + # Create a monitor search query by its name query = {"query": {"match": {"monitor.name": "test-monitor"}}} # Perform the search with the above query @@ -161,11 
+161,11 @@ def test_get_monitor(self): (OPENSEARCH_VERSION) and (OPENSEARCH_VERSION < (2, 0, 0)), "Plugin not supported for opensearch version", ) - def test_run_monitor(self): + def test_run_monitor(self) -> None: # Create a dummy monitor self.test_create_monitor() - # Create a monitor search query by it's name + # Create a monitor search query by its name query = {"query": {"match": {"monitor.name": "test-monitor"}}} # Perform the search with the above query diff --git a/test_opensearchpy/test_server/test_plugins/test_index_management.py b/test_opensearchpy/test_server/test_plugins/test_index_management.py index 68f61c7b..ed8c0b57 100644 --- a/test_opensearchpy/test_server/test_plugins/test_index_management.py +++ b/test_opensearchpy/test_server/test_plugins/test_index_management.py @@ -64,7 +64,7 @@ class TestIndexManagementPlugin(OpenSearchTestCase): } } - def test_create_policy(self): + def test_create_policy(self) -> None: # Test to create policy response = self.client.index_management.put_policy( policy=self.POLICY_NAME, body=self.POLICY_CONTENT @@ -73,7 +73,7 @@ def test_create_policy(self): self.assertNotIn("errors", response) self.assertIn("_id", response) - def test_get_policy(self): + def test_get_policy(self) -> None: # Create a policy self.test_create_policy() @@ -84,7 +84,7 @@ def test_get_policy(self): self.assertIn("_id", response) self.assertEqual(response["_id"], self.POLICY_NAME) - def test_update_policy(self): + def test_update_policy(self) -> None: # Create a policy self.test_create_policy() @@ -106,7 +106,7 @@ def test_update_policy(self): self.assertNotIn("errors", response) self.assertIn("_id", response) - def test_delete_policy(self): + def test_delete_policy(self) -> None: # Create a policy self.test_create_policy() diff --git a/test_opensearchpy/test_server/test_rest_api_spec.py b/test_opensearchpy/test_server/test_rest_api_spec.py index 7d1cbf51..3249f41b 100644 --- a/test_opensearchpy/test_server/test_rest_api_spec.py +++ b/test_opensearchpy/test_server/test_rest_api_spec.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -33,9 +34,9 @@ import io import os import re -import sys import warnings import zipfile +from typing import Any import pytest import urllib3 @@ -135,31 +136,30 @@ OPENSEARCH_VERSION = None RUN_ASYNC_REST_API_TESTS = ( - sys.version_info >= (3, 6) - and os.environ.get("PYTHON_CONNECTION_CLASS") == "RequestsHttpConnection" + os.environ.get("PYTHON_CONNECTION_CLASS") == "RequestsHttpConnection" ) FALSEY_VALUES = ("", None, False, 0, 0.0) class YamlRunner: - def __init__(self, client): + def __init__(self, client: Any) -> None: self.client = client - self.last_response = None + self.last_response: Any = None - self._run_code = None - self._setup_code = None - self._teardown_code = None - self._state = {} + self._run_code: Any = None + self._setup_code: Any = None + self._teardown_code: Any = None + self._state: Any = {} - def use_spec(self, test_spec): + def use_spec(self, test_spec: Any) -> None: self._setup_code = test_spec.pop("setup", None) self._run_code = test_spec.pop("run", None) self._teardown_code = test_spec.pop("teardown", None) - def setup(self): + def setup(self) -> Any: # Pull skips from individual tests to not do unnecessary setup. 
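For readers following the YamlRunner changes in this file, a condensed sketch (with assumed names) of the dispatch pattern the class uses: each YAML action such as `{"match": {...}}` is routed to a `run_<action_type>` method, so supporting a new verb only requires adding a method.

```python
# Condensed illustration of YamlRunner.run_code's dispatch; MiniRunner and
# its single verb are stand-ins, not the real runner.
from typing import Any


class MiniRunner:
    def run_code(self, test: Any) -> None:
        for action in test:
            assert len(action) == 1
            action_type, payload = list(action.items())[0]
            if hasattr(self, "run_" + action_type):
                getattr(self, "run_" + action_type)(payload)
            else:
                raise RuntimeError("Invalid action type %r" % (action_type,))

    def run_match(self, payload: Any) -> None:
        print("match", payload)


MiniRunner().run_code([{"match": {"hits.total": 1}}])
```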
- skip_code = [] + skip_code: Any = [] for action in self._run_code: assert len(action) == 1 action_type, _ = list(action.items())[0] @@ -175,12 +175,12 @@ def setup(self): if self._setup_code: self.run_code(self._setup_code) - def teardown(self): + def teardown(self) -> Any: if self._teardown_code: self.section("teardown") self.run_code(self._teardown_code) - def opensearch_version(self): + def opensearch_version(self) -> Any: global OPENSEARCH_VERSION if OPENSEARCH_VERSION is None: version_string = (self.client.info())["version"]["number"] @@ -190,10 +190,10 @@ def opensearch_version(self): OPENSEARCH_VERSION = tuple(int(v) if v.isdigit() else 99 for v in version) return OPENSEARCH_VERSION - def section(self, name): + def section(self, name: str) -> None: print(("=" * 10) + " " + name + " " + ("=" * 10)) - def run(self): + def run(self) -> Any: try: self.setup() self.section("test") @@ -204,8 +204,8 @@ def run(self): except Exception: pass - def run_code(self, test): - """Execute an instruction based on it's type.""" + def run_code(self, test: Any) -> Any: + """Execute an instruction based on its type.""" for action in test: assert len(action) == 1 action_type, action = list(action.items())[0] @@ -216,7 +216,7 @@ def run_code(self, test): else: raise RuntimeError("Invalid action type %r" % (action_type,)) - def run_do(self, action): + def run_do(self, action: Any) -> Any: api = self.client headers = action.pop("headers", None) catch = action.pop("catch", None) @@ -268,7 +268,7 @@ def run_do(self, action): # Filter out warnings raised by other components. caught_warnings = [ - str(w.message) + str(w.message) # type: ignore for w in caught_warnings if w.category == OpenSearchWarning and str(w.message) not in allowed_warnings @@ -276,13 +276,13 @@ def run_do(self, action): # Sorting removes the issue with order raised. We only care about # if all warnings are raised in the single API call. 
- if warn and sorted(warn) != sorted(caught_warnings): + if warn and sorted(warn) != sorted(caught_warnings): # type: ignore raise AssertionError( "Expected warnings not equal to actual warnings: expected=%r actual=%r" % (warn, caught_warnings) ) - def run_catch(self, catch, exception): + def run_catch(self, catch: Any, exception: Any) -> None: if catch == "param": assert isinstance(exception, TypeError) return @@ -297,7 +297,7 @@ def run_catch(self, catch, exception): ) is not None self.last_response = exception.info - def run_skip(self, skip): + def run_skip(self, skip: Any) -> Any: global IMPLEMENTED_FEATURES if "features" in skip: @@ -319,32 +319,32 @@ def run_skip(self, skip): if min_version <= (self.opensearch_version()) <= max_version: pytest.skip(reason) - def run_gt(self, action): + def run_gt(self, action: Any) -> None: for key, value in action.items(): value = self._resolve(value) assert self._lookup(key) > value - def run_gte(self, action): + def run_gte(self, action: Any) -> None: for key, value in action.items(): value = self._resolve(value) assert self._lookup(key) >= value - def run_lt(self, action): + def run_lt(self, action: Any) -> None: for key, value in action.items(): value = self._resolve(value) assert self._lookup(key) < value - def run_lte(self, action): + def run_lte(self, action: Any) -> None: for key, value in action.items(): value = self._resolve(value) assert self._lookup(key) <= value - def run_set(self, action): + def run_set(self, action: Any) -> None: for key, value in action.items(): value = self._resolve(value) self._state[value] = self._lookup(key) - def run_is_false(self, action): + def run_is_false(self, action: Any) -> None: try: value = self._lookup(action) except AssertionError: @@ -352,23 +352,23 @@ def run_is_false(self, action): else: assert value in FALSEY_VALUES - def run_is_true(self, action): + def run_is_true(self, action: Any) -> None: value = self._lookup(action) assert value not in FALSEY_VALUES - def run_length(self, action): + def run_length(self, action: Any) -> None: for path, expected in action.items(): value = self._lookup(path) expected = self._resolve(expected) assert expected == len(value) - def run_match(self, action): + def run_match(self, action: Any) -> None: for path, expected in action.items(): value = self._lookup(path) expected = self._resolve(expected) if ( - isinstance(expected, string_types) + isinstance(expected, str) and expected.startswith("/") and expected.endswith("/") ): @@ -380,7 +380,7 @@ def run_match(self, action): else: self._assert_match_equals(value, expected) - def run_contains(self, action): + def run_contains(self, action: Any) -> None: for path, expected in action.items(): value = self._lookup(path) # list[dict[str,str]] is returned expected = self._resolve(expected) # dict[str, str] @@ -388,7 +388,7 @@ def run_contains(self, action): if expected not in value: raise AssertionError("%s is not contained by %s" % (expected, value)) - def run_transform_and_set(self, action): + def run_transform_and_set(self, action: Any) -> None: for key, value in action.items(): # Convert #base64EncodeCredentials(id,api_key) to ["id", "api_key"] if "#base64EncodeCredentials" in value: @@ -398,7 +398,7 @@ def run_transform_and_set(self, action): (self._lookup(value[0]), self._lookup(value[1])) ) - def _resolve(self, value): + def _resolve(self, value: Any) -> Any: # resolve variables if isinstance(value, string_types) and "$" in value: for k, v in self._state.items(): @@ -423,12 +423,13 @@ def _resolve(self, value): value 
= list(map(self._resolve, value)) return value - def _lookup(self, path): + def _lookup(self, path: str) -> Any: # fetch the possibly nested value from last_response - value = self.last_response + value: Any = self.last_response if path == "$body": return value path = path.replace(r"\.", "\1") + step: Any for step in path.split("."): if not step: continue @@ -450,10 +451,10 @@ def _lookup(self, path): value = value[step] return value - def _feature_enabled(self, name): + def _feature_enabled(self, name: str) -> Any: return False - def _assert_match_equals(self, a, b): + def _assert_match_equals(self, a: Any, b: Any) -> None: # Handle for large floating points with 'E' if isinstance(b, string_types) and isinstance(a, float) and "e" in repr(a): a = repr(a).replace("e+", "E") @@ -461,8 +462,8 @@ def _assert_match_equals(self, a, b): assert a == b, "%r does not match %r" % (a, b) -@pytest.fixture(scope="function") -def sync_runner(sync_client): +@pytest.fixture(scope="function") # type: ignore +def sync_runner(sync_client: Any) -> Any: return YamlRunner(sync_client) @@ -533,8 +534,8 @@ def sync_runner(sync_client): if not RUN_ASYNC_REST_API_TESTS: - @pytest.mark.parametrize("test_spec", YAML_TEST_SPECS) - def test_rest_api_spec(test_spec, sync_runner): + @pytest.mark.parametrize("test_spec", YAML_TEST_SPECS) # type: ignore + def test_rest_api_spec(test_spec: Any, sync_runner: Any) -> None: if test_spec.get("skip", False): pytest.skip("Manually skipped in 'SKIP_TESTS'") sync_runner.use_spec(test_spec) diff --git a/test_opensearchpy/test_server_secured/__init__.py b/test_opensearchpy/test_server_secured/__init__.py index 6c0097cd..22c54ac8 100644 --- a/test_opensearchpy/test_server_secured/__init__.py +++ b/test_opensearchpy/test_server_secured/__init__.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_server_secured/test_clients.py b/test_opensearchpy/test_server_secured/test_clients.py index e597c6ac..94684ffb 100644 --- a/test_opensearchpy/test_server_secured/test_clients.py +++ b/test_opensearchpy/test_server_secured/test_clients.py @@ -15,7 +15,7 @@ class TestSecurity(TestCase): - def test_security(self): + def test_security(self) -> None: client = OpenSearch( OPENSEARCH_URL, http_auth=("admin", "admin"), diff --git a/test_opensearchpy/test_server_secured/test_security_plugin.py b/test_opensearchpy/test_server_secured/test_security_plugin.py index 1f46712a..e43b2278 100644 --- a/test_opensearchpy/test_server_secured/test_security_plugin.py +++ b/test_opensearchpy/test_server_secured/test_security_plugin.py @@ -36,15 +36,15 @@ class TestSecurityPlugin(TestCase): USER_NAME = "test-user" USER_CONTENT = {"password": "opensearchpy@123", "opendistro_security_roles": []} - def setUp(self): + def setUp(self) -> None: self.client = get_test_client(verify_certs=False, http_auth=("admin", "admin")) add_connection("default", self.client) - def tearDown(self): + def tearDown(self) -> None: if self.client: self.client.close() - def test_create_role(self): + def test_create_role(self) -> None: # Test to create role response = self.client.security.create_role( self.ROLE_NAME, body=self.ROLE_CONTENT @@ -53,7 +53,7 @@ def test_create_role(self): self.assertNotIn("errors", response) self.assertIn(response.get("status"), ["CREATED", "OK"]) - def test_create_role_with_body_param_empty(self): + def test_create_role_with_body_param_empty(self) -> None: try: 
self.client.security.create_role(self.ROLE_NAME, body="") except ValueError as error: @@ -61,7 +61,7 @@ def test_create_role_with_body_param_empty(self): else: assert False - def test_get_role(self): + def test_get_role(self) -> None: # Create a role self.test_create_role() @@ -71,7 +71,7 @@ def test_get_role(self): self.assertNotIn("errors", response) self.assertIn(self.ROLE_NAME, response) - def test_update_role(self): + def test_update_role(self) -> None: # Create a role self.test_create_role() @@ -84,7 +84,7 @@ def test_update_role(self): self.assertNotIn("errors", response) self.assertEqual("OK", response.get("status")) - def test_delete_role(self): + def test_delete_role(self) -> None: # Create a role self.test_create_role() @@ -97,7 +97,7 @@ def test_delete_role(self): with self.assertRaises(NotFoundError): response = self.client.security.get_role(self.ROLE_NAME) - def test_create_user(self): + def test_create_user(self) -> None: # Test to create user response = self.client.security.create_user( self.USER_NAME, body=self.USER_CONTENT @@ -106,7 +106,7 @@ def test_create_user(self): self.assertNotIn("errors", response) self.assertIn(response.get("status"), ["CREATED", "OK"]) - def test_create_user_with_body_param_empty(self): + def test_create_user_with_body_param_empty(self) -> None: try: self.client.security.create_user(self.USER_NAME, body="") except ValueError as error: @@ -114,7 +114,7 @@ def test_create_user_with_body_param_empty(self): else: assert False - def test_create_user_with_role(self): + def test_create_user_with_role(self) -> None: self.test_create_role() # Test to create user @@ -129,7 +129,7 @@ def test_create_user_with_role(self): self.assertNotIn("errors", response) self.assertIn(response.get("status"), ["CREATED", "OK"]) - def test_get_user(self): + def test_get_user(self) -> None: # Create a user self.test_create_user() @@ -139,7 +139,7 @@ def test_get_user(self): self.assertNotIn("errors", response) self.assertIn(self.USER_NAME, response) - def test_update_user(self): + def test_update_user(self) -> None: # Create a user self.test_create_user() @@ -152,7 +152,7 @@ def test_update_user(self): self.assertNotIn("errors", response) self.assertEqual("OK", response.get("status")) - def test_delete_user(self): + def test_delete_user(self) -> None: # Create a user self.test_create_user() @@ -164,3 +164,55 @@ def test_delete_user(self): # Try fetching the user with self.assertRaises(NotFoundError): response = self.client.security.get_user(self.USER_NAME) + + def test_health_check(self) -> None: + response = self.client.security.health_check() + self.assertNotIn("errors", response) + self.assertEqual("UP", response.get("status")) + + def test_health(self) -> None: + response = self.client.security.health() + self.assertNotIn("errors", response) + self.assertEqual("UP", response.get("status")) + + AUDIT_CONFIG_SETTINGS = { + "enabled": True, + "audit": { + "ignore_users": [], + "ignore_requests": [], + "disabled_rest_categories": ["AUTHENTICATED", "GRANTED_PRIVILEGES"], + "disabled_transport_categories": ["AUTHENTICATED", "GRANTED_PRIVILEGES"], + "log_request_body": False, + "resolve_indices": False, + "resolve_bulk_requests": False, + "exclude_sensitive_headers": True, + "enable_transport": False, + "enable_rest": True, + }, + "compliance": { + "enabled": True, + "write_log_diffs": False, + "read_watched_fields": {}, + "read_ignore_users": [], + "write_watched_indices": [], + "write_ignore_users": [], + "read_metadata_only": True, + "write_metadata_only": True, + 
"external_config": False, + "internal_config": True, + }, + } + + def test_update_audit_config(self) -> None: + response = self.client.security.update_audit_config( + body=self.AUDIT_CONFIG_SETTINGS + ) + self.assertNotIn("errors", response) + self.assertEqual("OK", response.get("status")) + + def test_update_audit_configuration(self) -> None: + response = self.client.security.update_audit_configuration( + body=self.AUDIT_CONFIG_SETTINGS + ) + self.assertNotIn("errors", response) + self.assertEqual("OK", response.get("status")) diff --git a/test_opensearchpy/test_transport.py b/test_opensearchpy/test_transport.py index 2c0892cf..dc1a8f9e 100644 --- a/test_opensearchpy/test_transport.py +++ b/test_opensearchpy/test_transport.py @@ -30,6 +30,7 @@ import json import time +from typing import Any from mock import patch @@ -42,14 +43,14 @@ class DummyConnection(Connection): - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: self.exception = kwargs.pop("exception", None) self.status, self.data = kwargs.pop("status", 200), kwargs.pop("data", "{}") self.headers = kwargs.pop("headers", {}) - self.calls = [] + self.calls: Any = [] super(DummyConnection, self).__init__(**kwargs) - def perform_request(self, *args, **kwargs): + def perform_request(self, *args: Any, **kwargs: Any) -> Any: self.calls.append((args, kwargs)) if self.exception: raise self.exception @@ -108,7 +109,7 @@ def perform_request(self, *args, **kwargs): class TestHostsInfoCallback(TestCase): - def test_cluster_manager_only_nodes_are_ignored(self): + def test_cluster_manager_only_nodes_are_ignored(self) -> None: nodes = [ {"roles": ["cluster_manager"]}, {"roles": ["cluster_manager", "data", "ingest"]}, @@ -119,20 +120,20 @@ def test_cluster_manager_only_nodes_are_ignored(self): chosen = [ i for i, node_info in enumerate(nodes) - if get_host_info(node_info, i) is not None + if get_host_info(node_info, i) is not None # type: ignore ] self.assertEqual([1, 2, 3, 4], chosen) class TestTransport(TestCase): - def test_single_connection_uses_dummy_connection_pool(self): - t = Transport([{}]) - self.assertIsInstance(t.connection_pool, DummyConnectionPool) - t = Transport([{"host": "localhost"}]) - self.assertIsInstance(t.connection_pool, DummyConnectionPool) + def test_single_connection_uses_dummy_connection_pool(self) -> None: + t1: Any = Transport([{}]) + self.assertIsInstance(t1.connection_pool, DummyConnectionPool) + t2: Any = Transport([{"host": "localhost"}]) + self.assertIsInstance(t2.connection_pool, DummyConnectionPool) - def test_request_timeout_extracted_from_params_and_passed(self): - t = Transport([{}], connection_class=DummyConnection) + def test_request_timeout_extracted_from_params_and_passed(self) -> None: + t: Any = Transport([{}], connection_class=DummyConnection) t.perform_request("GET", "/", params={"request_timeout": 42}) self.assertEqual(1, len(t.get_connection().calls)) @@ -142,8 +143,8 @@ def test_request_timeout_extracted_from_params_and_passed(self): t.get_connection().calls[0][1], ) - def test_timeout_extracted_from_params_and_passed(self): - t = Transport([{}], connection_class=DummyConnection) + def test_timeout_extracted_from_params_and_passed(self) -> None: + t: Any = Transport([{}], connection_class=DummyConnection) t.perform_request("GET", "/", params={"timeout": 84}) self.assertEqual(1, len(t.get_connection().calls)) @@ -153,8 +154,8 @@ def test_timeout_extracted_from_params_and_passed(self): t.get_connection().calls[0][1], ) - def test_opaque_id(self): - t = Transport([{}], 
opaque_id="app-1", connection_class=DummyConnection) + def test_opaque_id(self) -> None: + t: Any = Transport([{}], opaque_id="app-1", connection_class=DummyConnection) t.perform_request("GET", "/") self.assertEqual(1, len(t.get_connection().calls)) @@ -173,8 +174,8 @@ def test_opaque_id(self): t.get_connection().calls[1][1], ) - def test_request_with_custom_user_agent_header(self): - t = Transport([{}], connection_class=DummyConnection) + def test_request_with_custom_user_agent_header(self) -> None: + t: Any = Transport([{}], connection_class=DummyConnection) t.perform_request("GET", "/", headers={"user-agent": "my-custom-value/1.2.3"}) self.assertEqual(1, len(t.get_connection().calls)) @@ -187,8 +188,10 @@ def test_request_with_custom_user_agent_header(self): t.get_connection().calls[0][1], ) - def test_send_get_body_as_source(self): - t = Transport([{}], send_get_body_as="source", connection_class=DummyConnection) + def test_send_get_body_as_source(self) -> None: + t: Any = Transport( + [{}], send_get_body_as="source", connection_class=DummyConnection + ) t.perform_request("GET", "/", body={}) self.assertEqual(1, len(t.get_connection().calls)) @@ -196,15 +199,17 @@ def test_send_get_body_as_source(self): ("GET", "/", {"source": "{}"}, None), t.get_connection().calls[0][0] ) - def test_send_get_body_as_post(self): - t = Transport([{}], send_get_body_as="POST", connection_class=DummyConnection) + def test_send_get_body_as_post(self) -> None: + t: Any = Transport( + [{}], send_get_body_as="POST", connection_class=DummyConnection + ) t.perform_request("GET", "/", body={}) self.assertEqual(1, len(t.get_connection().calls)) self.assertEqual(("POST", "/", None, b"{}"), t.get_connection().calls[0][0]) - def test_body_gets_encoded_into_bytes(self): - t = Transport([{}], connection_class=DummyConnection) + def test_body_gets_encoded_into_bytes(self) -> None: + t: Any = Transport([{}], connection_class=DummyConnection) t.perform_request("GET", "/", body="你好") self.assertEqual(1, len(t.get_connection().calls)) @@ -213,16 +218,16 @@ def test_body_gets_encoded_into_bytes(self): t.get_connection().calls[0][0], ) - def test_body_bytes_get_passed_untouched(self): - t = Transport([{}], connection_class=DummyConnection) + def test_body_bytes_get_passed_untouched(self) -> None: + t: Any = Transport([{}], connection_class=DummyConnection) body = b"\xe4\xbd\xa0\xe5\xa5\xbd" t.perform_request("GET", "/", body=body) self.assertEqual(1, len(t.get_connection().calls)) self.assertEqual(("GET", "/", None, body), t.get_connection().calls[0][0]) - def test_body_surrogates_replaced_encoded_into_bytes(self): - t = Transport([{}], connection_class=DummyConnection) + def test_body_surrogates_replaced_encoded_into_bytes(self) -> None: + t: Any = Transport([{}], connection_class=DummyConnection) t.perform_request("GET", "/", body="你好\uda6a") self.assertEqual(1, len(t.get_connection().calls)) @@ -231,27 +236,27 @@ def test_body_surrogates_replaced_encoded_into_bytes(self): t.get_connection().calls[0][0], ) - def test_kwargs_passed_on_to_connections(self): - t = Transport([{"host": "google.com"}], port=123) + def test_kwargs_passed_on_to_connections(self) -> None: + t: Any = Transport([{"host": "google.com"}], port=123) self.assertEqual(1, len(t.connection_pool.connections)) self.assertEqual("http://google.com:123", t.connection_pool.connections[0].host) - def test_kwargs_passed_on_to_connection_pool(self): + def test_kwargs_passed_on_to_connection_pool(self) -> None: dt = object() - t = Transport([{}, {}], dead_timeout=dt) 
+ t: Any = Transport([{}, {}], dead_timeout=dt) self.assertIs(dt, t.connection_pool.dead_timeout) - def test_custom_connection_class(self): - class MyConnection(object): - def __init__(self, **kwargs): + def test_custom_connection_class(self) -> None: + class MyConnection(Connection): + def __init__(self, **kwargs: Any) -> None: self.kwargs = kwargs - t = Transport([{}], connection_class=MyConnection) + t: Any = Transport([{}], connection_class=MyConnection) self.assertEqual(1, len(t.connection_pool.connections)) self.assertIsInstance(t.connection_pool.connections[0], MyConnection) - def test_add_connection(self): - t = Transport([{}], randomize_hosts=False) + def test_add_connection(self) -> None: + t: Any = Transport([{}], randomize_hosts=False) t.add_connection({"host": "google.com", "port": 1234}) self.assertEqual(2, len(t.connection_pool.connections)) @@ -259,8 +264,8 @@ def test_add_connection(self): "http://google.com:1234", t.connection_pool.connections[1].host ) - def test_request_will_fail_after_X_retries(self): - t = Transport( + def test_request_will_fail_after_X_retries(self) -> None: + t: Any = Transport( [{"exception": ConnectionError("abandon ship")}], connection_class=DummyConnection, ) @@ -268,8 +273,8 @@ def test_request_will_fail_after_X_retries(self): self.assertRaises(ConnectionError, t.perform_request, "GET", "/") self.assertEqual(4, len(t.get_connection().calls)) - def test_failed_connection_will_be_marked_as_dead(self): - t = Transport( + def test_failed_connection_will_be_marked_as_dead(self) -> None: + t: Any = Transport( [{"exception": ConnectionError("abandon ship")}] * 2, connection_class=DummyConnection, ) @@ -277,9 +282,9 @@ def test_failed_connection_will_be_marked_as_dead(self): self.assertRaises(ConnectionError, t.perform_request, "GET", "/") self.assertEqual(0, len(t.connection_pool.connections)) - def test_resurrected_connection_will_be_marked_as_live_on_success(self): + def test_resurrected_connection_will_be_marked_as_live_on_success(self) -> None: for method in ("GET", "HEAD"): - t = Transport([{}, {}], connection_class=DummyConnection) + t: Any = Transport([{}, {}], connection_class=DummyConnection) con1 = t.connection_pool.get_connection() con2 = t.connection_pool.get_connection() t.connection_pool.mark_dead(con1) @@ -289,16 +294,16 @@ def test_resurrected_connection_will_be_marked_as_live_on_success(self): self.assertEqual(1, len(t.connection_pool.connections)) self.assertEqual(1, len(t.connection_pool.dead_count)) - def test_sniff_will_use_seed_connections(self): - t = Transport([{"data": CLUSTER_NODES}], connection_class=DummyConnection) + def test_sniff_will_use_seed_connections(self) -> None: + t: Any = Transport([{"data": CLUSTER_NODES}], connection_class=DummyConnection) t.set_connections([{"data": "invalid"}]) t.sniff_hosts() self.assertEqual(1, len(t.connection_pool.connections)) self.assertEqual("http://1.1.1.1:123", t.get_connection().host) - def test_sniff_on_start_fetches_and_uses_nodes_list(self): - t = Transport( + def test_sniff_on_start_fetches_and_uses_nodes_list(self) -> None: + t: Any = Transport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, sniff_on_start=True, @@ -306,8 +311,8 @@ def test_sniff_on_start_fetches_and_uses_nodes_list(self): self.assertEqual(1, len(t.connection_pool.connections)) self.assertEqual("http://1.1.1.1:123", t.get_connection().host) - def test_sniff_on_start_ignores_sniff_timeout(self): - t = Transport( + def test_sniff_on_start_ignores_sniff_timeout(self) -> None: + t: Any = Transport( 
[{"data": CLUSTER_NODES}], connection_class=DummyConnection, sniff_on_start=True, @@ -318,8 +323,8 @@ def test_sniff_on_start_ignores_sniff_timeout(self): t.seed_connections[0].calls[0], ) - def test_sniff_uses_sniff_timeout(self): - t = Transport( + def test_sniff_uses_sniff_timeout(self) -> None: + t: Any = Transport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, sniff_timeout=42, @@ -330,8 +335,8 @@ def test_sniff_uses_sniff_timeout(self): t.seed_connections[0].calls[0], ) - def test_sniff_reuses_connection_instances_if_possible(self): - t = Transport( + def test_sniff_reuses_connection_instances_if_possible(self) -> None: + t: Any = Transport( [{"data": CLUSTER_NODES}, {"host": "1.1.1.1", "port": 123}], connection_class=DummyConnection, randomize_hosts=False, @@ -342,8 +347,8 @@ def test_sniff_reuses_connection_instances_if_possible(self): self.assertEqual(1, len(t.connection_pool.connections)) self.assertIs(connection, t.get_connection()) - def test_sniff_on_fail_triggers_sniffing_on_fail(self): - t = Transport( + def test_sniff_on_fail_triggers_sniffing_on_fail(self) -> None: + t: Any = Transport( [{"exception": ConnectionError("abandon ship")}, {"data": CLUSTER_NODES}], connection_class=DummyConnection, sniff_on_connection_fail=True, @@ -356,9 +361,11 @@ def test_sniff_on_fail_triggers_sniffing_on_fail(self): self.assertEqual("http://1.1.1.1:123", t.get_connection().host) @patch("opensearchpy.transport.Transport.sniff_hosts") - def test_sniff_on_fail_failing_does_not_prevent_retires(self, sniff_hosts): + def test_sniff_on_fail_failing_does_not_prevent_retires( + self, sniff_hosts: Any + ) -> None: sniff_hosts.side_effect = [TransportError("sniff failed")] - t = Transport( + t: Any = Transport( [{"exception": ConnectionError("abandon ship")}, {"data": CLUSTER_NODES}], connection_class=DummyConnection, sniff_on_connection_fail=True, @@ -373,8 +380,8 @@ def test_sniff_on_fail_failing_does_not_prevent_retires(self, sniff_hosts): self.assertEqual(1, len(conn_err.calls)) self.assertEqual(1, len(conn_data.calls)) - def test_sniff_after_n_seconds(self): - t = Transport( + def test_sniff_after_n_seconds(self) -> None: + t: Any = Transport( [{"data": CLUSTER_NODES}], connection_class=DummyConnection, sniffer_timeout=5, @@ -391,10 +398,10 @@ def test_sniff_after_n_seconds(self): self.assertEqual("http://1.1.1.1:123", t.get_connection().host) self.assertTrue(time.time() - 1 < t.last_sniff < time.time() + 0.01) - def test_sniff_7x_publish_host(self): + def test_sniff_7x_publish_host(self) -> None: # Test the response shaped when a 7.x node has publish_host set # and the returend data is shaped in the fqdn/ip:port format. 
- t = Transport( + t: Any = Transport( [{"data": CLUSTER_NODES_7x_PUBLISH_HOST}], connection_class=DummyConnection, sniff_timeout=42, diff --git a/test_opensearchpy/test_types/aliased_types.py b/test_opensearchpy/test_types/aliased_types.py index f7a93e09..6d4a5a64 100644 --- a/test_opensearchpy/test_types/aliased_types.py +++ b/test_opensearchpy/test_types/aliased_types.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_types/async_types.py b/test_opensearchpy/test_types/async_types.py index b26b5d67..e6275662 100644 --- a/test_opensearchpy/test_types/async_types.py +++ b/test_opensearchpy/test_types/async_types.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/test_types/sync_types.py b/test_opensearchpy/test_types/sync_types.py index d772342b..df6634c4 100644 --- a/test_opensearchpy/test_types/sync_types.py +++ b/test_opensearchpy/test_types/sync_types.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to diff --git a/test_opensearchpy/utils.py b/test_opensearchpy/utils.py index 0c07a012..50682d35 100644 --- a/test_opensearchpy/utils.py +++ b/test_opensearchpy/utils.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -26,11 +27,12 @@ import time +from typing import Any from opensearchpy import OpenSearch -def wipe_cluster(client): +def wipe_cluster(client: Any) -> None: """Wipes a cluster clean between test cases""" close_after_wipe = False try: @@ -58,9 +60,9 @@ def wipe_cluster(client): client.close() -def wipe_cluster_settings(client): +def wipe_cluster_settings(client: Any) -> None: settings = client.cluster.get_settings() - new_settings = {} + new_settings: Any = {} for name, value in settings.items(): if value: new_settings.setdefault(name, {}) @@ -70,7 +72,7 @@ def wipe_cluster_settings(client): client.cluster.put_settings(body=new_settings) -def wipe_snapshots(client): +def wipe_snapshots(client: Any) -> None: """Deletes all the snapshots and repositories from the cluster""" in_progress_snapshots = [] @@ -95,14 +97,14 @@ def wipe_snapshots(client): assert in_progress_snapshots == [] -def wipe_data_streams(client): +def wipe_data_streams(client: Any) -> None: try: client.indices.delete_data_stream(name="*", expand_wildcards="all") except Exception: client.indices.delete_data_stream(name="*") -def wipe_indices(client): +def wipe_indices(client: Any) -> None: client.indices.delete( index="*,-.ds-ilm-history-*", expand_wildcards="all", @@ -110,7 +112,7 @@ def wipe_indices(client): ) -def wipe_searchable_snapshot_indices(client): +def wipe_searchable_snapshot_indices(client: Any) -> None: cluster_metadata = client.cluster.state( metric="metadata", filter_path="metadata.indices.*.settings.index.store.snapshot", @@ -120,17 +122,17 @@ def wipe_searchable_snapshot_indices(client): client.indices.delete(index=index) -def wipe_slm_policies(client): +def wipe_slm_policies(client: Any) -> None: for policy in client.slm.get_lifecycle(): client.slm.delete_lifecycle(policy_id=policy["name"]) -def wipe_auto_follow_patterns(client): +def wipe_auto_follow_patterns(client: Any) -> None: for pattern in client.ccr.get_auto_follow_pattern()["patterns"]: 
client.ccr.delete_auto_follow_pattern(name=pattern["name"]) -def wipe_node_shutdown_metadata(client): +def wipe_node_shutdown_metadata(client: Any) -> None: shutdown_status = client.shutdown.get_node() # If response contains these two keys the feature flag isn't enabled # on this cluster so skip this step now. @@ -142,14 +144,14 @@ def wipe_node_shutdown_metadata(client): client.shutdown.delete_node(node_id=node_id) -def wipe_tasks(client): +def wipe_tasks(client: Any) -> None: tasks = client.tasks.list() for node_name, node in tasks.get("node", {}).items(): for task_id in node.get("tasks", ()): client.tasks.cancel(task_id=task_id, wait_for_completion=True) -def wait_for_pending_tasks(client, filter, timeout=30): +def wait_for_pending_tasks(client: Any, filter: Any, timeout: int = 30) -> None: end_time = time.time() + timeout while time.time() < end_time: tasks = client.cat.tasks(detailed=True).split("\n") @@ -157,7 +159,7 @@ def wait_for_pending_tasks(client, filter, timeout=30): break -def wait_for_pending_datafeeds_and_jobs(client, timeout=30): +def wait_for_pending_datafeeds_and_jobs(client: Any, timeout: int = 30) -> None: end_time = time.time() + timeout while time.time() < end_time: if ( @@ -170,7 +172,7 @@ def wait_for_pending_datafeeds_and_jobs(client, timeout=30): break -def wait_for_cluster_state_updates_to_finish(client, timeout=30): +def wait_for_cluster_state_updates_to_finish(client: Any, timeout: int = 30) -> None: end_time = time.time() + timeout while time.time() < end_time: if not client.cluster.pending_tasks().get("tasks", ()): diff --git a/utils/build-dists.py b/utils/build-dists.py index e6706c57..569ed7ea 100644 --- a/utils/build-dists.py +++ b/utils/build-dists.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -37,13 +38,14 @@ import shutil import sys import tempfile +from typing import Any base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) tmp_dir = None -@contextlib.contextmanager -def set_tmp_dir(): +@contextlib.contextmanager # type: ignore +def set_tmp_dir() -> None: global tmp_dir tmp_dir = tempfile.mkdtemp() yield tmp_dir @@ -51,7 +53,7 @@ def set_tmp_dir(): tmp_dir = None -def run(*argv, expect_exit_code=0): +def run(*argv: Any, expect_exit_code: int = 0) -> None: global tmp_dir if tmp_dir is None: os.chdir(base_dir) @@ -69,9 +71,9 @@ def run(*argv, expect_exit_code=0): exit(exit_code or 1) -def test_dist(dist): - with set_tmp_dir() as tmp_dir: - dist_name = re.match( +def test_dist(dist: Any) -> None: + with set_tmp_dir() as tmp_dir: # type: ignore + dist_name = re.match( # type: ignore r"^(opensearchpy\d*)-", os.path.basename(dist) .replace("opensearch-py", "opensearchpy") @@ -179,7 +181,7 @@ def test_dist(dist): ) -def main(): +def main() -> None: run("git", "checkout", "--", "setup.py", "opensearchpy/") run("rm", "-rf", "build/", "dist/*", "*.egg-info", ".eggs") run("python", "setup.py", "sdist", "bdist_wheel") @@ -187,9 +189,13 @@ def main(): # Grab the major version to be used as a suffix. 
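A small sanity check of the extraction performed below, under the assumption that opensearchpy/_version.py now declares the annotated form `__versionstr__: str = "..."`; the sample contents are illustrative.

```python
# Verifies the regex used by build-dists against the annotated declaration.
import re

data = '__versionstr__: str = "2.4.0"\n'  # assumed file contents
m = re.search(r"^__versionstr__: str\s+=\s+[\"\']([^\"\']+)[\"\']", data, re.M)
assert m is not None
version = m.group(1)
major_version = version.split(".")[0]
assert (version, major_version) == ("2.4.0", "2")
```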
version_path = os.path.join(base_dir, "opensearchpy/_version.py") with open(version_path) as f: - version = re.search( - r"^__versionstr__\s+=\s+[\"\']([^\"\']+)[\"\']", f.read(), re.M - ).group(1) + data = f.read() + m = re.search(r"^__versionstr__: str\s+=\s+[\"\']([^\"\']+)[\"\']", data, re.M) + if m: + version = m.group(1) + else: + raise Exception(f"Invalid version {data}") + major_version = version.split(".")[0] # If we're handed a version from the build manager we @@ -211,7 +217,7 @@ def main(): # alpha/beta/rc -> aN/bN/rcN else: pre_number = re.search(r"-(a|b|rc)(?:lpha|eta|)(\d+)$", expect_version) - version = version + pre_number.group(1) + pre_number.group(2) + version = version + pre_number.group(1) + pre_number.group(2) # type: ignore expect_version = re.sub( r"(?:-(?:SNAPSHOT|alpha\d+|beta\d+|rc\d+))+$", "", expect_version diff --git a/utils/generate-api.py b/utils/generate-api.py index 40aea2ba..792446dd 100644 --- a/utils/generate-api.py +++ b/utils/generate-api.py @@ -4,6 +4,11 @@ # The OpenSearch Contributors require contributions made to # this file be licensed under the Apache-2.0 license or a # compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. + + # # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. @@ -25,14 +30,17 @@ # specific language governing permissions and limitations # under the License. +import json import os import re from functools import lru_cache from itertools import chain, groupby from operator import itemgetter from pathlib import Path +from typing import Any, Dict import black +import deepmerge import requests import unasync import urllib3 @@ -71,41 +79,32 @@ ) -def blacken(filename): +def blacken(filename: Any) -> None: runner = CliRunner() result = runner.invoke(black.main, [str(filename)]) assert result.exit_code == 0, result.output @lru_cache() -def is_valid_url(url): +def is_valid_url(url: str) -> bool: return 200 <= http.request("HEAD", url).status < 400 class Module: - def __init__(self, namespace, is_pyi=False): - self.namespace = namespace - self.is_pyi = is_pyi - self._apis = [] + def __init__(self, namespace: str) -> None: + self.namespace: Any = namespace + self._apis: Any = [] self.parse_orig() - if not is_pyi: - self.pyi = Module(namespace, is_pyi=True) - self.pyi.orders = self.orders[:] - - def add(self, api): + def add(self, api: Any) -> None: self._apis.append(api) - def parse_orig(self): + def parse_orig(self) -> None: self.orders = [] - self.header = "" - if self.is_pyi is True: - self.header = "from typing import Any, Collection, MutableMapping, Optional, Tuple, Union\n\n" + self.header = "from typing import Any, Collection, Optional, Tuple, Union\n\n" namespace_new = "".join(word.capitalize() for word in self.namespace.split("_")) - self.header = ( - self.header + "class " + namespace_new + "Client(NamespacedClient):" - ) + self.header += "class " + namespace_new + "Client(NamespacedClient):" if os.path.exists(self.filepath): with open(self.filepath) as f: content = f.read() @@ -120,22 +119,27 @@ def parse_orig(self): for line in content.split("\n"): header_lines.append(line) if line.startswith("class"): + if "security.py" in str(self.filepath): + # TODO: FIXME, import code + header_lines.append( + " from ._patch import health_check, update_audit_config # type: ignore" + ) break self.header = "\n".join(header_lines) self.orders = re.findall( r"\n (?:async )?def ([a-z_]+)\(", content, re.MULTILINE ) - def _position(self, api): 
+ def _position(self, api: Any) -> Any: try: return self.orders.index(api.name) except ValueError: return len(self.orders) - def sort(self): + def sort(self) -> None: self._apis.sort(key=self._position) - def dump(self): + def dump(self) -> None: self.sort() # This code snippet adds headers to each generated module indicating that the code is generated. @@ -230,22 +234,15 @@ def dump(self): with open(self.filepath, "w") as f: f.write(file_content) - if not self.is_pyi: - self.pyi.dump() - @property - def filepath(self): - return ( - CODE_ROOT - / f"opensearchpy/_async/client/{self.namespace}.py{'i' if self.is_pyi else ''}" - ) + def filepath(self) -> Any: + return CODE_ROOT / f"opensearchpy/_async/client/{self.namespace}.py" class API: - def __init__(self, namespace, name, definition, is_pyi=False): + def __init__(self, namespace: str, name: str, definition: Any) -> None: self.namespace = namespace self.name = name - self.is_pyi = is_pyi # overwrite the dict to maintain key order definition["params"] = { @@ -256,6 +253,7 @@ def __init__(self, namespace, name, definition, is_pyi=False): self.description = "" self.doc_url = "" self.stability = self._def.get("stability", "stable") + self.deprecation_message = self._def.get("deprecation_message") if isinstance(definition["documentation"], str): self.doc_url = definition["documentation"] @@ -287,15 +285,16 @@ def __init__(self, namespace, name, definition, is_pyi=False): print(f"URL {revised_url!r}, falling back on {self.doc_url!r}") @property - def all_parts(self): + def all_parts(self) -> Dict[str, str]: parts = {} for url in self._def["url"]["paths"]: parts.update(url.get("parts", {})) for p in parts: - parts[p]["required"] = all( - p in url.get("parts", {}) for url in self._def["url"]["paths"] - ) + if "required" not in parts[p]: + parts[p]["required"] = all( + p in url.get("parts", {}) for url in self._def["url"]["paths"] + ) parts[p]["type"] = "Any" # This piece of logic corresponds to calling @@ -311,7 +310,7 @@ def all_parts(self): dynamic, components = self.url_parts - def ind(item): + def ind(item: Any) -> Any: try: return components.index(item[0]) except ValueError: @@ -321,29 +320,29 @@ def ind(item): return parts @property - def params(self): + def params(self) -> Any: parts = self.all_parts params = self._def.get("params", {}) return chain( - ((p, parts[p]) for p in parts if parts[p]["required"]), + ((p, parts[p]) for p in parts if parts[p]["required"]), # type: ignore (("body", self.body),) if self.body else (), ( (p, parts[p]) for p in parts - if not parts[p]["required"] and p not in params + if not parts[p]["required"] and p not in params # type: ignore ), sorted(params.items(), key=lambda x: (x[0] not in parts, x[0])), ) @property - def body(self): + def body(self) -> Any: b = self._def.get("body", {}) if b: b.setdefault("required", False) return b @property - def query_params(self): + def query_params(self) -> Any: return ( k for k in sorted(self._def.get("params", {}).keys()) @@ -351,7 +350,7 @@ def query_params(self): ) @property - def all_func_params(self): + def all_func_params(self) -> Any: """Parameters that will be in the '@query_params' decorator list and parameters that will be in the function signature. 
This doesn't include @@ -364,23 +363,27 @@ def all_func_params(self): return params @property - def path(self): + def path(self) -> Any: return max( (path for path in self._def["url"]["paths"]), key=lambda p: len(re.findall(r"\{([^}]+)\}", p["path"])), ) @property - def method(self): + def method(self) -> Any: # To adhere to the HTTP RFC we shouldn't send # bodies in GET requests. default_method = self.path["methods"][0] + if self.name == "refresh" or self.name == "flush": + return "POST" if self.body and default_method == "GET" and "POST" in self.path["methods"]: return "POST" + if "POST" in self.path["methods"] and "PUT" in self.path["methods"] and self.name != "bulk": + return "PUT" return default_method @property - def url_parts(self): + def url_parts(self) -> Any: path = self.path["path"] dynamic = "{" in path @@ -401,21 +404,18 @@ def url_parts(self): return dynamic, parts @property - def required_parts(self): + def required_parts(self) -> Any: parts = self.all_parts - required = [p for p in parts if parts[p]["required"]] + required = [p for p in parts if parts[p]["required"]] # type: ignore if self.body.get("required"): required.append("body") return required - def to_python(self): - if self.is_pyi: - t = jinja_env.get_template("base_pyi") - else: - try: - t = jinja_env.get_template(f"overrides/{self.namespace}/{self.name}") - except TemplateNotFound: - t = jinja_env.get_template("base") + def to_python(self) -> Any: + try: + t = jinja_env.get_template(f"overrides/{self.namespace}/{self.name}") + except TemplateNotFound: + t = jinja_env.get_template("base") return t.render( api=self, @@ -424,7 +424,7 @@ def to_python(self): ) -def read_modules(): +def read_modules() -> Any: modules = {} # Load the OpenAPI specification file @@ -437,6 +437,9 @@ def read_modules(): for path in data["paths"]: for x in data["paths"][path]: + if data["paths"][path][x]["x-operation-group"] == "nodes.hot_threads": + if "deprecated" in data["paths"][path][x]: + continue data["paths"][path][x].update({"path": path, "method": x}) list_of_dicts.append(data["paths"][path][x]) @@ -468,18 +471,31 @@ def read_modules(): for m in params: A = dict(type=m["schema"]["type"], description=m["description"]) + + if "default" in m["schema"]: + A.update({"default": m["schema"]["default"]}) + if "enum" in m["schema"]: A.update({"type": "enum"}) A.update({"options": m["schema"]["enum"]}) - if "deprecated" in m: - A.update({"deprecated": m["deprecated"]}) + if "deprecated" in m["schema"]: + A.update({"deprecated": m["schema"]["deprecated"]}) + A.update( + {"deprecation_message": m["schema"]["x-deprecation-message"]} + ) params_new.update({m["name"]: A}) # Removing the deprecated "type" - if "type" in params_new: + if p["x-operation-group"] != "nodes.hot_threads" and "type" in params_new: params_new.pop("type") + if ( + p["x-operation-group"] == "cluster.health" + and "ensure_node_commissioned" in params_new + ): + params_new.pop("ensure_node_commissioned") + if bool(params_new): p.update({"params": params_new}) @@ -491,6 +507,9 @@ def read_modules(): if "description" in n: B.update({"description": n["description"]}) + if "x-enum-options" in n["schema"]: + B.update({"options": n["schema"]["x-enum-options"]}) + deprecated_new = {} if "deprecated" in n: B.update({"deprecated": n["deprecated"]}) @@ -524,6 +543,8 @@ def read_modules(): # Group the data in the current group by the "path" key paths = [] + all_paths_have_deprecation = True + for key2, value2 in groupby(value, key=itemgetter("path")): # Extract the HTTP methods from the data in the current
subgroup methods = [] @@ -536,6 +557,11 @@ def read_modules(): documentation = {"description": z["description"]} api.update({"documentation": documentation}) + if "x-deprecation-message" in z: + x_deprecation_message = z["x-deprecation-message"] + else: + all_paths_have_deprecation = False + if "params" not in api and "params" in z: api.update({"params": z["params"]}) @@ -571,8 +597,8 @@ def read_modules(): if "POST" in methods or "PUT" in methods: api.update( { - "stability": "stable", - "visibility": "public", + "stability": "stable", # type: ignore + "visibility": "public", # type: ignore "headers": { "accept": ["application/json"], "content_type": ["application/json"], @@ -582,8 +608,8 @@ def read_modules(): else: api.update( { - "stability": "stable", - "visibility": "public", + "stability": "stable", # type: ignore + "visibility": "public", # type: ignore "headers": {"accept": ["application/json"]}, } ) @@ -603,17 +629,31 @@ def read_modules(): paths.append({"path": key2, "methods": methods}) api.update({"url": {"paths": paths}}) + if all_paths_have_deprecation and x_deprecation_message is not None: + api.update({"deprecation_message": x_deprecation_message}) + + api = apply_patch(namespace, name, api) if namespace not in modules: modules[namespace] = Module(namespace) modules[namespace].add(API(namespace, name, api)) - modules[namespace].pyi.add(API(namespace, name, api, is_pyi=True)) return modules -def dump_modules(modules): +def apply_patch(namespace: str, name: str, api: Any) -> Any: + override_file_path = ( + CODE_ROOT / "utils/templates/overrides" / namespace / f"{name}.json" + ) + if os.path.exists(override_file_path): + with open(override_file_path) as f: + override_json = json.load(f) + api = deepmerge.always_merger.merge(api, override_json) + return api + + +def dump_modules(modules: Any) -> None: for mod in modules.values(): mod.dump() @@ -636,10 +676,9 @@ def dump_modules(modules): filepaths = [] for root, _, filenames in os.walk(CODE_ROOT / "opensearchpy/_async"): for filename in filenames: - if filename.rpartition(".")[-1] in ( - "py", - "pyi", - ) and not filename.startswith("utils.py"): + if filename.rpartition(".")[-1] in ("py",) and not filename.startswith( + "utils.py" + ): filepaths.append(os.path.join(root, filename)) unasync.unasync_files(filepaths, rules) diff --git a/utils/generated_file_headers.txt b/utils/generated_file_headers.txt index 16c8aba5..135828ce 100644 --- a/utils/generated_file_headers.txt +++ b/utils/generated_file_headers.txt @@ -1,8 +1,8 @@ # ---------------------------------------------------- -# THIS CODE IS GENERATED. MANUAL EDITS WILL BE LOST. -# -# To contribute, please make necessary modifications to either "Python generator": +# THIS CODE IS GENERATED AND MANUAL EDITS WILL BE LOST. 
+# +# To contribute, kindly make essential modifications through either the "opensearch-py client generator": # https://github.com/opensearch-project/opensearch-py/blob/main/utils/generate-api.py -# or "OpenAPI specs": +# or the "OpenSearch API specification" available at: # https://github.com/opensearch-project/opensearch-api-specification/blob/main/OpenSearch.openapi.json -# ----------------------------------------------------- \ No newline at end of file +# ----------------------------------------------------- diff --git a/utils/license-headers.py b/utils/license-headers.py index 255097d8..e0f31b59 100644 --- a/utils/license-headers.py +++ b/utils/license-headers.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # SPDX-License-Identifier: Apache-2.0 # # The OpenSearch Contributors require contributions made to @@ -6,24 +7,6 @@ # # Modifications Copyright OpenSearch Contributors. See # GitHub history for details. -# -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - """Script which verifies that all source files have a license header. Has two modes: 'fix' and 'check'. 'fix' fixes problems, 'check' will @@ -33,20 +16,20 @@ import os import re import sys -from itertools import chain from typing import Iterator, List -lines_to_keep = ["# -*- coding: utf-8 -*-\n", "#!/usr/bin/env python\n"] -license_header_lines = [ - "# SPDX-License-Identifier: Apache-2.0\n", - "#\n", - "# The OpenSearch Contributors require contributions made to\n", - "# this file be licensed under the Apache-2.0 license or a\n", - "# compatible open source license.\n", - "#\n", - "# Modifications Copyright OpenSearch Contributors. See\n", - "# GitHub history for details.\n", -] +lines_to_keep = ["# -*- coding: utf-8 -*-", "#!/usr/bin/env python"] + +license_header = """ +# SPDX-License-Identifier: Apache-2.0 +# +# The OpenSearch Contributors require contributions made to +# this file be licensed under the Apache-2.0 license or a +# compatible open source license. +# +# Modifications Copyright OpenSearch Contributors. See +# GitHub history for details. 
+""".strip() def find_files_to_fix(sources: List[str]) -> Iterator[str]: @@ -65,22 +48,20 @@ def find_files_to_fix(sources: List[str]) -> Iterator[str]: def does_file_need_fix(filepath: str) -> bool: - if not re.search(r"\.pyi?$", filepath): + if not re.search(r"\.py$", filepath): return False + existing_header = "" with open(filepath, mode="r") as f: - first_license_line = None for line in f: - if line == license_header_lines[0]: - first_license_line = line + line = line.strip() + if len(line) == 0 or line in lines_to_keep: + pass + elif line[0] == "#": + existing_header += line + existing_header += "\n" + else: break - elif line not in lines_to_keep: - return True - for header_line, line in zip( - license_header_lines, chain((first_license_line,), f) - ): - if line != header_line: - return True - return False + return not existing_header.startswith(license_header) def add_header_to_file(filepath: str) -> None: @@ -88,16 +69,16 @@ def add_header_to_file(filepath: str) -> None: lines = list(f) i = 0 for i, line in enumerate(lines): - if line not in lines_to_keep: + if len(line) > 0 and line not in lines_to_keep: break - lines = lines[:i] + license_header_lines + lines[i:] + lines = lines[:i] + [license_header] + lines[i:] with open(filepath, mode="w") as f: f.truncate() f.write("".join(lines)) print(f"Fixed {os.path.relpath(filepath, os.getcwd())}") -def main(): +def main() -> None: mode = sys.argv[1] assert mode in ("fix", "check") sources = [os.path.abspath(x) for x in sys.argv[2:]] diff --git a/utils/templates/base b/utils/templates/base index 4a1249ed..54db3451 100644 --- a/utils/templates/base +++ b/utils/templates/base @@ -1,6 +1,6 @@ - @query_params({{ api.query_params|map("tojson")|join(", ")}}) - async def {{ api.name }}(self, {% include "func_params" %}): + @query_params({{ api.query_params|map("tojson")|join(", ")}}) + async def {{ api.name }}(self, {% include "func_params" %}) -> Any: """ {% if api.description %} {{ api.description|replace("\n", " ")|wordwrap(wrapstring="\n ") }} @@ -20,13 +20,20 @@ {% if api.params|list|length %} {% for p, info in api.params %} + {% if info.description %} {% filter wordwrap(72, wrapstring="\n ") %} - :arg {{ p }}: {{ info.description }}{% if info.options %} Valid choices: {{ info.options|join(", ") }}{% endif %}{% if info.default %} Default: {{ info.default }}{% endif %} + :arg {{ p }}{% if info.deprecated %} (Deprecated: {{ info['deprecation_message'][:-1] }}.){% endif %}: {{ info.description }} {% if info.options %}Valid choices are {{ info.options|join(", ") }}.{% endif %} + {% if info.default is defined %}{% if info.default is not none %}{% if info.default is sameas(false) %}Default is false.{% else %}Default is {{ info.default }}.{% endif %}{% endif %}{% endif %} {% endfilter %} + {% endif %} {% endfor %} {% endif %} """ + {% if api.deprecation_message %} + from warnings import warn + warn("Deprecated: {{ api.deprecation_message }}") + {% endif %} {% include "substitutions" %} {% include "required" %} {% if api.body.serialize == "bulk" %} diff --git a/utils/templates/base_pyi b/utils/templates/base_pyi deleted file mode 100644 index c4dbde15..00000000 --- a/utils/templates/base_pyi +++ /dev/null @@ -1,2 +0,0 @@ - - async def {{ api.name }}(self, {% include "func_params_pyi" %}) -> {% if api.method == 'HEAD' %}bool{% else %}Any{% endif %}: ... 
diff --git a/utils/templates/func_params b/utils/templates/func_params index 067e8f12..cbb976ed 100644 --- a/utils/templates/func_params +++ b/utils/templates/func_params @@ -1,14 +1,15 @@ {% for p, info in api.all_parts.items() %} - {% if info.required %}{{ p }}, {% endif %} + {% if info.required %}{{ p }}: {{ info.type }}, {% endif %} {% endfor %} {% if api.body %} - body{% if not api.body.required %}=None{% endif %}, + body{% if not api.body.required %}: Any=None{% else %}: Any{% endif %}, {% endif %} {% for p, info in api.all_parts.items() %} - {% if not info.required %}{{ p }}=None, {% endif %} + {% if not info.required and not info.type == 'Any' %}{{ p }}: Optional[{{ info.type }}]=None, {% endif %} + {% if not info.required and info.type == 'Any' %}{{ p }}: {{ info.type }}=None, {% endif %} {% endfor %} -params=None, -headers=None +params: Any=None, +headers: Any=None, \ No newline at end of file diff --git a/utils/templates/func_params_pyi b/utils/templates/func_params_pyi deleted file mode 100644 index cd48f9a6..00000000 --- a/utils/templates/func_params_pyi +++ /dev/null @@ -1,26 +0,0 @@ -{% for p, info in api.all_parts.items() %} - {% if info.required %}{{ p }}: {{ info.type }}, {% endif %} -{% endfor %} - -*, - -{% if api.body %} - body{% if not api.body.required %}: Optional[Any]=...{% else %}: Any{% endif %}, -{% endif %} - -{% for p, info in api.all_parts.items() %} - {% if not info.required %}{{ p }}: Optional[{{ info.type }}]=..., {% endif %} -{% endfor %} - -{% for p in api.query_params %} - {{ p }}: Optional[Any]=..., -{% endfor %} - -{% for p, p_type in global_query_params.items() %} - {% if p not in api.all_func_params %} - {{ p }}: {{ p_type }}=..., - {% endif %} -{% endfor %} - -params: Optional[MutableMapping[str, Any]]=..., -headers: Optional[MutableMapping[str, str]]=..., diff --git a/utils/templates/overrides/indices/put_mapping.json b/utils/templates/overrides/indices/put_mapping.json new file mode 100644 index 00000000..4409c446 --- /dev/null +++ b/utils/templates/overrides/indices/put_mapping.json @@ -0,0 +1,20 @@ +{ + "url": { + "paths": [ + { + "path": "/{index}/_mapping", + "methods": [ + "POST", + "PUT" + ], + "parts": { + "index": { + "type": "string", + "description": "Comma-separated list of indices; use `_all` or empty string to perform the operation on all indices.", + "required": false + } + } + } + ] + } +} \ No newline at end of file
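Finally, to connect the new put_mapping.json override above with the apply_patch helper added in generate-api.py: a small demonstration of how deepmerge.always_merger folds an override into an API definition. The dictionaries are toy stand-ins; the behavior shown relies on always_merger's list-append strategy.

```python
import deepmerge

# Toy stand-ins for a generated API definition and a JSON override file.
api = {"url": {"paths": [{"path": "/{index}/_mapping", "methods": ["PUT"]}]}}
override = {
    "url": {"paths": [{"path": "/{index}/_mapping", "methods": ["POST", "PUT"]}]}
}

merged = deepmerge.always_merger.merge(api, override)

# always_merger appends lists, so the override's path entry is added
# alongside the original rather than replacing it.
assert len(merged["url"]["paths"]) == 2
```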