diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index bd0ac738..f79929bc 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -14,6 +14,7 @@ jobs:
           - { os: 'ubuntu-latest', python-version: "3.10" }
           - { os: 'ubuntu-latest', python-version: "3.11" }
           - { os: 'macos-latest', python-version: "3.11" }
+          - { os: 'windows-latest', python-version: "3.11" }
 
     name: test (os=${{ matrix.entry.os }}, python=${{ matrix.entry.python-version }})
     continue-on-error: ${{ matrix.entry.experimental || false }}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5bd0ff04..c1c2286b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -15,6 +15,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
 - Added a utf-8 header to all .py files ([#557](https://github.com/opensearch-project/opensearch-py/pull/557))
 - Added `samples`, `benchmarks` and `docs` to `nox -rs format` ([#556](https://github.com/opensearch-project/opensearch-py/pull/556))
 - Added guide on the document lifecycle API(s) ([#559](https://github.com/opensearch-project/opensearch-py/pull/559))
+- Added Windows CI ([#569](https://github.com/opensearch-project/opensearch-py/pull/569))
 ### Changed
 - Generate `tasks` client from API specs ([#508](https://github.com/opensearch-project/opensearch-py/pull/508))
 - Generate `ingest` client from API specs ([#513](https://github.com/opensearch-project/opensearch-py/pull/513))
diff --git a/opensearchpy/_async/http_aiohttp.py b/opensearchpy/_async/http_aiohttp.py
index 3c7010ed..f36080e7 100644
--- a/opensearchpy/_async/http_aiohttp.py
+++ b/opensearchpy/_async/http_aiohttp.py
@@ -183,7 +183,9 @@ def __init__(
                 ssl_context.check_hostname = False
                 ssl_context.verify_mode = ssl.CERT_NONE
 
-            ca_certs = self.default_ca_certs() if ca_certs is None else ca_certs
+            if ca_certs is None:
+                ca_certs = self.default_ca_certs()
+
             if verify_certs:
                 if not ca_certs:
                     raise ImproperlyConfigured(
diff --git a/opensearchpy/connection/base.py b/opensearchpy/connection/base.py
index 59418bfa..54308c72 100644
--- a/opensearchpy/connection/base.py
+++ b/opensearchpy/connection/base.py
@@ -138,6 +138,11 @@ def __eq__(self, other: object) -> bool:
             raise TypeError("Unsupported equality check for %s and %s" % (self, other))
         return self.__hash__() == other.__hash__()
 
+    def __lt__(self, other: object) -> bool:
+        if not isinstance(other, Connection):
+            raise TypeError("Unsupported lt check for %s and %s" % (self, other))
+        return self.__hash__() < other.__hash__()
+
     def __hash__(self) -> int:
         return id(self)
 
diff --git a/test_opensearchpy/test_async/test_connection.py b/test_opensearchpy/test_async/test_connection.py
index 9413d0e8..7969e987 100644
--- a/test_opensearchpy/test_async/test_connection.py
+++ b/test_opensearchpy/test_async/test_connection.py
@@ -37,7 +37,7 @@
 import aiohttp
 import pytest
 from _pytest.mark.structures import MarkDecorator
-from mock import patch
+from mock import MagicMock, patch
 from multidict import CIMultiDict
 from pytest import raises
 
@@ -254,26 +254,29 @@ async def test_warns_if_using_non_default_ssl_kwargs_with_ssl_context(self) -> N
                 == str(w[0].message)
             )
 
-    @patch("ssl.SSLContext.load_verify_locations")
-    async def test_uses_given_ca_certs(
-        self, load_verify_locations: Any, tmp_path: Any
-    ) -> None:
+    @patch("ssl.SSLContext", return_value=MagicMock())
+    async def test_uses_given_ca_certs(self, ssl_context: Any, tmp_path: Any) -> None:
         path = tmp_path / "ca_certs.pem"
         path.touch()
+        ssl_context.return_value.load_verify_locations.return_value = None
         AIOHttpConnection(use_ssl=True, ca_certs=str(path))
-        load_verify_locations.assert_called_once_with(cafile=str(path))
+        ssl_context.return_value.load_verify_locations.assert_called_once_with(
+            cafile=str(path)
+        )
 
-    @patch("ssl.SSLContext.load_verify_locations")
-    async def test_uses_default_ca_certs(self, load_verify_locations: Any) -> None:
+    @patch("ssl.SSLContext", return_value=MagicMock())
+    async def test_uses_default_ca_certs(self, ssl_context: Any) -> None:
+        ssl_context.return_value.load_verify_locations.return_value = None
         AIOHttpConnection(use_ssl=True)
-        load_verify_locations.assert_called_once_with(
+        ssl_context.return_value.load_verify_locations.assert_called_once_with(
             cafile=Connection.default_ca_certs()
         )
 
-    @patch("ssl.SSLContext.load_verify_locations")
-    async def test_uses_no_ca_certs(self, load_verify_locations: Any) -> None:
+    @patch("ssl.SSLContext", return_value=MagicMock())
+    async def test_uses_no_ca_certs(self, ssl_context: Any) -> None:
+        ssl_context.return_value.load_verify_locations.return_value = None
         AIOHttpConnection(use_ssl=True, verify_certs=False)
-        load_verify_locations.assert_not_called()
+        ssl_context.return_value.load_verify_locations.assert_not_called()
 
     async def test_trust_env(self) -> None:
         con: Any = AIOHttpConnection(trust_env=True)
diff --git a/test_opensearchpy/test_async/test_transport.py b/test_opensearchpy/test_async/test_transport.py
index 4ef80707..b494f83f 100644
--- a/test_opensearchpy/test_async/test_transport.py
+++ b/test_opensearchpy/test_async/test_transport.py
@@ -272,7 +272,7 @@ async def test_add_connection(self) -> None:
 
     async def test_request_will_fail_after_X_retries(self) -> None:
         t: Any = AsyncTransport(
-            [{"exception": ConnectionError("abandon ship")}],
+            [{"exception": ConnectionError(None, "abandon ship", Exception())}],
             connection_class=DummyConnection,
         )
 
@@ -287,7 +287,7 @@ async def test_request_will_fail_after_X_retries(self) -> None:
 
     async def test_failed_connection_will_be_marked_as_dead(self) -> None:
         t: Any = AsyncTransport(
-            [{"exception": ConnectionError("abandon ship")}] * 2,
+            [{"exception": ConnectionError(None, "abandon ship", Exception())}] * 2,
             connection_class=DummyConnection,
         )
 
@@ -381,7 +381,10 @@ async def test_sniff_reuses_connection_instances_if_possible(self) -> None:
 
     async def test_sniff_on_fail_triggers_sniffing_on_fail(self) -> None:
         t: Any = AsyncTransport(
-            [{"exception": ConnectionError("abandon ship")}, {"data": CLUSTER_NODES}],
+            [
+                {"exception": ConnectionError(None, "abandon ship", Exception())},
+                {"data": CLUSTER_NODES},
+            ],
             connection_class=DummyConnection,
             sniff_on_connection_fail=True,
             max_retries=0,
@@ -407,7 +410,10 @@ async def test_sniff_on_fail_failing_does_not_prevent_retires(
     ) -> None:
         sniff_hosts.side_effect = [TransportError("sniff failed")]
         t: Any = AsyncTransport(
-            [{"exception": ConnectionError("abandon ship")}, {"data": CLUSTER_NODES}],
+            [
+                {"exception": ConnectionError(None, "abandon ship", Exception())},
+                {"data": CLUSTER_NODES},
+            ],
             connection_class=DummyConnection,
             sniff_on_connection_fail=True,
             max_retries=3,
diff --git a/test_opensearchpy/test_transport.py b/test_opensearchpy/test_transport.py
index dc1a8f9e..4b37e3ac 100644
--- a/test_opensearchpy/test_transport.py
+++ b/test_opensearchpy/test_transport.py
@@ -266,7 +266,7 @@ def test_add_connection(self) -> None:
 
     def test_request_will_fail_after_X_retries(self) -> None:
         t: Any = Transport(
-            [{"exception": ConnectionError("abandon ship")}],
+            [{"exception": ConnectionError(None, "abandon ship", Exception())}],
             connection_class=DummyConnection,
         )
 
@@ -275,7 +275,7 @@ def test_request_will_fail_after_X_retries(self) -> None:
 
     def test_failed_connection_will_be_marked_as_dead(self) -> None:
         t: Any = Transport(
-            [{"exception": ConnectionError("abandon ship")}] * 2,
+            [{"exception": ConnectionError(None, "abandon ship", Exception())}] * 2,
             connection_class=DummyConnection,
         )
 
@@ -349,7 +349,10 @@ def test_sniff_reuses_connection_instances_if_possible(self) -> None:
 
     def test_sniff_on_fail_triggers_sniffing_on_fail(self) -> None:
         t: Any = Transport(
-            [{"exception": ConnectionError("abandon ship")}, {"data": CLUSTER_NODES}],
+            [
+                {"exception": ConnectionError(None, "abandon ship", Exception())},
+                {"data": CLUSTER_NODES},
+            ],
             connection_class=DummyConnection,
             sniff_on_connection_fail=True,
             max_retries=0,
@@ -366,7 +369,10 @@ def test_sniff_on_fail_failing_does_not_prevent_retires(
     ) -> None:
         sniff_hosts.side_effect = [TransportError("sniff failed")]
         t: Any = Transport(
-            [{"exception": ConnectionError("abandon ship")}, {"data": CLUSTER_NODES}],
+            [
+                {"exception": ConnectionError(None, "abandon ship", Exception())},
+                {"data": CLUSTER_NODES},
+            ],
             connection_class=DummyConnection,
             sniff_on_connection_fail=True,
             max_retries=3,