diff --git a/CHANGELOG.md b/CHANGELOG.md index e6980e8..f75e8f0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,12 +9,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Changed +- Specify docker image when publishing a module - Improvements for AsyncConnector. - Improvements for Async Http workflow - Remove duplicated parts and make the code more uniform for async http workflow ### Fixed +- Replace urllib.parse.urljoin by posixpath.join in AsyncConnector - Fix tests for async version of connector. ## 1.16.0 - 2024-10-16 diff --git a/sekoia_automation/aio/connector.py b/sekoia_automation/aio/connector.py index c9c462c..52a12b0 100644 --- a/sekoia_automation/aio/connector.py +++ b/sekoia_automation/aio/connector.py @@ -7,7 +7,7 @@ from contextlib import asynccontextmanager from datetime import datetime from pathlib import Path -from urllib.parse import urljoin +from posixpath import join as urljoin from aiohttp import ClientSession from aiolimiter import AsyncLimiter @@ -116,6 +116,13 @@ async def _async_send_chunk( return events_ids + @property + def _batchapi_url(self): + if intake_server := self.configuration.intake_server: + return urljoin(intake_server, "batch") + else: + return urljoin(self.intake_url, "batch") + async def push_data_to_intakes( self, events: Sequence[EventType] ) -> list[str]: # pragma: no cover @@ -128,10 +135,7 @@ async def push_data_to_intakes( Returns: list[str]: """ - if intake_server := self.configuration.intake_server: - batch_api = urljoin(intake_server, "batch") - else: - batch_api = urljoin(self.intake_url, "batch") + self._last_events_time = datetime.utcnow() result_ids = [] @@ -139,7 +143,7 @@ async def push_data_to_intakes( async with self.session() as session: forwarders = [ - self._async_send_chunk(session, batch_api, chunk_index, chunk) + self._async_send_chunk(session, self._batchapi_url, chunk_index, chunk) for chunk_index, chunk in enumerate(chunks) ] async for ids in 
limit_concurrency(forwarders, self.max_concurrency_tasks): diff --git a/sekoia_automation/scripts/sync_library.py b/sekoia_automation/scripts/sync_library.py index a03e5b9..d06ea31 100755 --- a/sekoia_automation/scripts/sync_library.py +++ b/sekoia_automation/scripts/sync_library.py @@ -363,6 +363,7 @@ def load_module(self, module_path: Path): module_info = json.load(fd) docker_name = self._get_module_docker_name(module_info) + module_info["docker"] = f"{docker_name}:{module_info['version']}" if self.registry_check and not self.check_image_on_registry( docker_name, module_info["version"] ): diff --git a/tests/aio/test_connector.py b/tests/aio/test_connector.py index b3526ae..606e048 100644 --- a/tests/aio/test_connector.py +++ b/tests/aio/test_connector.py @@ -3,7 +3,7 @@ from collections.abc import AsyncGenerator from datetime import datetime from unittest.mock import Mock, patch -from urllib.parse import urljoin +from posixpath import join as urljoin import pytest from aiolimiter import AsyncLimiter @@ -206,6 +206,27 @@ async def test_async_connector_raise_error( assert str(e) == expected_error +@pytest.mark.parametrize( + 'base_url,expected_batchapi_url', + [ + ('http://intake.fake.url/', 'http://intake.fake.url/batch'), + ('http://fake.url/intake/', 'http://fake.url/intake/batch'), + ('http://fake.url/intake', 'http://fake.url/intake/batch'), + ] +) +def test_async_connector_batchapi_url(storage, mocked_trigger_logs, base_url: str, expected_batchapi_url: str): + with patch("sentry_sdk.set_tag"): + async_connector = DummyAsyncConnector(data_path=storage) + + async_connector.trigger_activation = "2022-03-14T11:16:14.236930Z" + async_connector.configuration = { + "intake_key": "", + "intake_server": base_url, + } + + assert async_connector._batchapi_url == expected_batchapi_url + + @pytest.mark.asyncio async def test_async_connector_async_next_run( async_connector: DummyAsyncConnector, faker: Faker diff --git a/tests/scripts/test_sync_library.py 
b/tests/scripts/test_sync_library.py index 7eadc9c..3ffa774 100644 --- a/tests/scripts/test_sync_library.py +++ b/tests/scripts/test_sync_library.py @@ -68,6 +68,7 @@ def test_no_module_success(tmp_module, module, action, trigger, connector, **kwa assert history[0].headers["Authorization"] == f"Bearer {API_KEY}" assert history[1].method == "PATCH" assert history[1].url == f"{SYMPOHNY_URL}/modules/{module['uuid']}" + assert "docker" in history[1].json() assert history[1].headers["Authorization"] == f"Bearer {API_KEY}" assert history[2].method == "GET" assert history[2].url == f"{SYMPOHNY_URL}/triggers/{trigger['uuid']}" @@ -105,6 +106,7 @@ def test_no_module_404(tmp_module, module, action, trigger, connector, **kwargs) assert history[0].headers["Authorization"] == f"Bearer {API_KEY}" assert history[1].method == "POST" assert history[1].url == f"{SYMPOHNY_URL}/modules" + assert "docker" in history[1].json() assert history[1].headers["Authorization"] == f"Bearer {API_KEY}" assert history[2].method == "GET" assert history[2].url == f"{SYMPOHNY_URL}/triggers/{trigger['uuid']}" @@ -168,6 +170,7 @@ def test_with_module(tmp_module, module, action, trigger, connector, **kwargs): assert history[1].method == "PATCH" assert history[1].url == f"{SYMPOHNY_URL}/modules/{module['uuid']}" assert history[1].headers["Authorization"] == f"Bearer {API_KEY}" + assert "docker" in history[1].json() assert history[2].method == "GET" assert history[2].url == f"{SYMPOHNY_URL}/triggers/{trigger['uuid']}" assert history[2].headers["Authorization"] == f"Bearer {API_KEY}"