Do not fail on empty log line (#818)
* do not fail on empty log line

* add unit test to check that log entries without a "log" key are handled
YevheniiSemendiak authored Jul 20, 2023
1 parent 16dc36e commit f50bdf1
Showing 2 changed files with 52 additions and 2 deletions.
platform_monitoring/logs.py (1 addition, 1 deletion)
@@ -361,7 +361,7 @@ async def _iterate(self) -> AsyncIterator[bytes]:
                 if self._debug and key:
                     yield f"~~~ From file {basename(key)}\n".encode()
                     key = ""
-                log = event["log"]
+                log = event.get("log", "")
                 yield self.encode_log(time_str, log)
             except Exception:
                 logger.exception("Invalid log entry: %r", line)
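The change in isolation: event["log"] raised KeyError for entries that carry only a timestamp, so the except branch above logged them as invalid and dropped them. A minimal sketch of that failure mode (the entry shape is taken from the unit test below; the reader's surrounding loop is paraphrased, not repository code):

# Minimal sketch of the failure this commit fixes; the entry shape is
# taken from the unit test below, the loop around it is paraphrased.
import json

line = b'{"time": "2023-07-20T12:00:00+00:00"}'  # entry without a "log" key
event = json.loads(line)

# Before: event["log"] raised KeyError, so the except branch logged
# "Invalid log entry" and the line was dropped.
# After: the entry is kept and encoded with an empty log string.
log = event.get("log", "")
assert log == ""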
tests/unit/test_logs.py (51 additions, 1 deletion)
@@ -1,5 +1,6 @@
 import json
 from collections.abc import AsyncIterator, Callable, Sequence
-from datetime import datetime, timezone
+from datetime import datetime, timezone, timedelta
 from typing import Any
 from unittest import mock

@@ -30,6 +31,27 @@ async def paginate(
 
         return _setup
 
+    @pytest.fixture
+    def setup_s3_key_content(
+        self,
+        s3_client: mock.Mock,
+    ) -> Callable[[dict[str, list[str]]], None]:
+        def _setup(content: dict[str, list[str]]) -> None:
+            async def get_object(
+                Key: str, *args: Any, **kwargs: Any
+            ) -> dict[str, Any]:
+                async def _iter() -> AsyncIterator[str]:
+                    for line in content[Key]:
+                        yield line
+                body = mock.AsyncMock()
+                body.iter_lines = mock.MagicMock()
+                body.iter_lines.return_value = mock.AsyncMock()
+                body.iter_lines.return_value.__aiter__.side_effect = _iter
+                return {"ContentType": "", "Body": body}
+
+            s3_client.get_object = get_object
+        return _setup
+
     @pytest.fixture
     def log_reader(self, s3_client: mock.Mock) -> S3LogReader:
         return S3LogReader(s3_client, "", "", "", "", "", "")
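The new fixture fakes the S3 get_object response by hand. A standalone sketch of the async-iteration pattern it relies on (illustrative names, not repository code): setting an async-generator function as the side_effect of an AsyncMock's __aiter__ produces a fresh iterator each time the body is consumed.

# Standalone illustration (not repository code) of the fixture's
# mocking pattern: AsyncMock.__aiter__ with an async-generator
# side_effect, so each "async for" over the body gets a fresh iterator.
import asyncio
from unittest import mock


async def main() -> None:
    async def _iter():
        for item in ("line1", "line2"):
            yield item

    body = mock.AsyncMock()
    body.iter_lines = mock.MagicMock()
    body.iter_lines.return_value = mock.AsyncMock()
    body.iter_lines.return_value.__aiter__.side_effect = _iter

    lines = [line async for line in body.iter_lines()]
    assert lines == ["line1", "line2"]


asyncio.run(main())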
@@ -63,3 +85,31 @@ async def test_keys_sorted_by_time(
         ]
         dt = datetime(2021, 1, 31, 12, 3, 0, tzinfo=timezone.utc)
         assert list(await log_reader._load_log_keys(dt)) == []
+
+    async def test_iterate_log_chunks(
+        self,
+        log_reader: S3LogReader,
+        setup_s3_pages: Callable[[Sequence[dict[str, Any]]], None],
+        setup_s3_key_content: Callable[[dict[str, Any]], None],
+    ) -> None:
+        now = datetime.now(tz=timezone.utc)
+        now_as_key = now.strftime("%Y%m%d%H%M")
+        setup_s3_pages([{"Contents": [{"Key": f"s3-key/{now_as_key}_0.gz"}]}])
+
+        log_lines = ["qwe\n", "line2", "line3", "", "\n\n\n", "something here"]
+
+        def later_iso(sec: int = 0) -> str:
+            return (now + timedelta(seconds=sec)).isoformat()
+        stored_lines = []
+        for i, line in enumerate(log_lines):
+            stored_line = {"time": later_iso(i)}
+            if line:
+                stored_line["log"] = line
+            stored_lines.append(json.dumps(stored_line))
+        setup_s3_key_content({f"s3-key/{now_as_key}_0.gz": stored_lines})
+
+        res = []
+        async with log_reader as it:
+            async for chunk in it:
+                res.append(chunk.decode()[:-1])  # drop the trailing "\n"
+        assert log_lines == res
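The round trip is worth spelling out: every stored entry carries a "time" field, the "log" field is omitted whenever the line is empty, and each chunk the reader yields ends with a newline that the test strips off, so the collected output matches log_lines exactly. Roughly what lands in the fake S3 object (timestamps illustrative):

# Roughly the JSON lines the test stores (timestamps illustrative):
# "time" is always present; "log" is omitted for empty lines, which is
# exactly the case event.get("log", "") now tolerates.
stored = [
    '{"time": "2023-07-20T12:00:00+00:00", "log": "qwe\\n"}',
    '{"time": "2023-07-20T12:00:03+00:00"}',  # the empty log line
]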
