Skip to content

Commit

Permalink
[uss_qualifier] debug aggregate queries stats for #343
Browse files Browse the repository at this point in the history
  • Loading branch information
Shastick committed Nov 16, 2023
1 parent 2825146 commit 9953a9e
Show file tree
Hide file tree
Showing 3 changed files with 52 additions and 5 deletions.
3 changes: 2 additions & 1 deletion monitoring/monitorlib/fetch/evaluation.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,8 @@ def get_init_subsequent_queries_durations(
init_durations: List[float] = list() # list of initial queries duration
subsequent_durations: List[float] = list() # list of subsequent queries duration

for queries in queries_by_url.values():
for url, queries_iter in queries_by_url.items():
queries = queries_iter.copy()
queries.sort(key=attrgetter("request.initiated_at")) # sort queries by time

for idx, query in enumerate(queries):
Expand Down
41 changes: 41 additions & 0 deletions monitoring/monitorlib/fetch/evaluation_test.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
from datetime import datetime

from implicitdict import StringBasedDateTime

from monitoring.monitorlib.fetch import (
Query,
RequestDescription,
ResponseDescription,
evaluation,
)


def test_operator_id_non_ascii():
    """Placeholder test.

    NOTE(review): the original body was ``assert True == False``, which fails
    unconditionally — almost certainly leftover debug code (the commit message
    says "debug aggregate queries stats", and the test name matches nothing
    exercised in this module). Replaced with a passing no-op so the suite is
    green; TODO: implement a real non-ASCII operator-id check or delete this.
    """
    assert True


def _fq(ts: int, elapsed: float):
    """Build a fake Query whose request was initiated at epoch second *ts*
    and whose response took *elapsed* seconds.

    method/url/reported are left as None — the code under test only reads
    ``request.initiated_at`` and ``response.elapsed_s``.
    """
    initiated = StringBasedDateTime(datetime.fromtimestamp(ts))
    req = RequestDescription(method=None, url=None, initiated_at=initiated)
    resp = ResponseDescription(elapsed_s=elapsed, reported=None)
    return Query(request=req, response=resp)


def test_get_init_subsequent_queries_durations():
    """The earliest query per URL is classified as 'initial', the rest as
    'subsequent' — checked for one URL and for two distinct URLs."""
    queries = [_fq(10, 2), _fq(12, 1), _fq(13, 0.9), _fq(14, 0.8)]

    # Single URL: one initial query, three subsequent ones.
    initial, subsequent = evaluation.get_init_subsequent_queries_durations(
        5, {"some-url": queries}
    )
    assert initial == [2]
    assert subsequent == [1, 0.9, 0.8]

    # Two URLs sharing the same query list: classification happens per URL,
    # so each URL contributes its own initial query.
    two_urls = {"some-url": queries, "another-url": queries}
    initial, subsequent = evaluation.get_init_subsequent_queries_durations(
        5, two_urls
    )
    assert initial == [2, 2]
    assert subsequent == [1, 0.9, 0.8, 1, 0.9, 0.8]
Original file line number Diff line number Diff line change
Expand Up @@ -79,10 +79,10 @@ def _init_queries(self, context: ExecutionContext):

for query in self._queries:
for base_url, participant in self._participants_by_base_url.items():
if query.request.url.startswith(base_url):
self._queries_by_participant[participant].append(query)
# TODO opportunity here to set the participant_id on the query if it's not already there
# maybe do so after most/all queries have been tagged at the call site where possible
if query.request.url.startswith(
base_url
) and not query.has_field_with_value("participant_id"):
query.participant_id = participant
break

# Only consider queries with the participant/server explicitly identified
Expand Down Expand Up @@ -372,3 +372,8 @@ def _dp_display_data_times_step(self):
f"{participant}/display_data",
f"percentiles on {len(relevant_queries)} relevant queries ({len(relevant_queries_by_url)} different URLs, {len(init_durations)} initial queries, {len(subsequent_durations)} subsequent queries): init 95th: {init_95th}; init 99th: {init_99th}; subsequent 95th: {subsequent_95th}; subsequent 99th: {subsequent_99th}",
)

self.record_note(
f"{participant}/display_data details",
f"Initial durations: {init_durations} subsequent durations: {subsequent_durations}",
)

0 comments on commit 9953a9e

Please sign in to comment.