Commit
fix tests
aspicer committed Jun 17, 2024
1 parent 70a0182 commit f17a540
Showing 2 changed files with 68 additions and 102 deletions.
167 changes: 68 additions & 99 deletions posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py
@@ -187,20 +187,22 @@ def _create_query_runner(
hogql_modifiers: Optional[HogQLQueryModifiers] = None,
limit_context: Optional[LimitContext] = None,
explicit_date: Optional[bool] = None,
skip_cache_tests: bool = False,
) -> TrendsQueryRunner:
query_series: list[EventsNode | ActionsNode] = [EventsNode(event="$pageview")] if series is None else series
self._test_cache(
date_from,
date_to,
interval,
query_series,
trends_filters,
breakdown,
filter_test_accounts,
hogql_modifiers,
limit_context,
explicit_date,
)
if not skip_cache_tests:
self._test_cache(
date_from,
date_to,
interval,
query_series,
trends_filters,
breakdown,
filter_test_accounts,
hogql_modifiers,
limit_context,
explicit_date,
)
query = TrendsQuery(
dateRange=InsightDateRange(date_from=date_from, date_to=date_to, explicitDate=explicit_date),
interval=interval,
@@ -344,12 +346,7 @@ def test_query_can_compute_from_cache(self):
kwargs["breakdown"] = BreakdownFilter(
**{"breakdown_type": BreakdownType.EVENT, "breakdown": "$browser", "breakdown_histogram_bin_count": 2}
)
self.assertFalse(self._create_query_runner(**kwargs).query_can_compute_from_cache())

kwargs["breakdown"] = BreakdownFilter(
**{"breakdown_type": BreakdownType.EVENT, "breakdown": "$browser", "breakdown_histogram_bin_count": 2}
)
self.assertFalse(self._create_query_runner(**kwargs).query_can_compute_from_cache())
self.assertFalse(self._create_query_runner(**kwargs, skip_cache_tests=True).query_can_compute_from_cache())

@patch(
"posthog.hogql_queries.insights.trends.test.test_trends_query_runner.TrendsQueryRunner.query_can_compute_from_cache",
@@ -2367,6 +2364,17 @@ def test_sampling_adjustment(self):
assert response.results[0]["aggregated_value"] > 5 and response.results[0]["aggregated_value"] < 30

def test_no_results_before_and_after_compare(self):
def spawn_runner():
return self._create_query_runner(
"-1w",
None,
IntervalType.DAY,
[EventsNode(event="$pageview")],
TrendsFilter(display=ChartDisplayType.ACTIONS_LINE_GRAPH),
BreakdownFilter(breakdown="breakdown_value", breakdown_type=BreakdownType.EVENT),
CompareFilter(compare=True),
)

for value in list(range(30)):
_create_event(
team=self.team,
@@ -2377,36 +2385,27 @@ def test_no_results_before_and_after_compare(self):
)

with freeze_time("2020-01-18"):
runner = self._create_query_runner(
runner = spawn_runner()
first_response = cast(CachedTrendsQueryResponse, runner.run(ExecutionMode.CALCULATE_BLOCKING_ALWAYS))
assert len(first_response.results) == 2
with freeze_time("2020-01-21"):
runner = spawn_runner()
with patch.object(runner, "to_cached_queries", wraps=runner.to_cached_queries) as wrapped:
second_response = cast(CachedTrendsQueryResponse, runner.run(ExecutionMode.CALCULATE_BLOCKING_ALWAYS))
assert len(second_response.results) == 2
wrapped.assert_called_once()

def test_no_results_before_and_after_no_compare(self):
def spawn_runner():
return self._create_query_runner(
"-1w",
None,
IntervalType.DAY,
[EventsNode(event="$pageview")],
TrendsFilter(display=ChartDisplayType.ACTIONS_LINE_GRAPH),
BreakdownFilter(breakdown="breakdown_value", breakdown_type=BreakdownType.EVENT),
CompareFilter(compare=True),
)
first_response = cast(CachedTrendsQueryResponse, runner.run(ExecutionMode.CALCULATE_BLOCKING_ALWAYS))
assert len(first_response.results) == 2

runner = self._create_query_runner(
"-1w",
None,
IntervalType.DAY,
[EventsNode(event="$pageview")],
TrendsFilter(display=ChartDisplayType.ACTIONS_LINE_GRAPH),
BreakdownFilter(breakdown="breakdown_value", breakdown_type=BreakdownType.EVENT),
CompareFilter(compare=True),
)
with (
freeze_time("2020-01-21"),
patch.object(runner, "to_cached_queries", wraps=runner.to_cached_queries) as wrapped,
):
second_response = cast(CachedTrendsQueryResponse, runner.run(ExecutionMode.CALCULATE_BLOCKING_ALWAYS))
assert len(second_response.results) == 2
wrapped.assert_called_once()

def test_no_results_before_and_after_no_compare(self):
for value in list(range(30)):
_create_event(
team=self.team,
@@ -2417,32 +2416,16 @@ def test_no_results_before_and_after_no_compare(self):
)

with freeze_time("2020-01-18"):
runner = self._create_query_runner(
"-1w",
None,
IntervalType.DAY,
[EventsNode(event="$pageview")],
TrendsFilter(display=ChartDisplayType.ACTIONS_LINE_GRAPH),
BreakdownFilter(breakdown="breakdown_value", breakdown_type=BreakdownType.EVENT),
)
runner = spawn_runner()
first_response = cast(CachedTrendsQueryResponse, runner.run(ExecutionMode.CALCULATE_BLOCKING_ALWAYS))
assert len(first_response.results) == 2

runner = self._create_query_runner(
"-1w",
None,
IntervalType.DAY,
[EventsNode(event="$pageview")],
TrendsFilter(display=ChartDisplayType.ACTIONS_LINE_GRAPH),
BreakdownFilter(breakdown="breakdown_value", breakdown_type=BreakdownType.EVENT),
)
with (
freeze_time("2020-01-21"),
patch.object(runner, "to_cached_queries", wraps=runner.to_cached_queries) as wrapped,
):
second_response = cast(CachedTrendsQueryResponse, runner.run(ExecutionMode.CALCULATE_BLOCKING_ALWAYS))
assert len(second_response.results) == 0
wrapped.assert_called_once()
with freeze_time("2020-01-21"):
runner = spawn_runner()
with patch.object(runner, "to_cached_queries", wraps=runner.to_cached_queries) as wrapped:
second_response = cast(CachedTrendsQueryResponse, runner.run(ExecutionMode.CALCULATE_BLOCKING_ALWAYS))
assert len(second_response.results) == 0
wrapped.assert_called_once()

# If more time has passed than the window, don't compute from cache (no savings)
def test_no_results_then_previous(self):
@@ -2486,8 +2469,8 @@ def test_no_results_then_previous(self):
wrapped.assert_not_called()

def test_no_results_then_current(self):
with freeze_time("2020-01-10"):
runner = self._create_query_runner(
def spawn_runner():
return self._create_query_runner(
"-1w",
None,
IntervalType.DAY,
@@ -2496,6 +2479,9 @@ def test_no_results_then_current(self):
BreakdownFilter(breakdown="breakdown_value", breakdown_type=BreakdownType.EVENT),
CompareFilter(compare=True),
)

with freeze_time("2020-01-10"):
runner = spawn_runner()
first_response = cast(CachedTrendsQueryResponse, runner.run(ExecutionMode.CALCULATE_BLOCKING_ALWAYS))
assert len(first_response.results) == 0

@@ -2508,26 +2494,16 @@ def test_no_results_then_current(self):
properties={"breakdown_value": f"{value % 2}"},
)

runner = self._create_query_runner(
"-1w",
None,
IntervalType.DAY,
[EventsNode(event="$pageview")],
TrendsFilter(display=ChartDisplayType.ACTIONS_LINE_GRAPH),
BreakdownFilter(breakdown="breakdown_value", breakdown_type=BreakdownType.EVENT),
CompareFilter(compare=True),
)
with (
freeze_time("2020-01-12"),
patch.object(runner, "to_cached_queries", wraps=runner.to_cached_queries) as wrapped,
):
second_response = cast(CachedTrendsQueryResponse, runner.run(ExecutionMode.CALCULATE_BLOCKING_ALWAYS))
assert len(second_response.results) == 2
wrapped.assert_called_once()
with freeze_time("2020-01-12"):
runner = spawn_runner()
with patch.object(runner, "to_cached_queries", wraps=runner.to_cached_queries) as wrapped:
second_response = cast(CachedTrendsQueryResponse, runner.run(ExecutionMode.CALCULATE_BLOCKING_ALWAYS))
assert len(second_response.results) == 2
wrapped.assert_called_once()

def test_no_results_then_no_results(self):
with freeze_time("2020-01-10"):
runner = self._create_query_runner(
def spawn_runner():
return self._create_query_runner(
"-1w",
None,
IntervalType.DAY,
@@ -2536,25 +2512,18 @@ def test_no_results_then_no_results(self):
BreakdownFilter(breakdown="breakdown_value", breakdown_type=BreakdownType.EVENT),
CompareFilter(compare=True),
)

with freeze_time("2020-01-10"):
runner = spawn_runner()
first_response = cast(CachedTrendsQueryResponse, runner.run(ExecutionMode.CALCULATE_BLOCKING_ALWAYS))
assert len(first_response.results) == 0

runner = self._create_query_runner(
"-1w",
None,
IntervalType.DAY,
[EventsNode(event="$pageview")],
TrendsFilter(display=ChartDisplayType.ACTIONS_LINE_GRAPH),
BreakdownFilter(breakdown="breakdown_value", breakdown_type=BreakdownType.EVENT),
CompareFilter(compare=True),
)
with (
freeze_time("2020-01-12"),
patch.object(runner, "to_cached_queries", wraps=runner.to_cached_queries) as wrapped,
):
second_response = cast(CachedTrendsQueryResponse, runner.run(ExecutionMode.CALCULATE_BLOCKING_ALWAYS))
assert len(second_response.results) == 0
wrapped.assert_called_once()
with freeze_time("2020-01-12"):
runner = spawn_runner()
with patch.object(runner, "to_cached_queries", wraps=runner.to_cached_queries) as wrapped:
second_response = cast(CachedTrendsQueryResponse, runner.run(ExecutionMode.CALCULATE_BLOCKING_ALWAYS))
assert len(second_response.results) == 0
wrapped.assert_called_once()

def test_cache_with_hours_does_nothing(self):
def spawn_runner():
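For readers skimming the hunks above, here is a minimal sketch of the test pattern the refactor converges on: a local `spawn_runner()` helper plus a wrapped `to_cached_queries` to check that a second run reuses cached partial results. It is not part of the commit; the test name is hypothetical, the import paths are assumptions based on names appearing in the diff, and the fixture methods (`self._create_query_runner`, event setup) are assumed to come from the surrounding test class in test_trends_query_runner.py.

```python
# Sketch only: assumes the same test class and fixtures as test_trends_query_runner.py.
from unittest.mock import patch

from freezegun import freeze_time

# Import paths below are assumptions inferred from names used in the diff.
from posthog.hogql_queries.query_runner import ExecutionMode
from posthog.schema import (
    BreakdownFilter,
    BreakdownType,
    ChartDisplayType,
    CompareFilter,
    EventsNode,
    IntervalType,
    TrendsFilter,
)


def test_compute_from_cache_pattern(self):
    def spawn_runner():
        # A fresh runner per run, so each call builds its own query/cache context.
        return self._create_query_runner(
            "-1w",
            None,
            IntervalType.DAY,
            [EventsNode(event="$pageview")],
            TrendsFilter(display=ChartDisplayType.ACTIONS_LINE_GRAPH),
            BreakdownFilter(breakdown="breakdown_value", breakdown_type=BreakdownType.EVENT),
            CompareFilter(compare=True),
        )

    # (Events for the queried window would be created here, as in the surrounding tests.)

    with freeze_time("2020-01-18"):
        # First run computes from scratch and populates the cache.
        spawn_runner().run(ExecutionMode.CALCULATE_BLOCKING_ALWAYS)

    with freeze_time("2020-01-21"):
        runner = spawn_runner()
        # Wrapping (not replacing) to_cached_queries lets the real logic run while
        # recording that the second run consulted the cached partial results.
        with patch.object(runner, "to_cached_queries", wraps=runner.to_cached_queries) as wrapped:
            runner.run(ExecutionMode.CALCULATE_BLOCKING_ALWAYS)
        wrapped.assert_called_once()
```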
3 changes: 0 additions & 3 deletions posthog/hogql_queries/query_runner.py
@@ -507,9 +507,6 @@ def run(
) -> CR | CacheMissResponse | QueryStatusResponse:
self.query_id = query_id or self.query_id

# always load cached response (even if we're always calculating) so that we can compare it
self.load_cached_response()

if execution_mode == ExecutionMode.CALCULATE_ASYNC_ALWAYS:
# We should always kick off async calculation and disregard the cache
return self.enqueue_async_calculation(refresh_requested=True, cache_key=self.cache_key, user=user)