diff --git a/ee/api/test/test_billing.py b/ee/api/test/test_billing.py index 2b4d38dd85bd8..1fd9293cfc04c 100644 --- a/ee/api/test/test_billing.py +++ b/ee/api/test/test_billing.py @@ -837,93 +837,3 @@ def mock_implementation(url: str, headers: Any = None, params: Any = None) -> Ma self.organization.refresh_from_db() assert self.organization.customer_trust_scores == {"recordings": 0, "events": 15, "rows_synced": 0} - - -class TestActivateBillingAPI(APILicensedTest): - def test_activate_success(self): - url = "/api/billing-v2/activate" - data = {"products": "product_1:plan_1,product_2:plan_2", "redirect_path": "custom/path"} - - response = self.client.get(url, data=data) - self.assertEqual(response.status_code, status.HTTP_302_FOUND) - - self.assertIn("/activate", response.url) - self.assertIn("products=product_1:plan_1,product_2:plan_2", response.url) - url_pattern = r"redirect_uri=http://[^/]+/custom/path" - self.assertRegex(response.url, url_pattern) - - def test_deprecated_activation_success(self): - url = "/api/billing-v2/activate" - data = {"products": "product_1:plan_1,product_2:plan_2", "redirect_path": "custom/path"} - - response = self.client.get(url, data=data) - self.assertEqual(response.status_code, status.HTTP_302_FOUND) - - self.assertIn("/activate", response.url) - self.assertIn("products=product_1:plan_1,product_2:plan_2", response.url) - url_pattern = r"redirect_uri=http://[^/]+/custom/path" - self.assertRegex(response.url, url_pattern) - - def test_activate_with_default_redirect_path(self): - url = "/api/billing-v2/activate" - data = { - "products": "product_1:plan_1,product_2:plan_2", - } - - response = self.client.get(url, data) - - self.assertEqual(response.status_code, status.HTTP_302_FOUND) - self.assertIn("products=product_1:plan_1,product_2:plan_2", response.url) - url_pattern = r"redirect_uri=http://[^/]+/organization/billing" - self.assertRegex(response.url, url_pattern) - - def test_activate_failure(self): - url = "/api/billing-v2/activate" - data = {"none": "nothing"} - - response = self.client.get(url, data) - - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - - def test_activate_with_plan_error(self): - url = "/api/billing-v2/activate" - data = {"plan": "plan"} - - response = self.client.get(url, data) - - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response.json(), - { - "attr": "plan", - "code": "invalid_input", - "detail": "The 'plan' parameter is no longer supported. 
Please use the 'products' parameter instead.", - "type": "validation_error", - }, - ) - - @patch("ee.billing.billing_manager.BillingManager.deactivate_products") - @patch("ee.billing.billing_manager.BillingManager.get_billing") - def test_deactivate_success(self, mock_get_billing, mock_deactivate_products): - mock_deactivate_products.return_value = MagicMock() - mock_get_billing.return_value = { - "available_features": [], - "products": [], - } - - url = "/api/billing-v2/deactivate" - data = {"products": "product_1"} - - response = self.client.get(url, data) - - self.assertEqual(response.status_code, status.HTTP_200_OK) - mock_deactivate_products.assert_called_once_with(self.organization, "product_1") - mock_get_billing.assert_called_once_with(self.organization, None) - - def test_deactivate_failure(self): - url = "/api/billing-v2/deactivate" - data = {"none": "nothing"} - - response = self.client.get(url, data) - - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) diff --git a/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiments.ambr b/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiments.ambr index b1eb79a39945c..8f452cd79146c 100644 --- a/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiments.ambr +++ b/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiments.ambr @@ -441,26 +441,97 @@ # --- # name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.1 ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS value, + count(*) as count + FROM events e + WHERE team_id = 2 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 ''' # --- # name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.2 ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT countIf(steps = 1) step_1, + countIf(steps = 2) step_2, + avg(step_1_average_conversion_time_inner) step_1_average_conversion_time, + median(step_1_median_conversion_time_inner) step_1_median_conversion_time, + prop + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 14 DAY, 2, 1) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 14 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + 
step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , + if(has([[''], ['test_1'], ['test'], ['control'], ['unknown_3'], ['unknown_2'], ['unknown_1'], ['test_2']], prop), prop, ['Other']) as prop + FROM + (SELECT *, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id, + if(event = '$pageview', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = '$pageleave', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + AND distinct_id IN + (SELECT distinct_id + FROM events + WHERE team_id = 2 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') ) + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 2 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (step_0 = 1 + OR step_1 = 1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + GROUP BY prop ''' # --- # name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.3 @@ -726,42 +797,6 @@ ''' # --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.1 - ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ''' -# --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.2 - ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ''' -# --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.3 - ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ''' -# --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.4 ''' /* user_id:0 request:_snapshot_ */ SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') AS value, @@ -780,7 +815,7 @@ OFFSET 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.5 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.2 ''' /* user_id:0 request:_snapshot_ */ SELECT 
groupArray(day_start) as date, @@ -828,7 +863,7 @@ ORDER BY breakdown_value ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.6 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.3 ''' /* user_id:0 request:_snapshot_ */ SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', '') AS value, @@ -848,7 +883,7 @@ OFFSET 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.7 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.4 ''' /* user_id:0 request:_snapshot_ */ SELECT groupArray(day_start) as date, @@ -912,72 +947,7 @@ ORDER BY breakdown_value ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants - ''' - /* user_id:0 celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ''' -# --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.1 - ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ''' -# --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.2 - ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ''' -# --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.3 - ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ''' -# --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.4 - ''' - /* user_id:0 request:_snapshot_ */ - SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') AS value, - count(*) as count - FROM events e - WHERE team_id = 2 - AND event = '$pageview1' - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') - AND (has(['control', 'test_1', 'test_2', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature/a-b-test'), '^"|"$', ''))) - GROUP BY value - ORDER BY count DESC, value DESC - LIMIT 26 - OFFSET 0 - ''' -# --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.5 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.5 ''' /* user_id:0 request:_snapshot_ */ SELECT groupArray(day_start) as date, @@ -1000,17 +970,19 @@ CROSS JOIN (SELECT breakdown_value FROM - (SELECT ['control', 'test_1', 'test_2'] as breakdown_value) ARRAY + (SELECT ['test', 'control'] as breakdown_value) ARRAY JOIN breakdown_value) as sec ORDER BY breakdown_value, day_start UNION ALL SELECT count(*) as 
total, toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) as day_start, - transform(ifNull(nullIf(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', ''), ''), '$$_posthog_breakdown_null_$$'), (['control', 'test_1', 'test_2']), (['control', 'test_1', 'test_2']), '$$_posthog_breakdown_other_$$') as breakdown_value + transform(ifNull(nullIf(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', ''), ''), '$$_posthog_breakdown_null_$$'), (['test', 'control']), (['test', 'control']), '$$_posthog_breakdown_other_$$') as breakdown_value FROM events e WHERE e.team_id = 2 - AND event = '$pageview1' - AND (has(['control', 'test_1', 'test_2', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature/a-b-test'), '^"|"$', ''))) + AND event = '$pageview' + AND (((isNull(replaceRegexpAll(JSONExtractRaw(e.properties, 'exclude'), '^"|"$', '')) + OR NOT JSONHas(e.properties, 'exclude'))) + AND (has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature/a-b-test'), '^"|"$', '')))) AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') GROUP BY day_start, @@ -1023,7 +995,7 @@ ORDER BY breakdown_value ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.6 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.6 ''' /* user_id:0 request:_snapshot_ */ SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', '') AS value, @@ -1033,50 +1005,258 @@ AND event = '$feature_flag_called' AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') - AND ((has(['control', 'test_1', 'test_2', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag_response'), '^"|"$', ''))) - AND (has(['a-b-test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag'), '^"|"$', '')))) + AND (((isNull(replaceRegexpAll(JSONExtractRaw(e.properties, 'exclude'), '^"|"$', '')) + OR NOT JSONHas(e.properties, 'exclude'))) + AND ((has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag_response'), '^"|"$', ''))) + AND (has(['a-b-test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag'), '^"|"$', ''))))) GROUP BY value ORDER BY count DESC, value DESC LIMIT 26 OFFSET 0 ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.7 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.7 ''' /* user_id:0 request:_snapshot_ */ - SELECT [now()] AS date, - [0] AS total, - '' AS breakdown_value - LIMIT 0 - ''' -# --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone - ''' - /* user_id:0 celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ''' -# --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.1 - ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events + SELECT groupArray(day_start) as date, + 
groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-06 00:00:00', 'UTC')) - toIntervalDay(number) as day_start + FROM numbers(6) + UNION ALL SELECT toStartOfDay(toDateTime('2020-01-01 00:00:00', 'UTC')) as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['control', 'test'] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(DISTINCT person_id) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) as day_start, + breakdown_value + FROM + (SELECT person_id, + min(timestamp) as timestamp, + breakdown_value + FROM + (SELECT pdi.person_id as person_id, timestamp, transform(ifNull(nullIf(replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', ''), ''), '$$_posthog_breakdown_null_$$'), (['control', 'test']), (['control', 'test']), '$$_posthog_breakdown_other_$$') as breakdown_value + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) as pdi ON events.distinct_id = pdi.distinct_id + WHERE e.team_id = 2 + AND event = '$feature_flag_called' + AND (((isNull(replaceRegexpAll(JSONExtractRaw(e.properties, 'exclude'), '^"|"$', '')) + OR NOT JSONHas(e.properties, 'exclude'))) + AND ((has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag_response'), '^"|"$', ''))) + AND (has(['a-b-test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag'), '^"|"$', ''))))) + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') ) + GROUP BY person_id, + breakdown_value) AS pdi + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value + ''' +# --- +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants + ''' + /* user_id:0 celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events WHERE timestamp > date_sub(DAY, 3, now()) AND timestamp < now() GROUP BY team_id ORDER BY age; ''' # --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.2 +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.1 ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + /* user_id:0 request:_snapshot_ */ + SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') AS value, + count(*) as count + FROM events e + WHERE team_id = 2 + AND event = '$pageview1' + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (has(['control', 'test_1', 'test_2', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature/a-b-test'), '^"|"$', ''))) + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: 
ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.2 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-06 00:00:00', 'UTC')) - toIntervalDay(number) as day_start + FROM numbers(6) + UNION ALL SELECT toStartOfDay(toDateTime('2020-01-01 00:00:00', 'UTC')) as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['control', 'test_1', 'test_2'] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(*) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) as day_start, + transform(ifNull(nullIf(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', ''), ''), '$$_posthog_breakdown_null_$$'), (['control', 'test_1', 'test_2']), (['control', 'test_1', 'test_2']), '$$_posthog_breakdown_other_$$') as breakdown_value + FROM events e + WHERE e.team_id = 2 + AND event = '$pageview1' + AND (has(['control', 'test_1', 'test_2', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature/a-b-test'), '^"|"$', ''))) + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value + ''' +# --- +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.3 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', '') AS value, + count(*) as count + FROM events e + WHERE team_id = 2 + AND event = '$feature_flag_called' + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND ((has(['control', 'test_1', 'test_2', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag_response'), '^"|"$', ''))) + AND (has(['a-b-test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag'), '^"|"$', '')))) + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.4 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT [now()] AS date, + [0] AS total, + '' AS breakdown_value + LIMIT 0 + ''' +# --- +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.5 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-06 00:00:00', 'UTC')) - toIntervalDay(number) as day_start + FROM numbers(6) + UNION ALL SELECT toStartOfDay(toDateTime('2020-01-01 00:00:00', 'UTC')) as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['control', 
'test_1', 'test_2'] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(*) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) as day_start, + transform(ifNull(nullIf(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', ''), ''), '$$_posthog_breakdown_null_$$'), (['control', 'test_1', 'test_2']), (['control', 'test_1', 'test_2']), '$$_posthog_breakdown_other_$$') as breakdown_value + FROM events e + WHERE e.team_id = 2 + AND event = '$pageview1' + AND (has(['control', 'test_1', 'test_2', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature/a-b-test'), '^"|"$', ''))) + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value + ''' +# --- +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.6 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', '') AS value, + count(*) as count + FROM events e + WHERE team_id = 2 + AND event = '$feature_flag_called' + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND ((has(['control', 'test_1', 'test_2', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag_response'), '^"|"$', ''))) + AND (has(['a-b-test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag'), '^"|"$', '')))) + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.7 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT [now()] AS date, + [0] AS total, + '' AS breakdown_value + LIMIT 0 + ''' +# --- +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone + ''' + /* user_id:0 celery:posthog.tasks.tasks.sync_insight_caching_state */ SELECT team_id, date_diff('second', max(timestamp), now()) AS age FROM events @@ -1086,35 +1266,149 @@ ORDER BY age; ''' # --- +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.1 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') AS value, + count(*) as count + FROM events e + WHERE team_id = 2 + AND event = '$pageview' + AND toTimeZone(timestamp, 'US/Pacific') >= toDateTime('2020-01-01 02:10:00', 'US/Pacific') + AND toTimeZone(timestamp, 'US/Pacific') <= toDateTime('2020-01-06 07:00:00', 'US/Pacific') + AND (has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature/a-b-test'), '^"|"$', ''))) + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 + ''' +# --- +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.2 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + 
ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-06 07:00:00', 'US/Pacific')) - toIntervalDay(number) as day_start + FROM numbers(6) + UNION ALL SELECT toStartOfDay(toDateTime('2020-01-01 02:10:00', 'US/Pacific')) as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['test', 'control'] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(*) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'US/Pacific')) as day_start, + transform(ifNull(nullIf(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', ''), ''), '$$_posthog_breakdown_null_$$'), (['test', 'control']), (['test', 'control']), '$$_posthog_breakdown_other_$$') as breakdown_value + FROM events e + WHERE e.team_id = 2 + AND event = '$pageview' + AND (has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature/a-b-test'), '^"|"$', ''))) + AND toTimeZone(timestamp, 'US/Pacific') >= toDateTime('2020-01-01 02:10:00', 'US/Pacific') + AND toTimeZone(timestamp, 'US/Pacific') <= toDateTime('2020-01-06 07:00:00', 'US/Pacific') + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value + ''' +# --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.3 - ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ''' -# --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.4 ''' /* user_id:0 request:_snapshot_ */ - SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') AS value, + SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', '') AS value, count(*) as count FROM events e WHERE team_id = 2 - AND event = '$pageview' + AND event = '$feature_flag_called' AND toTimeZone(timestamp, 'US/Pacific') >= toDateTime('2020-01-01 02:10:00', 'US/Pacific') AND toTimeZone(timestamp, 'US/Pacific') <= toDateTime('2020-01-06 07:00:00', 'US/Pacific') - AND (has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature/a-b-test'), '^"|"$', ''))) + AND ((has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag_response'), '^"|"$', ''))) + AND (has(['a-b-test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag'), '^"|"$', '')))) GROUP BY value ORDER BY count DESC, value DESC LIMIT 26 OFFSET 0 ''' # --- +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.4 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-06 07:00:00', 'US/Pacific')) - toIntervalDay(number) as day_start + FROM numbers(6) + UNION ALL SELECT toStartOfDay(toDateTime('2020-01-01 02:10:00', 'US/Pacific')) as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['control', 
'test'] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(DISTINCT person_id) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'US/Pacific')) as day_start, + breakdown_value + FROM + (SELECT person_id, + min(timestamp) as timestamp, + breakdown_value + FROM + (SELECT pdi.person_id as person_id, timestamp, transform(ifNull(nullIf(replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', ''), ''), '$$_posthog_breakdown_null_$$'), (['control', 'test']), (['control', 'test']), '$$_posthog_breakdown_other_$$') as breakdown_value + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) as pdi ON events.distinct_id = pdi.distinct_id + WHERE e.team_id = 2 + AND event = '$feature_flag_called' + AND ((has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag_response'), '^"|"$', ''))) + AND (has(['a-b-test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag'), '^"|"$', '')))) + AND toTimeZone(timestamp, 'US/Pacific') >= toDateTime('2020-01-01 02:10:00', 'US/Pacific') + AND toTimeZone(timestamp, 'US/Pacific') <= toDateTime('2020-01-06 07:00:00', 'US/Pacific') ) + GROUP BY person_id, + breakdown_value) AS pdi + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value + ''' +# --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone.5 ''' /* user_id:0 request:_snapshot_ */ @@ -1255,58 +1549,149 @@ # --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter.1 ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') AS value, + count(*) as count + FROM events e + WHERE team_id = 2 + AND event = '$pageview' + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND ((has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature/a-b-test'), '^"|"$', ''))) + AND (ifNull(ilike(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(properties, 'hogql'), ''), 'null'), '^"|"$', ''), 'true'), 0))) + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 26 + OFFSET 0 ''' # --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter.2 ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT 
toStartOfDay(toDateTime('2020-01-06 00:00:00', 'UTC')) - toIntervalDay(number) as day_start + FROM numbers(6) + UNION ALL SELECT toStartOfDay(toDateTime('2020-01-01 00:00:00', 'UTC')) as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['test', 'control'] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(*) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) as day_start, + transform(ifNull(nullIf(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', ''), ''), '$$_posthog_breakdown_null_$$'), (['test', 'control']), (['test', 'control']), '$$_posthog_breakdown_other_$$') as breakdown_value + FROM events e + WHERE e.team_id = 2 + AND event = '$pageview' + AND ((has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature/a-b-test'), '^"|"$', ''))) + AND (ifNull(ilike(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(properties, 'hogql'), ''), 'null'), '^"|"$', ''), 'true'), 0))) + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value ''' # --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter.3 - ''' - /* celery:posthog.tasks.tasks.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ''' -# --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter.4 ''' /* user_id:0 request:_snapshot_ */ - SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') AS value, + SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', '') AS value, count(*) as count FROM events e WHERE team_id = 2 - AND event = '$pageview' + AND event = '$feature_flag_called' AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') - AND ((has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature/a-b-test'), '^"|"$', ''))) - AND (ifNull(ilike(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(properties, 'hogql'), ''), 'null'), '^"|"$', ''), 'true'), 0))) + AND ((has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag_response'), '^"|"$', ''))) + AND (has(['a-b-test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag'), '^"|"$', '')))) GROUP BY value ORDER BY count DESC, value DESC LIMIT 26 OFFSET 0 ''' # --- +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter.4 + ''' + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-06 00:00:00', 'UTC')) - toIntervalDay(number) as day_start + FROM numbers(6) + UNION ALL SELECT toStartOfDay(toDateTime('2020-01-01 00:00:00', 'UTC')) as day_start) 
as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['control', 'test'] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(DISTINCT person_id) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) as day_start, + breakdown_value + FROM + (SELECT person_id, + min(timestamp) as timestamp, + breakdown_value + FROM + (SELECT pdi.person_id as person_id, timestamp, transform(ifNull(nullIf(replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', ''), ''), '$$_posthog_breakdown_null_$$'), (['control', 'test']), (['control', 'test']), '$$_posthog_breakdown_other_$$') as breakdown_value + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) as pdi ON events.distinct_id = pdi.distinct_id + WHERE e.team_id = 2 + AND event = '$feature_flag_called' + AND ((has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag_response'), '^"|"$', ''))) + AND (has(['a-b-test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag'), '^"|"$', '')))) + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') ) + GROUP BY person_id, + breakdown_value) AS pdi + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value + ''' +# --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter.5 ''' /* user_id:0 request:_snapshot_ */ diff --git a/ee/session_recordings/queries/test/test_session_recording_list_from_session_replay.py b/ee/session_recordings/queries/test/test_session_recording_list_from_session_replay.py deleted file mode 100644 index b743302f896bf..0000000000000 --- a/ee/session_recordings/queries/test/test_session_recording_list_from_session_replay.py +++ /dev/null @@ -1,347 +0,0 @@ -from itertools import product -from unittest import mock -from uuid import uuid4 - -from dateutil.relativedelta import relativedelta -from django.utils.timezone import now -from freezegun import freeze_time -from parameterized import parameterized - -from ee.clickhouse.materialized_columns.columns import materialize -from posthog.clickhouse.client import sync_execute -from posthog.models import Person -from posthog.models.filters import SessionRecordingsFilter -from posthog.schema import PersonsOnEventsMode -from posthog.session_recordings.queries.session_recording_list_from_replay_summary import ( - SessionRecordingListFromReplaySummary, -) -from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary -from posthog.session_recordings.sql.session_replay_event_sql import TRUNCATE_SESSION_REPLAY_EVENTS_TABLE_SQL -from posthog.test.base import ( - APIBaseTest, - ClickhouseTestMixin, - QueryMatchingTest, - snapshot_clickhouse_queries, - _create_event, -) - - -@freeze_time("2021-01-01T13:46:23") -class TestClickhouseSessionRecordingsListFromSessionReplay(ClickhouseTestMixin, APIBaseTest, QueryMatchingTest): - def tearDown(self) -> None: - sync_execute(TRUNCATE_SESSION_REPLAY_EVENTS_TABLE_SQL()) - - @property - def base_time(self): - return (now() - relativedelta(hours=1)).replace(microsecond=0, second=0) - - def create_event( - self, - 
distinct_id, - timestamp, - team=None, - event_name="$pageview", - properties=None, - ): - if team is None: - team = self.team - if properties is None: - properties = {"$os": "Windows 95", "$current_url": "aloha.com/2"} - return _create_event( - team=team, - event=event_name, - timestamp=timestamp, - distinct_id=distinct_id, - properties=properties, - ) - - @parameterized.expand( - [ - [ - "test_poe_v1_still_falls_back_to_person_subquery", - True, - False, - False, - PersonsOnEventsMode.PERSON_ID_NO_OVERRIDE_PROPERTIES_ON_EVENTS, - { - "kperson_filter_pre__0": "rgInternal", - "kpersonquery_person_filter_fin__0": "rgInternal", - "person_uuid": None, - "vperson_filter_pre__0": ["false"], - "vpersonquery_person_filter_fin__0": ["false"], - }, - True, - False, - ], - [ - "test_poe_being_unavailable_we_fall_back_to_person_subquery", - False, - False, - False, - PersonsOnEventsMode.DISABLED, - { - "kperson_filter_pre__0": "rgInternal", - "kpersonquery_person_filter_fin__0": "rgInternal", - "person_uuid": None, - "vperson_filter_pre__0": ["false"], - "vpersonquery_person_filter_fin__0": ["false"], - }, - True, - False, - ], - [ - "test_allow_denormalised_props_fix_does_not_stop_all_poe_processing", - False, - True, - False, - PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS, - { - "event_names": [], - "event_start_time": mock.ANY, - "event_end_time": mock.ANY, - "kglobal_0": "rgInternal", - "vglobal_0": ["false"], - }, - False, - True, - ], - [ - "test_poe_v2_available_person_properties_are_used_in_replay_listing", - False, - True, - True, - PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS, - { - "event_end_time": mock.ANY, - "event_names": [], - "event_start_time": mock.ANY, - "kglobal_0": "rgInternal", - "vglobal_0": ["false"], - }, - False, - True, - ], - ] - ) - def test_effect_of_poe_settings_on_query_generated( - self, - _name: str, - poe_v1: bool, - poe_v2: bool, - allow_denormalized_props: bool, - expected_poe_mode: PersonsOnEventsMode, - expected_query_params: dict, - unmaterialized_person_column_used: bool, - materialized_event_column_used: bool, - ) -> None: - with self.settings( - PERSON_ON_EVENTS_OVERRIDE=poe_v1, - PERSON_ON_EVENTS_V2_OVERRIDE=poe_v2, - ALLOW_DENORMALIZED_PROPS_IN_LISTING=allow_denormalized_props, - ): - assert self.team.person_on_events_mode == expected_poe_mode - materialize("events", "rgInternal", table_column="person_properties") - - filter = SessionRecordingsFilter( - team=self.team, - data={ - "properties": [ - { - "key": "rgInternal", - "value": ["false"], - "operator": "exact", - "type": "person", - } - ] - }, - ) - session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) - [generated_query, query_params] = session_recording_list_instance.get_query() - assert query_params == { - "clamped_to_storage_ttl": mock.ANY, - "end_time": mock.ANY, - "limit": 51, - "offset": 0, - "start_time": mock.ANY, - "team_id": self.team.id, - **expected_query_params, - } - - json_extract_fragment = ( - "has(%(vperson_filter_pre__0)s, replaceRegexpAll(JSONExtractRaw(properties, %(kperson_filter_pre__0)s)" - ) - materialized_column_fragment = 'AND ( has(%(vglobal_0)s, "mat_pp_rgInternal"))' - - # it will always have one of these fragments - assert (json_extract_fragment in generated_query) or (materialized_column_fragment in generated_query) - - # the unmaterialized person column - assert (json_extract_fragment in generated_query) is unmaterialized_person_column_used - # materialized event column - assert 
(materialized_column_fragment in generated_query) is materialized_event_column_used - self.assertQueryMatchesSnapshot(generated_query) - - settings_combinations = [ - ["poe v2 and materialized columns allowed", False, True, True], - ["poe v2 and materialized columns off", False, True, False], - ["poe off and materialized columns allowed", False, False, True], - ["poe off and materialized columns not allowed", False, False, False], - ["poe v1 and materialized columns allowed", True, False, True], - ["poe v1 and not materialized columns not allowed", True, False, False], - ] - - # Options for "materialize person columns" - materialization_options = [ - [" with materialization", True], - [" without materialization", False], - ] - - # Expand the parameter list to the product of all combinations with "materialize person columns" - # e.g. [a, b] x [c, d] = [a, c], [a, d], [b, c], [b, d] - test_case_combinations = [ - [f"{name}{mat_option}", poe_v1, poe, mat_columns, mat_person] - for (name, poe_v1, poe, mat_columns), (mat_option, mat_person) in product( - settings_combinations, materialization_options - ) - ] - - @parameterized.expand(test_case_combinations) - @snapshot_clickhouse_queries - def test_event_filter_with_person_properties_materialized( - self, - _name: str, - poe1_enabled: bool, - poe2_enabled: bool, - allow_denormalised_props: bool, - materialize_person_props: bool, - ) -> None: - # KLUDGE: I couldn't figure out how to use @also_test_with_materialized_columns(person_properties=["email"]) - # KLUDGE: and the parameterized.expand decorator at the same time, so we generate test case combos - # KLUDGE: for materialization on and off to test both sides the way the decorator would have - if materialize_person_props: - materialize("events", "email", table_column="person_properties") - materialize("person", "email") - - with self.settings( - PERSON_ON_EVENTS_OVERRIDE=poe1_enabled, - PERSON_ON_EVENTS_V2_OVERRIDE=poe2_enabled, - ALLOW_DENORMALIZED_PROPS_IN_LISTING=allow_denormalised_props, - ): - user_one = "test_event_filter_with_person_properties-user" - user_two = "test_event_filter_with_person_properties-user2" - session_id_one = f"test_event_filter_with_person_properties-1-{str(uuid4())}" - session_id_two = f"test_event_filter_with_person_properties-2-{str(uuid4())}" - - Person.objects.create(team=self.team, distinct_ids=[user_one], properties={"email": "bla"}) - Person.objects.create(team=self.team, distinct_ids=[user_two], properties={"email": "bla2"}) - - self._add_replay_with_pageview(session_id_one, user_one) - produce_replay_summary( - distinct_id=user_one, - session_id=session_id_one, - first_timestamp=(self.base_time + relativedelta(seconds=30)), - team_id=self.team.id, - ) - self._add_replay_with_pageview(session_id_two, user_two) - produce_replay_summary( - distinct_id=user_two, - session_id=session_id_two, - first_timestamp=(self.base_time + relativedelta(seconds=30)), - team_id=self.team.id, - ) - - match_everyone_filter = SessionRecordingsFilter( - team=self.team, - data={"properties": []}, - ) - - session_recording_list_instance = SessionRecordingListFromReplaySummary( - filter=match_everyone_filter, team=self.team - ) - (session_recordings, _) = session_recording_list_instance.run() - - assert sorted([x["session_id"] for x in session_recordings]) == sorted([session_id_one, session_id_two]) - - match_bla_filter = SessionRecordingsFilter( - team=self.team, - data={ - "properties": [ - { - "key": "email", - "value": ["bla"], - "operator": "exact", - "type": "person", - } - ] 
- }, - ) - - session_recording_list_instance = SessionRecordingListFromReplaySummary( - filter=match_bla_filter, team=self.team - ) - (session_recordings, _) = session_recording_list_instance.run() - - assert len(session_recordings) == 1 - assert session_recordings[0]["session_id"] == session_id_one - - def _add_replay_with_pageview(self, session_id: str, user_one): - self.create_event( - user_one, - self.base_time, - properties={"$session_id": session_id, "$window_id": str(uuid4())}, - ) - produce_replay_summary( - distinct_id=user_one, - session_id=session_id, - first_timestamp=self.base_time, - team_id=self.team.id, - ) - - @parameterized.expand(test_case_combinations) - @snapshot_clickhouse_queries - def test_person_id_filter( - self, - _name: str, - poe2_enabled: bool, - poe1_enabled: bool, - allow_denormalised_props: bool, - materialize_person_props: bool, - ) -> None: - # KLUDGE: I couldn't figure out how to use @also_test_with_materialized_columns(person_properties=["email"]) - # KLUDGE: and the parameterized.expand decorator at the same time, so we generate test case combos - # KLUDGE: for materialization on and off to test both sides the way the decorator would have - if materialize_person_props: - # it shouldn't matter to this test whether any column is materialized - # but let's keep the tests in this file similar so we flush out any unexpected interactions - materialize("events", "email", table_column="person_properties") - materialize("person", "email") - - with self.settings( - PERSON_ON_EVENTS_OVERRIDE=poe1_enabled, - PERSON_ON_EVENTS_V2_OVERRIDE=poe2_enabled, - ALLOW_DENORMALIZED_PROPS_IN_LISTING=allow_denormalised_props, - ): - three_user_ids = ["person-1-distinct-1", "person-1-distinct-2", "person-2"] - session_id_one = f"test_person_id_filter-session-one" - session_id_two = f"test_person_id_filter-session-two" - session_id_three = f"test_person_id_filter-session-three" - - p = Person.objects.create( - team=self.team, - distinct_ids=[three_user_ids[0], three_user_ids[1]], - properties={"email": "bla"}, - ) - Person.objects.create( - team=self.team, - distinct_ids=[three_user_ids[2]], - properties={"email": "bla2"}, - ) - - self._add_replay_with_pageview(session_id_one, three_user_ids[0]) - self._add_replay_with_pageview(session_id_two, three_user_ids[1]) - self._add_replay_with_pageview(session_id_three, three_user_ids[2]) - - filter = SessionRecordingsFilter(team=self.team, data={"person_uuid": str(p.uuid)}) - session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) - (session_recordings, _) = session_recording_list_instance.run() - assert sorted([r["session_id"] for r in session_recordings]) == sorted([session_id_two, session_id_one]) diff --git a/ee/urls.py b/ee/urls.py index 3cebde01fe365..0a5e0d9b63855 100644 --- a/ee/urls.py +++ b/ee/urls.py @@ -34,7 +34,6 @@ def extend_api_router( project_feature_flags_router: NestedRegistryItem, ) -> None: root_router.register(r"billing", billing.BillingViewset, "billing") - root_router.register(r"billing-v2", billing.BillingViewset, "billing") # Legacy transition route root_router.register(r"license", license.LicenseViewSet) root_router.register(r"time_to_see_data", time_to_see_data.TimeToSeeDataViewSet, "query_metrics") root_router.register(r"integrations", integration.PublicIntegrationViewSet) diff --git a/frontend/__snapshots__/replay-player-success--recent-recordings--dark.png b/frontend/__snapshots__/replay-player-success--recent-recordings--dark.png new file mode 100644 index 
0000000000000..5d30316a8b712 Binary files /dev/null and b/frontend/__snapshots__/replay-player-success--recent-recordings--dark.png differ diff --git a/frontend/__snapshots__/replay-player-success--recent-recordings--light.png b/frontend/__snapshots__/replay-player-success--recent-recordings--light.png new file mode 100644 index 0000000000000..46163479cd849 Binary files /dev/null and b/frontend/__snapshots__/replay-player-success--recent-recordings--light.png differ diff --git a/frontend/__snapshots__/replay-player-success--recordings-play-list-no-pinned-recordings--dark.png b/frontend/__snapshots__/replay-player-success--recordings-play-list-no-pinned-recordings--dark.png new file mode 100644 index 0000000000000..7f91d47111807 Binary files /dev/null and b/frontend/__snapshots__/replay-player-success--recordings-play-list-no-pinned-recordings--dark.png differ diff --git a/frontend/__snapshots__/replay-player-success--recordings-play-list-no-pinned-recordings--light.png b/frontend/__snapshots__/replay-player-success--recordings-play-list-no-pinned-recordings--light.png new file mode 100644 index 0000000000000..a8a58c75d714e Binary files /dev/null and b/frontend/__snapshots__/replay-player-success--recordings-play-list-no-pinned-recordings--light.png differ diff --git a/frontend/__snapshots__/replay-player-success--recordings-play-list-with-pinned-recordings--dark.png b/frontend/__snapshots__/replay-player-success--recordings-play-list-with-pinned-recordings--dark.png new file mode 100644 index 0000000000000..7f91d47111807 Binary files /dev/null and b/frontend/__snapshots__/replay-player-success--recordings-play-list-with-pinned-recordings--dark.png differ diff --git a/frontend/__snapshots__/replay-player-success--recordings-play-list-with-pinned-recordings--light.png b/frontend/__snapshots__/replay-player-success--recordings-play-list-with-pinned-recordings--light.png new file mode 100644 index 0000000000000..22110547d5591 Binary files /dev/null and b/frontend/__snapshots__/replay-player-success--recordings-play-list-with-pinned-recordings--light.png differ diff --git a/frontend/__snapshots__/replay-player-success--second-recording-in-list--dark.png b/frontend/__snapshots__/replay-player-success--second-recording-in-list--dark.png new file mode 100644 index 0000000000000..5f2d993d05efb Binary files /dev/null and b/frontend/__snapshots__/replay-player-success--second-recording-in-list--dark.png differ diff --git a/frontend/__snapshots__/replay-player-success--second-recording-in-list--light.png b/frontend/__snapshots__/replay-player-success--second-recording-in-list--light.png new file mode 100644 index 0000000000000..693947ce7669d Binary files /dev/null and b/frontend/__snapshots__/replay-player-success--second-recording-in-list--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--light.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--light.png index 26bb0b6f5e96f..43a834a6911c2 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--light.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--light.png differ diff --git a/frontend/__snapshots__/scenes-other-billing-v2--billing--dark.png b/frontend/__snapshots__/scenes-other-billing-v2--billing--dark.png new file mode 100644 index 0000000000000..ad4b61f55a87a Binary files /dev/null and b/frontend/__snapshots__/scenes-other-billing-v2--billing--dark.png differ diff --git 
a/frontend/__snapshots__/scenes-other-billing-v2--billing--light.png b/frontend/__snapshots__/scenes-other-billing-v2--billing--light.png new file mode 100644 index 0000000000000..4f956d155b6c3 Binary files /dev/null and b/frontend/__snapshots__/scenes-other-billing-v2--billing--light.png differ diff --git a/frontend/__snapshots__/scenes-other-billing-v2--billing-with-discount--dark.png b/frontend/__snapshots__/scenes-other-billing-v2--billing-with-discount--dark.png new file mode 100644 index 0000000000000..11fa60342f498 Binary files /dev/null and b/frontend/__snapshots__/scenes-other-billing-v2--billing-with-discount--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-billing-v2--billing-with-discount--light.png b/frontend/__snapshots__/scenes-other-billing-v2--billing-with-discount--light.png new file mode 100644 index 0000000000000..54b9a451c794d Binary files /dev/null and b/frontend/__snapshots__/scenes-other-billing-v2--billing-with-discount--light.png differ diff --git a/frontend/__snapshots__/scenes-other-billing-v2--billing-with-limit-and-100-percent-discount--dark.png b/frontend/__snapshots__/scenes-other-billing-v2--billing-with-limit-and-100-percent-discount--dark.png new file mode 100644 index 0000000000000..70caced4d1b3d Binary files /dev/null and b/frontend/__snapshots__/scenes-other-billing-v2--billing-with-limit-and-100-percent-discount--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-billing-v2--billing-with-limit-and-100-percent-discount--light.png b/frontend/__snapshots__/scenes-other-billing-v2--billing-with-limit-and-100-percent-discount--light.png new file mode 100644 index 0000000000000..b8f13d3bd3afc Binary files /dev/null and b/frontend/__snapshots__/scenes-other-billing-v2--billing-with-limit-and-100-percent-discount--light.png differ diff --git a/frontend/src/lib/components/ConfirmUpgradeModal/confirmUpgradeModalLogic.ts b/frontend/src/lib/components/ConfirmUpgradeModal/confirmUpgradeModalLogic.ts index 48016a916b56e..a004d0a6a0e56 100644 --- a/frontend/src/lib/components/ConfirmUpgradeModal/confirmUpgradeModalLogic.ts +++ b/frontend/src/lib/components/ConfirmUpgradeModal/confirmUpgradeModalLogic.ts @@ -1,6 +1,6 @@ import { actions, kea, listeners, path, reducers } from 'kea' -import { BillingV2PlanType } from '~/types' +import { BillingPlanType } from '~/types' import type { confirmUpgradeModalLogicType } from './confirmUpgradeModalLogicType' @@ -8,7 +8,7 @@ export const confirmUpgradeModalLogic = kea([ path(['lib', 'components', 'ConfirmUpgradeModal', 'confirmUpgradeModalLogic']), actions({ showConfirmUpgradeModal: ( - upgradePlan: BillingV2PlanType, + upgradePlan: BillingPlanType, confirmCallback: () => void, cancelCallback: () => void ) => ({ @@ -22,7 +22,7 @@ export const confirmUpgradeModalLogic = kea([ }), reducers({ upgradePlan: [ - null as BillingV2PlanType | null, + null as BillingPlanType | null, { showConfirmUpgradeModal: (_, { upgradePlan }) => upgradePlan, hideConfirmUpgradeModal: () => null, diff --git a/frontend/src/lib/components/PayGateMini/PayGateButton.tsx b/frontend/src/lib/components/PayGateMini/PayGateButton.tsx index 26790cf7d138e..2f13b85e24c1c 100644 --- a/frontend/src/lib/components/PayGateMini/PayGateButton.tsx +++ b/frontend/src/lib/components/PayGateMini/PayGateButton.tsx @@ -4,14 +4,14 @@ import { FEATURE_FLAGS } from 'lib/constants' import { featureFlagLogic, FeatureFlagsSet } from 'lib/logic/featureFlagLogic' import { urls } from 'scenes/urls' -import { BillingProductV2AddonType, 
BillingProductV2Type, BillingV2FeatureType, BillingV2Type } from '~/types' +import { BillingFeatureType, BillingProductV2AddonType, BillingProductV2Type, BillingType } from '~/types' interface PayGateButtonProps { gateVariant: 'add-card' | 'contact-sales' | 'move-to-cloud' | null productWithFeature: BillingProductV2AddonType | BillingProductV2Type - featureInfo: BillingV2FeatureType + featureInfo: BillingFeatureType onCtaClick: () => void - billing: BillingV2Type | null + billing: BillingType | null isAddonProduct?: boolean scrollToProduct: boolean } @@ -50,9 +50,9 @@ export const PayGateButton = ({ const getCtaLink = ( gateVariant: 'add-card' | 'contact-sales' | 'move-to-cloud' | null, productWithFeature: BillingProductV2AddonType | BillingProductV2Type, - featureInfo: BillingV2FeatureType, + featureInfo: BillingFeatureType, featureFlags: FeatureFlagsSet, - subscriptionLevel?: BillingV2Type['subscription_level'], + subscriptionLevel?: BillingType['subscription_level'], isAddonProduct?: boolean, scrollToProduct: boolean = true ): string | undefined => { @@ -77,7 +77,7 @@ const getCtaLink = ( const getCtaLabel = ( gateVariant: 'add-card' | 'contact-sales' | 'move-to-cloud' | null, - billing: BillingV2Type | null, + billing: BillingType | null, featureFlags: FeatureFlagsSet ): string => { if ( diff --git a/frontend/src/lib/components/PayGateMini/PayGateMini.tsx b/frontend/src/lib/components/PayGateMini/PayGateMini.tsx index c76e4a7c030d9..58f09073b783c 100644 --- a/frontend/src/lib/components/PayGateMini/PayGateMini.tsx +++ b/frontend/src/lib/components/PayGateMini/PayGateMini.tsx @@ -12,10 +12,10 @@ import { getProductIcon } from 'scenes/products/Products' import { AvailableFeature, + BillingFeatureType, BillingProductV2AddonType, BillingProductV2Type, - BillingV2FeatureType, - BillingV2Type, + BillingType, } from '~/types' import { upgradeModalLogic } from '../UpgradeModal/upgradeModalLogic' @@ -139,14 +139,14 @@ export function PayGateMini({ interface PayGateContentProps { className?: string background: boolean - featureInfo: BillingV2FeatureType - featureAvailableOnOrg?: BillingV2FeatureType | null + featureInfo: BillingFeatureType + featureAvailableOnOrg?: BillingFeatureType | null gateVariant: 'add-card' | 'contact-sales' | 'move-to-cloud' | null productWithFeature: BillingProductV2AddonType | BillingProductV2Type isGrandfathered?: boolean isAddonProduct?: boolean - billing: BillingV2Type | null - featureInfoOnNextPlan?: BillingV2FeatureType + billing: BillingType | null + featureInfoOnNextPlan?: BillingFeatureType children: React.ReactNode handleCtaClick: () => void } @@ -197,12 +197,12 @@ function PayGateContent({ } const renderUsageLimitMessage = ( - featureAvailableOnOrg: BillingV2FeatureType | null | undefined, - featureInfoOnNextPlan: BillingV2FeatureType | undefined, + featureAvailableOnOrg: BillingFeatureType | null | undefined, + featureInfoOnNextPlan: BillingFeatureType | undefined, gateVariant: 'add-card' | 'contact-sales' | 'move-to-cloud' | null, - featureInfo: BillingV2FeatureType, + featureInfo: BillingFeatureType, productWithFeature: BillingProductV2AddonType | BillingProductV2Type, - billing: BillingV2Type | null, + billing: BillingType | null, featureFlags: FeatureFlagsSet, isAddonProduct?: boolean, handleCtaClick?: () => void @@ -263,7 +263,7 @@ const renderUsageLimitMessage = ( const renderGateVariantMessage = ( gateVariant: 'add-card' | 'contact-sales' | 'move-to-cloud' | null, productWithFeature: BillingProductV2AddonType | BillingProductV2Type, - billing: 
BillingV2Type | null, + billing: BillingType | null, featureFlags: FeatureFlagsSet, isAddonProduct?: boolean ): JSX.Element => { diff --git a/frontend/src/lib/components/Support/supportLogic.ts b/frontend/src/lib/components/Support/supportLogic.ts index 013eab72eb24f..1d56a697bccc8 100644 --- a/frontend/src/lib/components/Support/supportLogic.ts +++ b/frontend/src/lib/components/Support/supportLogic.ts @@ -404,6 +404,10 @@ export const supportLogic = kea([ id: 22129191462555, value: posthog.get_distinct_id(), }, + { + id: 27031528411291, + value: userLogic?.values?.user?.organization?.id ?? '', + }, { id: 26073267652251, value: values.hasAvailableFeature(AvailableFeature.PRIORITY_SUPPORT) diff --git a/frontend/src/lib/constants.tsx b/frontend/src/lib/constants.tsx index cd74359799316..ce310a14fbe9c 100644 --- a/frontend/src/lib/constants.tsx +++ b/frontend/src/lib/constants.tsx @@ -194,7 +194,6 @@ export const FEATURE_FLAGS = { HEATMAPS_UI: 'heatmaps-ui', // owner: @benjackwhite THEME: 'theme', // owner: @aprilfools PLUGINS_FILTERING: 'plugins-filtering', // owner: @benjackwhite - SESSION_REPLAY_HOG_QL_FILTERING: 'session-replay-hogql-filtering', // owner: #team-replay INSIGHT_LOADING_BAR: 'insight-loading-bar', // owner: @aspicer PROXY_AS_A_SERVICE: 'proxy-as-a-service', // owner: #team-infrastructure LIVE_EVENTS: 'live-events', // owner: @zach or @jams @@ -210,6 +209,7 @@ export const FEATURE_FLAGS = { SETTINGS_BOUNCE_RATE_PAGE_VIEW_MODE: 'settings-bounce-rate-page-view-mode', // owner: @robbie-c SURVEYS_BRANCHING_LOGIC: 'surveys-branching-logic', // owner: @jurajmajerik #team-feature-success WEB_ANALYTICS_LIVE_USER_COUNT: 'web-analytics-live-user-count', // owner: @robbie-c + SETTINGS_SESSION_TABLE_VERSION: 'settings-session-table-version', // owner: @robbie-c } as const export type FeatureFlagKey = (typeof FEATURE_FLAGS)[keyof typeof FEATURE_FLAGS] diff --git a/frontend/src/lib/utils/eventUsageLogic.ts b/frontend/src/lib/utils/eventUsageLogic.ts index e798813d59c03..8150bfa869a96 100644 --- a/frontend/src/lib/utils/eventUsageLogic.ts +++ b/frontend/src/lib/utils/eventUsageLogic.ts @@ -395,6 +395,7 @@ export const eventUsageLogic = kea([ reportPoEModeUpdated: (mode: string) => ({ mode }), reportPersonsJoinModeUpdated: (mode: string) => ({ mode }), reportBounceRatePageViewModeUpdated: (mode: string) => ({ mode }), + reportSessionTableVersionUpdated: (version: string) => ({ version }), reportPropertySelectOpened: true, reportCreatedDashboardFromModal: true, reportSavedInsightToDashboard: true, @@ -810,6 +811,9 @@ export const eventUsageLogic = kea([ reportBounceRatePageViewModeUpdated: async ({ mode }) => { posthog.capture('bounce rate page view mode updated', { mode }) }, + reportSessionTableVersionUpdated: async ({ version }) => { + posthog.capture('session table version updated', { version }) + }, reportInsightFilterRemoved: async ({ index }) => { posthog.capture('local filter removed', { index }) }, diff --git a/frontend/src/mocks/features.ts b/frontend/src/mocks/features.ts index 10d4c23a7bb51..9ea2f81d6e530 100644 --- a/frontend/src/mocks/features.ts +++ b/frontend/src/mocks/features.ts @@ -1,10 +1,10 @@ -import { AvailableFeature, BillingV2FeatureType } from '~/types' +import { AvailableFeature, BillingFeatureType } from '~/types' let features: AvailableFeature[] = [] export const useAvailableFeatures = (f: AvailableFeature[]): void => { features = f } -export const getAvailableProductFeatures = (): BillingV2FeatureType[] => { +export const getAvailableProductFeatures = (): 
BillingFeatureType[] => { return features.map((feature) => { return { key: feature, diff --git a/frontend/src/mocks/fixtures/_billing.tsx b/frontend/src/mocks/fixtures/_billing.tsx index f8fa0010554ff..b5332d2b24aa1 100644 --- a/frontend/src/mocks/fixtures/_billing.tsx +++ b/frontend/src/mocks/fixtures/_billing.tsx @@ -1,8 +1,8 @@ import { dayjs } from 'lib/dayjs' -import { BillingV2Type } from '~/types' +import { BillingType } from '~/types' -export const billingJson: BillingV2Type = { +export const billingJson: BillingType = { customer_id: 'cus_Pg7PIL8MsKi6bx', deactivated: false, has_active_subscription: true, diff --git a/frontend/src/mocks/fixtures/_status_page_all_ok.json b/frontend/src/mocks/fixtures/_status_page_all_ok.json new file mode 100644 index 0000000000000..73bf18fde65b2 --- /dev/null +++ b/frontend/src/mocks/fixtures/_status_page_all_ok.json @@ -0,0 +1,382 @@ +{ + "page": { + "id": "1ll2gn427qjl", + "name": "PostHog", + "url": "https://status.posthog.com", + "time_zone": "Etc/UTC", + "updated_at": "2024-06-29T05:42:34.351Z" + }, + "components": [ + { + "id": "6v7xydh2714r", + "name": "App", + "status": "operational", + "created_at": "2020-08-03T22:28:14.454Z", + "updated_at": "2024-05-28T10:24:00.192Z", + "position": 1, + "description": "Frontend application and API servers", + "showcase": true, + "start_date": null, + "group_id": "41df083ftqt6", + "page_id": "1ll2gn427qjl", + "group": false, + "only_show_if_degraded": false + }, + { + "id": "xtkq4whlbry9", + "name": "Update Service", + "status": "operational", + "created_at": "2022-11-21T21:30:16.813Z", + "updated_at": "2022-11-21T21:40:45.215Z", + "position": 1, + "description": null, + "showcase": true, + "start_date": "2022-08-01", + "group_id": "0cn1jr2pwdwm", + "page_id": "1ll2gn427qjl", + "group": false, + "only_show_if_degraded": false + }, + { + "id": "j6bgzv8dh942", + "name": "PostHog.com", + "status": "operational", + "created_at": "2022-11-21T21:31:36.659Z", + "updated_at": "2023-04-28T19:15:51.255Z", + "position": 1, + "description": "Homepage, Docs, and Handbook", + "showcase": true, + "start_date": "2022-07-01", + "group_id": null, + "page_id": "1ll2gn427qjl", + "group": false, + "only_show_if_degraded": false + }, + { + "id": "y4bsf3hcl974", + "name": "AWS ec2-us-east-1", + "status": "operational", + "created_at": "2022-11-21T21:37:14.434Z", + "updated_at": "2024-03-26T22:17:17.790Z", + "position": 1, + "description": null, + "showcase": false, + "start_date": null, + "group_id": "z0y6m9kyvy3j", + "page_id": "1ll2gn427qjl", + "group": false, + "only_show_if_degraded": false + }, + { + "id": "fm3yyllyxhgj", + "name": "AWS elb-eu-central-1", + "status": "operational", + "created_at": "2022-11-21T21:38:24.779Z", + "updated_at": "2022-11-21T21:46:47.748Z", + "position": 1, + "description": null, + "showcase": false, + "start_date": null, + "group_id": "nfknrn2bf3yz", + "page_id": "1ll2gn427qjl", + "group": false, + "only_show_if_degraded": false + }, + { + "id": "gl055wcw583j", + "name": "App", + "status": "operational", + "created_at": "2022-11-21T21:39:13.887Z", + "updated_at": "2024-05-08T15:33:56.331Z", + "position": 1, + "description": "Frontend application and API servers", + "showcase": true, + "start_date": "2022-07-01", + "group_id": "c4d9jd1jcx3f", + "page_id": "1ll2gn427qjl", + "group": false, + "only_show_if_degraded": false + }, + { + "id": "ylm2gzb49rr7", + "name": "License Server", + "status": "operational", + "created_at": "2022-11-21T21:34:54.421Z", + "updated_at": "2022-11-21T21:40:55.589Z", + 
"position": 2, + "description": null, + "showcase": true, + "start_date": "2022-07-01", + "group_id": "0cn1jr2pwdwm", + "page_id": "1ll2gn427qjl", + "group": false, + "only_show_if_degraded": false + }, + { + "id": "cjgb5s1jqy56", + "name": "Event and Data Ingestion", + "status": "operational", + "created_at": "2022-11-21T21:35:18.114Z", + "updated_at": "2024-04-30T19:32:00.323Z", + "position": 2, + "description": null, + "showcase": true, + "start_date": "2022-07-01", + "group_id": "41df083ftqt6", + "page_id": "1ll2gn427qjl", + "group": false, + "only_show_if_degraded": false + }, + { + "id": "ps354j1bvl04", + "name": "AWS elasticache-eu-central-1", + "status": "operational", + "created_at": "2022-11-21T21:38:04.615Z", + "updated_at": "2022-11-21T21:47:06.804Z", + "position": 2, + "description": null, + "showcase": false, + "start_date": null, + "group_id": "nfknrn2bf3yz", + "page_id": "1ll2gn427qjl", + "group": false, + "only_show_if_degraded": false + }, + { + "id": "t053nl3g63sw", + "name": "AWS elb-us-east-1", + "status": "operational", + "created_at": "2022-11-21T21:38:15.462Z", + "updated_at": "2024-06-26T03:49:53.357Z", + "position": 2, + "description": null, + "showcase": false, + "start_date": null, + "group_id": "z0y6m9kyvy3j", + "page_id": "1ll2gn427qjl", + "group": false, + "only_show_if_degraded": false + }, + { + "id": "41df083ftqt6", + "name": "US Cloud 🇺🇸", + "status": "operational", + "created_at": "2022-11-21T21:43:17.594Z", + "updated_at": "2022-11-22T01:55:51.439Z", + "position": 2, + "description": null, + "showcase": false, + "start_date": null, + "group_id": null, + "page_id": "1ll2gn427qjl", + "group": true, + "only_show_if_degraded": false, + "components": ["6v7xydh2714r", "cjgb5s1jqy56", "1t4b8gf5psbc"] + }, + { + "id": "4tw78vnj4045", + "name": "Event and Data Ingestion", + "status": "operational", + "created_at": "2022-11-21T21:48:51.909Z", + "updated_at": "2024-05-27T23:47:54.117Z", + "position": 2, + "description": null, + "showcase": true, + "start_date": "2022-07-01", + "group_id": "c4d9jd1jcx3f", + "page_id": "1ll2gn427qjl", + "group": false, + "only_show_if_degraded": false + }, + { + "id": "1t4b8gf5psbc", + "name": "Feature Flags and Experiments", + "status": "operational", + "created_at": "2022-11-21T21:35:49.794Z", + "updated_at": "2024-05-22T17:36:43.895Z", + "position": 3, + "description": "Decide endpoint", + "showcase": true, + "start_date": "2022-07-01", + "group_id": "41df083ftqt6", + "page_id": "1ll2gn427qjl", + "group": false, + "only_show_if_degraded": false + }, + { + "id": "tkzg024s2zsc", + "name": "AWS rds-us-east-1", + "status": "operational", + "created_at": "2022-11-21T21:37:21.722Z", + "updated_at": "2023-06-13T21:28:10.603Z", + "position": 3, + "description": null, + "showcase": false, + "start_date": null, + "group_id": "z0y6m9kyvy3j", + "page_id": "1ll2gn427qjl", + "group": false, + "only_show_if_degraded": false + }, + { + "id": "9t36hjkpdffk", + "name": "AWS rds-eu-central-1", + "status": "operational", + "created_at": "2022-11-21T21:37:32.316Z", + "updated_at": "2022-11-21T21:47:25.843Z", + "position": 3, + "description": null, + "showcase": false, + "start_date": null, + "group_id": "nfknrn2bf3yz", + "page_id": "1ll2gn427qjl", + "group": false, + "only_show_if_degraded": false + }, + { + "id": "c4d9jd1jcx3f", + "name": "EU Cloud 🇪🇺", + "status": "operational", + "created_at": "2022-11-21T21:44:27.968Z", + "updated_at": "2022-11-21T21:45:39.939Z", + "position": 3, + "description": null, + "showcase": false, + "start_date": null, + 
"group_id": null, + "page_id": "1ll2gn427qjl", + "group": true, + "only_show_if_degraded": false, + "components": ["gl055wcw583j", "4tw78vnj4045", "1mcb5g8sznnz"] + }, + { + "id": "1mcb5g8sznnz", + "name": "Feature Flags and Experiments", + "status": "operational", + "created_at": "2022-11-21T21:49:13.429Z", + "updated_at": "2024-05-22T17:36:43.909Z", + "position": 3, + "description": "Decide endpoint", + "showcase": true, + "start_date": "2022-07-01", + "group_id": "c4d9jd1jcx3f", + "page_id": "1ll2gn427qjl", + "group": false, + "only_show_if_degraded": false + }, + { + "id": "7zz5gc65m794", + "name": "AWS ec2-eu-central-1", + "status": "operational", + "created_at": "2022-11-21T21:37:41.686Z", + "updated_at": "2022-11-21T21:47:44.353Z", + "position": 4, + "description": null, + "showcase": false, + "start_date": null, + "group_id": "nfknrn2bf3yz", + "page_id": "1ll2gn427qjl", + "group": false, + "only_show_if_degraded": false + }, + { + "id": "833w3100npw3", + "name": "AWS elasticache-us-east-1", + "status": "operational", + "created_at": "2022-11-21T21:37:50.482Z", + "updated_at": "2023-09-18T23:53:20.416Z", + "position": 4, + "description": null, + "showcase": false, + "start_date": null, + "group_id": "z0y6m9kyvy3j", + "page_id": "1ll2gn427qjl", + "group": false, + "only_show_if_degraded": false + }, + { + "id": "0cn1jr2pwdwm", + "name": "Support APIs", + "status": "operational", + "created_at": "2022-11-21T21:40:45.187Z", + "updated_at": "2022-11-22T01:55:51.456Z", + "position": 4, + "description": null, + "showcase": false, + "start_date": null, + "group_id": null, + "page_id": "1ll2gn427qjl", + "group": true, + "only_show_if_degraded": false, + "components": ["xtkq4whlbry9", "ylm2gzb49rr7"] + }, + { + "id": "z0y6m9kyvy3j", + "name": "AWS US 🇺🇸", + "status": "operational", + "created_at": "2022-11-21T21:46:33.358Z", + "updated_at": "2022-11-22T01:55:51.473Z", + "position": 5, + "description": null, + "showcase": false, + "start_date": null, + "group_id": null, + "page_id": "1ll2gn427qjl", + "group": true, + "only_show_if_degraded": false, + "components": ["y4bsf3hcl974", "t053nl3g63sw", "tkzg024s2zsc", "833w3100npw3", "rgyd1tg1jhcv"] + }, + { + "id": "rgyd1tg1jhcv", + "name": "AWS kafka-us-east-1", + "status": "operational", + "created_at": "2022-11-22T02:03:23.870Z", + "updated_at": "2023-09-29T04:43:36.890Z", + "position": 5, + "description": null, + "showcase": false, + "start_date": null, + "group_id": "z0y6m9kyvy3j", + "page_id": "1ll2gn427qjl", + "group": false, + "only_show_if_degraded": false + }, + { + "id": "20jjhvqn3xcq", + "name": "AWS kafka-eu-central-1", + "status": "operational", + "created_at": "2022-11-22T02:03:26.507Z", + "updated_at": "2022-11-22T02:04:08.037Z", + "position": 5, + "description": null, + "showcase": false, + "start_date": null, + "group_id": "nfknrn2bf3yz", + "page_id": "1ll2gn427qjl", + "group": false, + "only_show_if_degraded": false + }, + { + "id": "nfknrn2bf3yz", + "name": "AWS EU 🇪🇺", + "status": "operational", + "created_at": "2022-11-21T21:46:47.736Z", + "updated_at": "2022-11-22T01:55:51.491Z", + "position": 6, + "description": null, + "showcase": false, + "start_date": null, + "group_id": null, + "page_id": "1ll2gn427qjl", + "group": true, + "only_show_if_degraded": false, + "components": ["fm3yyllyxhgj", "ps354j1bvl04", "9t36hjkpdffk", "7zz5gc65m794", "20jjhvqn3xcq"] + } + ], + "incidents": [], + "scheduled_maintenances": [], + "status": { + "indicator": "none", + "description": "All Systems Operational" + } +} diff --git 
a/frontend/src/mocks/handlers.ts b/frontend/src/mocks/handlers.ts index 885e116ef08da..f1c8f9bfc93f5 100644 --- a/frontend/src/mocks/handlers.ts +++ b/frontend/src/mocks/handlers.ts @@ -16,6 +16,7 @@ import { SharingConfigurationType } from '~/types' import { getAvailableProductFeatures } from './features' import { billingJson } from './fixtures/_billing' +import * as statusPageAllOK from './fixtures/_status_page_all_ok.json' import { Mocks, MockSignature, mocksToHandlers } from './utils' export const EMPTY_PAGINATED_RESPONSE = { count: 0, results: [] as any[], next: null, previous: null } @@ -123,6 +124,11 @@ export const defaultMocks: Mocks = { '/api/billing/': { ...billingJson, }, + '/api/billing/get_invoices': { + link: null, + count: 0, + }, + 'https://status.posthog.com/api/v2/summary.json': statusPageAllOK, }, post: { 'https://us.i.posthog.com/e/': (req, res, ctx): MockSignature => posthogCORSResponse(req, res, ctx), @@ -133,5 +139,8 @@ export const defaultMocks: Mocks = { '/api/projects/:team_id/insights/:insight_id/viewed/': (): MockSignature => [201, null], 'api/projects/:team_id/query': [200, { results: [] }], }, + patch: { + '/api/projects/:team_id/session_recording_playlists/:playlist_id/': {}, + }, } export const handlers = mocksToHandlers(defaultMocks) diff --git a/frontend/src/queries/schema.json b/frontend/src/queries/schema.json index 7811c9e1e43bb..f00512ba8906e 100644 --- a/frontend/src/queries/schema.json +++ b/frontend/src/queries/schema.json @@ -2575,7 +2575,7 @@ "type": "string" } }, - "required": ["id", "name", "should_sync", "incremental", "status"], + "required": ["id", "name", "should_sync", "incremental"], "type": "object" }, "DatabaseSchemaSource": { @@ -5319,7 +5319,7 @@ "type": "object" }, "NodeKind": { - "description": "PostHog Query Schema definition.\n\nThis file acts as the source of truth for:\n\n- frontend/src/queries/schema.json - generated from typescript via \"pnpm run generate:schema:json\"\n\n- posthog/schema.py - generated from json the above json via \"pnpm run generate:schema:python\"", + "description": "PostHog Query Schema definition.\n\nThis file acts as the source of truth for:\n\n- frontend/src/queries/schema.json - generated from typescript via \"pnpm run schema:build:json\"\n\n- posthog/schema.py - generated from json the above json via \"pnpm run schema:build:python\"", "enum": [ "EventsNode", "ActionsNode", diff --git a/frontend/src/queries/schema.ts b/frontend/src/queries/schema.ts index cfbddedd9ce92..e34100a494b75 100644 --- a/frontend/src/queries/schema.ts +++ b/frontend/src/queries/schema.ts @@ -41,10 +41,10 @@ type integer = number * This file acts as the source of truth for: * * - frontend/src/queries/schema.json - * - generated from typescript via "pnpm run generate:schema:json" + * - generated from typescript via "pnpm run schema:build:json" * * - posthog/schema.py - * - generated from json the above json via "pnpm run generate:schema:python" + * - generated from json the above json via "pnpm run schema:build:python" * */ export enum NodeKind { @@ -1430,7 +1430,7 @@ export interface DatabaseSchemaSchema { name: string should_sync: boolean incremental: boolean - status: string + status?: string last_synced_at?: string } diff --git a/frontend/src/scenes/billing/AllProductsPlanComparison.tsx b/frontend/src/scenes/billing/AllProductsPlanComparison.tsx index 8809851118b84..2f6c7171acb68 100644 --- a/frontend/src/scenes/billing/AllProductsPlanComparison.tsx +++ b/frontend/src/scenes/billing/AllProductsPlanComparison.tsx @@ -11,7 
+11,7 @@ import React, { useState } from 'react' import { getProductIcon } from 'scenes/products/Products' import useResizeObserver from 'use-resize-observer' -import { BillingProductV2AddonType, BillingProductV2Type, BillingV2FeatureType, BillingV2PlanType } from '~/types' +import { BillingFeatureType, BillingPlanType, BillingProductV2AddonType, BillingProductV2Type } from '~/types' import { convertLargeNumberToWords, getProration, getProrationMessage, getUpgradeProductLink } from './billing-utils' import { billingLogic } from './billingLogic' @@ -23,7 +23,7 @@ export function PlanIcon({ className, timeDenominator, }: { - feature?: BillingV2FeatureType + feature?: BillingFeatureType className?: string timeDenominator?: string }): JSX.Element { @@ -56,7 +56,7 @@ const PricingTiers = ({ plan, product, }: { - plan: BillingV2PlanType + plan: BillingPlanType product: BillingProductV2Type | BillingProductV2AddonType }): JSX.Element => { const { width, ref: tiersRef } = useResizeObserver() @@ -114,7 +114,7 @@ const PricingTiers = ({ * @param {string} plan.included_if - Condition for plan inclusion. * @returns {string} - The pricing description for the plan. */ -function getPlanDescription(plan: BillingV2PlanType): string { +function getPlanDescription(plan: BillingPlanType): string { if (plan.free_allocation && !plan.tiers) { return 'Free forever' } else if (plan.unit_amount_usd) { @@ -283,7 +283,7 @@ export const AllProductsPlanComparison = ({ {includedPlans - .find((plan: BillingV2PlanType) => plan.included_if == 'has_subscription') + .find((plan: BillingPlanType) => plan.included_if == 'has_subscription') ?.features?.map((feature) => ( // Inclusion product feature row @@ -521,13 +521,7 @@ export const AllProductsPlanComparisonModal = ({ ) } -const AddonPlanTiers = ({ - plan, - addon, -}: { - plan: BillingV2PlanType - addon: BillingProductV2AddonType -}): JSX.Element => { +const AddonPlanTiers = ({ plan, addon }: { plan: BillingPlanType; addon: BillingProductV2AddonType }): JSX.Element => { const [showTiers, setShowTiers] = useState(false) return showTiers ? 
( diff --git a/frontend/src/scenes/billing/Billing.stories.tsx b/frontend/src/scenes/billing/Billing.stories.tsx index 139cc156c4bc1..43ce5c0aa362d 100644 --- a/frontend/src/scenes/billing/Billing.stories.tsx +++ b/frontend/src/scenes/billing/Billing.stories.tsx @@ -32,7 +32,7 @@ const meta: Meta = { ], } export default meta -export const _BillingV2 = (): JSX.Element => { +export const _Billing = (): JSX.Element => { useStorybookMocks({ get: { '/api/billing/': { @@ -44,7 +44,7 @@ export const _BillingV2 = (): JSX.Element => { return } -export const BillingV2WithDiscount = (): JSX.Element => { +export const BillingWithDiscount = (): JSX.Element => { useStorybookMocks({ get: { '/api/billing/': { @@ -56,7 +56,7 @@ export const BillingV2WithDiscount = (): JSX.Element => { return } -export const BillingV2WithLimitAnd100PercentDiscount = (): JSX.Element => { +export const BillingWithLimitAnd100PercentDiscount = (): JSX.Element => { useStorybookMocks({ get: { '/api/billing/': { diff --git a/frontend/src/scenes/billing/Billing.tsx b/frontend/src/scenes/billing/Billing.tsx index 7052186af73f6..084dd201ced20 100644 --- a/frontend/src/scenes/billing/Billing.tsx +++ b/frontend/src/scenes/billing/Billing.tsx @@ -43,7 +43,7 @@ export function Billing(): JSX.Element { isAnnualPlan, billingError, } = useValues(billingLogic) - const { reportBillingV2Shown } = useActions(billingLogic) + const { reportBillingShown } = useActions(billingLogic) const { preflight, isCloudOrDev } = useValues(preflightLogic) const { openSupportForm } = useActions(supportLogic) const { featureFlags } = useValues(featureFlagLogic) @@ -54,7 +54,7 @@ export function Billing(): JSX.Element { useEffect(() => { if (billing) { - reportBillingV2Shown() + reportBillingShown() } }, [!!billing]) diff --git a/frontend/src/scenes/billing/BillingProduct.tsx b/frontend/src/scenes/billing/BillingProduct.tsx index be083745e9fea..8654f18d72c17 100644 --- a/frontend/src/scenes/billing/BillingProduct.tsx +++ b/frontend/src/scenes/billing/BillingProduct.tsx @@ -15,7 +15,7 @@ import { eventUsageLogic } from 'lib/utils/eventUsageLogic' import { useRef } from 'react' import { getProductIcon } from 'scenes/products/Products' -import { BillingProductV2AddonType, BillingProductV2Type, BillingV2TierType } from '~/types' +import { BillingProductV2AddonType, BillingProductV2Type, BillingTierType } from '~/types' import { convertLargeNumberToWords, getUpgradeProductLink, summarizeUsage } from './billing-utils' import { BillingGauge } from './BillingGauge' @@ -29,7 +29,7 @@ import { ProductPricingModal } from './ProductPricingModal' import { UnsubscribeSurveyModal } from './UnsubscribeSurveyModal' export const getTierDescription = ( - tiers: BillingV2TierType[], + tiers: BillingTierType[], i: number, product: BillingProductV2Type | BillingProductV2AddonType, interval: string diff --git a/frontend/src/scenes/billing/PlanComparison.tsx b/frontend/src/scenes/billing/PlanComparison.tsx index 741609879ffd2..c12dd34f96f0f 100644 --- a/frontend/src/scenes/billing/PlanComparison.tsx +++ b/frontend/src/scenes/billing/PlanComparison.tsx @@ -13,7 +13,7 @@ import React, { useState } from 'react' import { getProductIcon } from 'scenes/products/Products' import useResizeObserver from 'use-resize-observer' -import { BillingProductV2AddonType, BillingProductV2Type, BillingV2FeatureType, BillingV2PlanType } from '~/types' +import { BillingFeatureType, BillingPlanType, BillingProductV2AddonType, BillingProductV2Type } from '~/types' import { convertLargeNumberToWords, 
getProration, getUpgradeProductLink } from './billing-utils' import { billingLogic } from './billingLogic' @@ -25,7 +25,7 @@ export function PlanIcon({ className, timeDenominator, }: { - feature?: BillingV2FeatureType + feature?: BillingFeatureType className?: string timeDenominator?: string }): JSX.Element { @@ -58,7 +58,7 @@ const PricingTiers = ({ plan, product, }: { - plan: BillingV2PlanType + plan: BillingPlanType product: BillingProductV2Type | BillingProductV2AddonType }): JSX.Element => { const { width, ref: tiersRef } = useResizeObserver() @@ -426,7 +426,7 @@ export const PlanComparison = ({ {includedPlans - .find((plan: BillingV2PlanType) => plan.included_if == 'has_subscription') + .find((plan: BillingPlanType) => plan.included_if == 'has_subscription') ?.features?.map((feature, i) => ( { +const AddonPlanTiers = ({ plan, addon }: { plan: BillingPlanType; addon: BillingProductV2AddonType }): JSX.Element => { const [showTiers, setShowTiers] = useState(false) return showTiers ? ( diff --git a/frontend/src/scenes/billing/ProductPricingModal.tsx b/frontend/src/scenes/billing/ProductPricingModal.tsx index fc372d232d3e7..49e3356371710 100644 --- a/frontend/src/scenes/billing/ProductPricingModal.tsx +++ b/frontend/src/scenes/billing/ProductPricingModal.tsx @@ -1,7 +1,7 @@ import { LemonModal } from '@posthog/lemon-ui' import { capitalizeFirstLetter } from 'lib/utils' -import { BillingProductV2AddonType, BillingProductV2Type, BillingV2PlanType } from '~/types' +import { BillingPlanType, BillingProductV2AddonType, BillingProductV2Type } from '~/types' import { getTierDescription } from './BillingProduct' @@ -19,7 +19,7 @@ export const ProductPricingModal = ({ if (!planKey) { return null } - const tiers = product?.plans?.find((plan: BillingV2PlanType) => plan.plan_key === planKey)?.tiers + const tiers = product?.plans?.find((plan: BillingPlanType) => plan.plan_key === planKey)?.tiers if (!product || !tiers) { return null diff --git a/frontend/src/scenes/billing/billing-utils.ts b/frontend/src/scenes/billing/billing-utils.ts index 49457eb9a6a16..e0098f710403b 100644 --- a/frontend/src/scenes/billing/billing-utils.ts +++ b/frontend/src/scenes/billing/billing-utils.ts @@ -2,7 +2,7 @@ import { FEATURE_FLAGS } from 'lib/constants' import { dayjs } from 'lib/dayjs' import { FeatureFlagsSet } from 'lib/logic/featureFlagLogic' -import { BillingProductV2Type, BillingV2TierType, BillingV2Type } from '~/types' +import { BillingProductV2Type, BillingTierType, BillingType } from '~/types' export const summarizeUsage = (usage: number | null): string => { if (usage === null) { @@ -15,10 +15,7 @@ export const summarizeUsage = (usage: number | null): string => { return `${Math.round(usage / 1000000)} million` } -export const projectUsage = ( - usage: number | undefined, - period: BillingV2Type['billing_period'] -): number | undefined => { +export const projectUsage = (usage: number | undefined, period: BillingType['billing_period']): number | undefined => { if (typeof usage === 'undefined') { return usage } @@ -39,7 +36,7 @@ export const projectUsage = ( export const convertUsageToAmount = ( usage: number, - productAndAddonTiers: BillingV2TierType[][], + productAndAddonTiers: BillingTierType[][], percentDiscount?: number ): string => { if (!productAndAddonTiers) { @@ -47,7 +44,7 @@ export const convertUsageToAmount = ( } let remainingUsage = usage let amount = 0 - let previousTier: BillingV2TierType | undefined = undefined + let previousTier: BillingTierType | undefined = undefined const tiers = 
productAndAddonTiers[0].map((tier, index) => { const allAddonsTiers = productAndAddonTiers.slice(1) @@ -91,7 +88,7 @@ export const convertUsageToAmount = ( export const convertAmountToUsage = ( amount: string, - productAndAddonTiers: BillingV2TierType[][], + productAndAddonTiers: BillingTierType[][], discountPercent?: number ): number => { if (!amount) { @@ -118,7 +115,7 @@ export const convertAmountToUsage = ( let remainingAmount = parseFloat(amount) let usage = 0 - let previousTier: BillingV2TierType | undefined = undefined + let previousTier: BillingTierType | undefined = undefined if (remainingAmount === 0) { if (parseFloat(tiers[0].unit_amount_usd) === 0) { @@ -172,7 +169,7 @@ export const getUpgradeProductLink = ({ upgradeToPlanKey: string redirectPath?: string includeAddons: boolean - subscriptionLevel?: BillingV2Type['subscription_level'] + subscriptionLevel?: BillingType['subscription_level'] featureFlags: FeatureFlagsSet }): string => { let url = '/api/billing/activate?' diff --git a/frontend/src/scenes/billing/billingLogic.tsx b/frontend/src/scenes/billing/billingLogic.tsx index 30604688729a5..ec2d3718bc61d 100644 --- a/frontend/src/scenes/billing/billingLogic.tsx +++ b/frontend/src/scenes/billing/billingLogic.tsx @@ -15,7 +15,7 @@ import posthog from 'posthog-js' import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' import { userLogic } from 'scenes/userLogic' -import { BillingProductV2Type, BillingV2PlanType, BillingV2Type, ProductKey } from '~/types' +import { BillingPlanType, BillingProductV2Type, BillingType, ProductKey } from '~/types' import type { billingLogicType } from './billingLogicType' @@ -51,7 +51,7 @@ export interface BillingError { action: LemonButtonPropsBase } -const parseBillingResponse = (data: Partial): BillingV2Type => { +const parseBillingResponse = (data: Partial): BillingType => { if (data.billing_period) { data.billing_period = { current_period_start: dayjs(data.billing_period.current_period_start), @@ -72,7 +72,7 @@ const parseBillingResponse = (data: Partial): BillingV2Type => { data.amount_off_expires_at = data.billing_period.current_period_end } - return data as BillingV2Type + return data as BillingType } export const billingLogic = kea([ @@ -83,7 +83,7 @@ export const billingLogic = kea([ setShowLicenseDirectInput: (show: boolean) => ({ show }), reportBillingAlertShown: (alertConfig: BillingAlertConfig) => ({ alertConfig }), reportBillingAlertActionClicked: (alertConfig: BillingAlertConfig) => ({ alertConfig }), - reportBillingV2Shown: true, + reportBillingShown: true, registerInstrumentationProps: true, setRedirectPath: true, setIsOnboarding: true, @@ -182,7 +182,7 @@ export const billingLogic = kea([ }), loaders(({ actions, values }) => ({ billing: [ - null as BillingV2Type | null, + null as BillingType | null, { loadBilling: async () => { const response = await api.get('api/billing') @@ -298,7 +298,7 @@ export const billingLogic = kea([ ], projectedTotalAmountUsdWithBillingLimits: [ (s) => [s.billing], - (billing: BillingV2Type): number => { + (billing: BillingType): number => { if (!billing) { return 0 } @@ -340,7 +340,7 @@ export const billingLogic = kea([ ], supportPlans: [ (s) => [s.billing], - (billing: BillingV2Type): BillingV2PlanType[] => { + (billing: BillingType): BillingPlanType[] => { const platformAndSupportProduct = billing?.products?.find( (product) => product.type == ProductKey.PLATFORM_AND_SUPPORT ) @@ -357,7 +357,7 @@ export const billingLogic = kea([ ], hasSupportAddonPlan: [ (s) => [s.billing], - (billing: 
BillingV2Type): boolean => { + (billing: BillingType): boolean => { return !!billing?.products ?.find((product) => product.type == ProductKey.PLATFORM_AND_SUPPORT) ?.addons.find((addon) => addon.plans.find((plan) => plan.current_plan)) @@ -394,7 +394,7 @@ export const billingLogic = kea([ }, })), listeners(({ actions, values }) => ({ - reportBillingV2Shown: () => { + reportBillingShown: () => { posthog.capture('billing v2 shown') }, reportBillingAlertShown: ({ alertConfig }) => { diff --git a/frontend/src/scenes/billing/billingProductLogic.ts b/frontend/src/scenes/billing/billingProductLogic.ts index e365ca8fa2e72..2441c100ad43c 100644 --- a/frontend/src/scenes/billing/billingProductLogic.ts +++ b/frontend/src/scenes/billing/billingProductLogic.ts @@ -6,7 +6,7 @@ import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import posthog from 'posthog-js' import React from 'react' -import { BillingProductV2AddonType, BillingProductV2Type, BillingV2PlanType, BillingV2TierType } from '~/types' +import { BillingPlanType, BillingProductV2AddonType, BillingProductV2Type, BillingTierType } from '~/types' import { convertAmountToUsage } from './billing-utils' import { billingLogic } from './billingLogic' @@ -63,7 +63,7 @@ export const billingProductLogic = kea([ setBillingProductLoading: (productKey: string | null) => ({ productKey }), initiateProductUpgrade: ( product: BillingProductV2Type | BillingProductV2AddonType, - plan: BillingV2PlanType, + plan: BillingPlanType, redirectPath?: string ) => ({ plan, @@ -156,7 +156,7 @@ export const billingProductLogic = kea([ currentAndUpgradePlans: [ (_s, p) => [p.product], (product) => { - const currentPlanIndex = product.plans.findIndex((plan: BillingV2PlanType) => plan.current_plan) + const currentPlanIndex = product.plans.findIndex((plan: BillingPlanType) => plan.current_plan) const currentPlan = currentPlanIndex >= 0 ? product.plans?.[currentPlanIndex] : null const upgradePlan = // If in debug mode and with no license there will be @@ -188,9 +188,9 @@ export const billingProductLogic = kea([ const addonTiers = product.addons ?.filter((addon: BillingProductV2AddonType) => addon.subscribed) ?.map((addon: BillingProductV2AddonType) => addon.tiers) - const productAndAddonTiers: BillingV2TierType[][] = [product.tiers, ...addonTiers].filter( + const productAndAddonTiers: BillingTierType[][] = [product.tiers, ...addonTiers].filter( Boolean - ) as BillingV2TierType[][] + ) as BillingTierType[][] return product.tiers ? isEditingBillingLimit ? convertAmountToUsage( @@ -355,9 +355,9 @@ export const billingProductLogic = kea([ ?.map((addon: BillingProductV2AddonType) => addon.tiers) : [] - const productAndAddonTiers: BillingV2TierType[][] = [props.product.tiers, ...addonTiers].filter( + const productAndAddonTiers: BillingTierType[][] = [props.product.tiers, ...addonTiers].filter( Boolean - ) as BillingV2TierType[][] + ) as BillingTierType[][] const newAmountAsUsage = props.product.tiers ? 
convertAmountToUsage(`${input}`, productAndAddonTiers, values.billing?.discount_percent) diff --git a/frontend/src/scenes/onboarding/OnboardingProductIntroduction.tsx b/frontend/src/scenes/onboarding/OnboardingProductIntroduction.tsx index 75924b11061eb..8a410d91471dc 100644 --- a/frontend/src/scenes/onboarding/OnboardingProductIntroduction.tsx +++ b/frontend/src/scenes/onboarding/OnboardingProductIntroduction.tsx @@ -12,13 +12,13 @@ import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' import { getProductIcon } from 'scenes/products/Products' import { userLogic } from 'scenes/userLogic' -import { BillingProductV2Type, BillingV2FeatureType, ProductKey } from '~/types' +import { BillingFeatureType, BillingProductV2Type, ProductKey } from '~/types' import { onboardingLogic, OnboardingStepKey } from './onboardingLogic' import { OnboardingStep } from './OnboardingStep' import { multiInstallProducts, sdksLogic } from './sdks/sdksLogic' -export const Feature = ({ name, description, images }: BillingV2FeatureType): JSX.Element => { +export const Feature = ({ name, description, images }: BillingFeatureType): JSX.Element => { return images ? (
  • @@ -32,7 +32,7 @@ export const Feature = ({ name, description, images }: BillingV2FeatureType): JS ) } -export const Subfeature = ({ name, description, icon_key }: BillingV2FeatureType): JSX.Element => { +export const Subfeature = ({ name, description, icon_key }: BillingFeatureType): JSX.Element => { return (
  • {getProductIcon(name, icon_key)} diff --git a/frontend/src/scenes/session-recordings/SessionsRecordings-player-success.stories.tsx b/frontend/src/scenes/session-recordings/SessionsRecordings-player-success.stories.tsx index f520ab947b815..39b6bbd3d9980 100644 --- a/frontend/src/scenes/session-recordings/SessionsRecordings-player-success.stories.tsx +++ b/frontend/src/scenes/session-recordings/SessionsRecordings-player-success.stories.tsx @@ -14,7 +14,6 @@ import recordings from './__mocks__/recordings.json' const meta: Meta = { title: 'Replay/Player/Success', - tags: ['test-skip'], // TODO: Fix the flakey rendering due to player playback parameters: { layout: 'fullscreen', viewMode: 'story', @@ -118,37 +117,62 @@ const meta: Meta = { }, }, post: { - '/api/projects/:team/query': recordingEventsJson, + '/api/projects/:team/query': (req, res, ctx) => { + const body = req.body as Record + + if ( + body.query.kind === 'HogQLQuery' && + body.query.query.startsWith( + 'SELECT properties.$session_id as session_id, any(properties) as properties' + ) + ) { + return res(ctx.json({ results: [['session_id_one', '{}']] })) + } + + if (body.query.kind === 'EventsQuery' && body.query.properties.length === 1) { + return res(ctx.json(recordingEventsJson)) + } + + // default to an empty response or we duplicate information + return res(ctx.json({ results: [] })) + }, }, }), ], } export default meta +const sceneUrl = (url: string, searchParams: Record = {}): string => + combineUrl(url, { + pause: true, + t: 7, + ...searchParams, + }).url + export function RecentRecordings(): JSX.Element { useEffect(() => { - router.actions.push(urls.replay()) + router.actions.push(sceneUrl(urls.replay())) }, []) return } export function RecordingsPlayListNoPinnedRecordings(): JSX.Element { useEffect(() => { - router.actions.push(urls.replayPlaylist('abcdefg')) + router.actions.push(sceneUrl(urls.replayPlaylist('abcdefg'))) }, []) return } export function RecordingsPlayListWithPinnedRecordings(): JSX.Element { useEffect(() => { - router.actions.push(urls.replayPlaylist('1234567')) + router.actions.push(sceneUrl(urls.replayPlaylist('1234567'))) }, []) return } export function SecondRecordingInList(): JSX.Element { useEffect(() => { - router.actions.push(combineUrl(urls.replay(), undefined, { sessionRecordingId: recordings[1].id }).url) + router.actions.push(sceneUrl(urls.replay(), { sessionRecordingId: recordings[1].id })) }, []) return } diff --git a/frontend/src/scenes/session-recordings/filters/AdvancedSessionRecordingsFilters.tsx b/frontend/src/scenes/session-recordings/filters/AdvancedSessionRecordingsFilters.tsx index f34cd0cef2cb9..0a7917cfc4eae 100644 --- a/frontend/src/scenes/session-recordings/filters/AdvancedSessionRecordingsFilters.tsx +++ b/frontend/src/scenes/session-recordings/filters/AdvancedSessionRecordingsFilters.tsx @@ -4,9 +4,7 @@ import { DateFilter } from 'lib/components/DateFilter/DateFilter' import { PropertyFilters } from 'lib/components/PropertyFilters/PropertyFilters' import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' import { TestAccountFilterSwitch } from 'lib/components/TestAccountFiltersSwitch' -import { FEATURE_FLAGS } from 'lib/constants' import { LemonLabel } from 'lib/lemon-ui/LemonLabel/LemonLabel' -import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { ActionFilter } from 'scenes/insights/filters/ActionFilter/ActionFilter' import { MathAvailability } from 'scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow' import { 
defaultRecordingDurationFilter } from 'scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic' @@ -74,8 +72,6 @@ export const AdvancedSessionRecordingsFilters = ({ }): JSX.Element => { const { groupsTaxonomicTypes } = useValues(groupsModel) - const { featureFlags } = useValues(featureFlagLogic) - const allowedPropertyTaxonomyTypes = [ TaxonomicFilterGroupType.EventProperties, TaxonomicFilterGroupType.EventFeatureFlags, @@ -84,16 +80,10 @@ export const AdvancedSessionRecordingsFilters = ({ ...groupsTaxonomicTypes, ] - const hasHogQLFiltering = featureFlags[FEATURE_FLAGS.SESSION_REPLAY_HOG_QL_FILTERING] - - if (hasHogQLFiltering) { - allowedPropertyTaxonomyTypes.push(TaxonomicFilterGroupType.SessionProperties) - } + allowedPropertyTaxonomyTypes.push(TaxonomicFilterGroupType.SessionProperties) const addFilterTaxonomyTypes = [TaxonomicFilterGroupType.PersonProperties, TaxonomicFilterGroupType.Cohorts] - if (hasHogQLFiltering) { - addFilterTaxonomyTypes.push(TaxonomicFilterGroupType.SessionProperties) - } + addFilterTaxonomyTypes.push(TaxonomicFilterGroupType.SessionProperties) return (
    @@ -125,15 +115,9 @@ export const AdvancedSessionRecordingsFilters = ({ buttonProps={{ type: 'secondary', size: 'small' }} /> - {hasHogQLFiltering ? ( - - Properties - - ) : ( - - Persons and cohorts - - )} + + Properties + { diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts index d2cee712bcbb2..b37c30ed8cca4 100644 --- a/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts +++ b/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts @@ -20,7 +20,7 @@ import { subscriptions } from 'kea-subscriptions' import { delay } from 'kea-test-utils' import { now } from 'lib/dayjs' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' -import { clamp, downloadFile, fromParamsGivenUrl } from 'lib/utils' +import { clamp, downloadFile } from 'lib/utils' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' import { wrapConsole } from 'lib/utils/wrapConsole' import posthog from 'posthog-js' @@ -621,7 +621,7 @@ export const sessionRecordingPlayerLogic = kea( // Check for the "t" search param in the url on first load if (!cache.hasInitialized) { cache.hasInitialized = true - const searchParams = fromParamsGivenUrl(window.location.search) + const searchParams = router.values.searchParams if (searchParams.timestamp) { const desiredStartTime = Number(searchParams.timestamp) actions.seekToTimestamp(desiredStartTime, true) @@ -673,6 +673,29 @@ export const sessionRecordingPlayerLogic = kea( if (props.autoPlay) { // Autoplay assumes we are playing immediately so lets go ahead and load more data actions.setPlay() + + if (router.values.searchParams.pause) { + setTimeout(() => { + /** KLUDGE: when loaded for visual regression tests we want to pause the player + ** but only after it has had time to buffer and show the frame + * + * Frustratingly if we start paused we never process the data, + * so the player frame is just a black square. + * + * If we play (the default behaviour) and then stop after its processed the data + * then we see the player screen + * and can assert that _at least_ the full snapshot has been processed + * (i.e. we didn't completely break rrweb playback) + * + * We have to be paused so that the visual regression snapshot doesn't flap + * (because of the seekbar timestamp changing) + * + * And don't want to be at 0, so we can see that the seekbar + * at least paints the "played" portion of the recording correctly + **/ + actions.setPause() + }, 100) + } } }, diff --git a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts index 0b66d4bb34062..2ae00e988273f 100644 --- a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts +++ b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts @@ -287,7 +287,6 @@ export const sessionRecordingsPlaylistLogic = kea [s.featureFlags], - (featureFlags) => !!featureFlags[FEATURE_FLAGS.SESSION_REPLAY_HOG_QL_FILTERING], - ], useUniversalFiltering: [ (s) => [s.featureFlags], (featureFlags) => !!featureFlags[FEATURE_FLAGS.SESSION_REPLAY_UNIVERSAL_FILTERS], @@ -736,6 +731,7 @@ export const sessionRecordingsPlaylistLogic = kea { const params: Params = objectClean({ + ...router.values.searchParams, simpleFilters: values.simpleFilters ?? undefined, advancedFilters: values.advancedFilters ?? 
undefined, sessionRecordingId: values.selectedRecordingId ?? undefined, diff --git a/frontend/src/scenes/settings/SettingsMap.tsx b/frontend/src/scenes/settings/SettingsMap.tsx index ee7c6c3b363b5..41c9f83b878b8 100644 --- a/frontend/src/scenes/settings/SettingsMap.tsx +++ b/frontend/src/scenes/settings/SettingsMap.tsx @@ -1,6 +1,7 @@ import { BounceRatePageViewModeSetting } from 'scenes/settings/project/BounceRatePageViewMode' import { PersonsJoinMode } from 'scenes/settings/project/PersonsJoinMode' import { PersonsOnEvents } from 'scenes/settings/project/PersonsOnEvents' +import { SessionsTableVersion } from 'scenes/settings/project/SessionsTableVersion' import { Invites } from './organization/Invites' import { Members } from './organization/Members' @@ -167,6 +168,12 @@ export const SettingsMap: SettingSection[] = [ component: , flag: 'SETTINGS_BOUNCE_RATE_PAGE_VIEW_MODE', }, + { + id: 'session-table-version', + title: 'Sessions Table Version', + component: , + flag: 'SETTINGS_SESSION_TABLE_VERSION', + }, ], }, diff --git a/frontend/src/scenes/settings/project/SessionsTableVersion.tsx b/frontend/src/scenes/settings/project/SessionsTableVersion.tsx new file mode 100644 index 0000000000000..368e9d1ef71ea --- /dev/null +++ b/frontend/src/scenes/settings/project/SessionsTableVersion.tsx @@ -0,0 +1,75 @@ +import { useActions, useValues } from 'kea' +import { LemonButton } from 'lib/lemon-ui/LemonButton' +import { LemonRadio, LemonRadioOption } from 'lib/lemon-ui/LemonRadio' +import { eventUsageLogic } from 'lib/utils/eventUsageLogic' +import { useState } from 'react' +import { teamLogic } from 'scenes/teamLogic' + +import { HogQLQueryModifiers } from '~/queries/schema' + +type SessionTableVersion = NonNullable + +const bounceRatePageViewModeOptions: LemonRadioOption[] = [ + { + value: 'auto', + label: ( + <> +
    Auto
    + + ), + }, + { + value: 'v1', + label: ( + <> +
    Version 1
    + + ), + }, + { + value: 'v2', + label: ( + <> +
    Version 2
    + + ), + }, +] + +export function SessionsTableVersion(): JSX.Element { + const { updateCurrentTeam } = useActions(teamLogic) + const { currentTeam } = useValues(teamLogic) + const { reportSessionTableVersionUpdated } = useActions(eventUsageLogic) + + const savedSessionTableVersion = + currentTeam?.modifiers?.sessionTableVersion ?? currentTeam?.default_modifiers?.sessionTableVersion ?? 'auto' + const [sessionTableVersion, setSessionTableVersion] = useState(savedSessionTableVersion) + + const handleChange = (version: SessionTableVersion): void => { + updateCurrentTeam({ modifiers: { ...currentTeam?.modifiers, sessionTableVersion: version } }) + reportSessionTableVersionUpdated(version) + } + + return ( + <> +

    + Choose which version of the session table to use. V2 is faster, but requires uuidv7 session ids. Use + auto unless you know what you're doing. +

    + +
    + handleChange(sessionTableVersion)} + disabledReason={sessionTableVersion === savedSessionTableVersion ? 'No changes to save' : undefined} + > + Save + +
    + + ) +} diff --git a/frontend/src/scenes/settings/types.ts b/frontend/src/scenes/settings/types.ts index d63d797ef536a..35b01185e1986 100644 --- a/frontend/src/scenes/settings/types.ts +++ b/frontend/src/scenes/settings/types.ts @@ -82,6 +82,7 @@ export type SettingId = | 'hedgehog-mode' | 'persons-join-mode' | 'bounce-rate-page-view-mode' + | 'session-table-version' type FeatureFlagKey = keyof typeof FEATURE_FLAGS diff --git a/frontend/src/scenes/urls.ts b/frontend/src/scenes/urls.ts index 4d99bb15b5d8c..2df0d9875fba9 100644 --- a/frontend/src/scenes/urls.ts +++ b/frontend/src/scenes/urls.ts @@ -115,6 +115,7 @@ export const urls = { replaySingle: (id: string, filters?: Partial): string => combineUrl(`/replay/${id}`, filters ? { filters } : {}).url, replayFilePlayback: (): string => combineUrl('/replay/file-playback').url, + personByDistinctId: (id: string, encode: boolean = true): string => encode ? `/person/${encodeURIComponent(id)}` : `/person/${id}`, personByUUID: (uuid: string, encode: boolean = true): string => diff --git a/frontend/src/scenes/web-analytics/WebAnalyticsTile.tsx b/frontend/src/scenes/web-analytics/WebAnalyticsTile.tsx index 6a9effb8e2e74..4cd3be6c9cfbf 100644 --- a/frontend/src/scenes/web-analytics/WebAnalyticsTile.tsx +++ b/frontend/src/scenes/web-analytics/WebAnalyticsTile.tsx @@ -332,7 +332,7 @@ export const WebStatsTableTile = ({ const { key, type } = webStatsBreakdownToPropertyName(breakdownBy) || {} const onClick = useCallback( - (breakdownValue: string) => { + (breakdownValue: string | null) => { if (!key || !type) { return } @@ -400,7 +400,7 @@ export const WebStatsTableTile = ({ ) } -const getBreakdownValue = (record: unknown, breakdownBy: WebStatsBreakdown): string | undefined => { +const getBreakdownValue = (record: unknown, breakdownBy: WebStatsBreakdown): string | null | undefined => { if (typeof record !== 'object' || !record || !('result' in record)) { return undefined } @@ -429,6 +429,10 @@ const getBreakdownValue = (record: unknown, breakdownBy: WebStatsBreakdown): str break } + if (breakdownValue === null) { + return null // null is a valid value, as opposed to undefined which signals that there isn't a valid value + } + if (typeof breakdownValue !== 'string') { return undefined } diff --git a/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts b/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts index 23c835cce9572..e08193a7fd83b 100644 --- a/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts +++ b/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts @@ -187,7 +187,7 @@ export const webAnalyticsLogic = kea([ togglePropertyFilter: ( type: PropertyFilterType.Event | PropertyFilterType.Person | PropertyFilterType.Session, key: string, - value: string | number, + value: string | number | null, tabChange?: { graphsTab?: string sourceTab?: string @@ -245,6 +245,25 @@ export const webAnalyticsLogic = kea([ { setWebAnalyticsFilters: (_, { webAnalyticsFilters }) => webAnalyticsFilters, togglePropertyFilter: (oldPropertyFilters, { key, value, type }): WebAnalyticsPropertyFilters => { + if (value === null) { + // if there's already an isNotSet filter, remove it + const isNotSetFilterExists = oldPropertyFilters.some( + (f) => f.type === type || f.key === key || f.operator === PropertyOperator.IsNotSet + ) + if (isNotSetFilterExists) { + return oldPropertyFilters.filter( + (f) => f.type !== type || f.key !== key || f.operator !== PropertyOperator.IsNotSet + ) + } + return [ + ...oldPropertyFilters, + { + type, + key, + operator: 
PropertyOperator.IsNotSet, + }, + ] + } const similarFilterExists = oldPropertyFilters.some( (f) => f.type === type && f.key === key && f.operator === PropertyOperator.Exact ) @@ -252,7 +271,11 @@ export const webAnalyticsLogic = kea([ // if there's already a matching property, turn it off or merge them return oldPropertyFilters .map((f) => { - if (f.key !== key || f.type !== type || f.operator !== PropertyOperator.Exact) { + if ( + f.key !== key || + f.type !== type || + ![PropertyOperator.Exact, PropertyOperator.IsNotSet].includes(f.operator) + ) { return f } const oldValue = (Array.isArray(f.value) ? f.value : [f.value]).filter(isNotNil) diff --git a/frontend/src/types.ts b/frontend/src/types.ts index 4fe44d361ef4f..f62f94ca17980 100644 --- a/frontend/src/types.ts +++ b/frontend/src/types.ts @@ -345,7 +345,7 @@ export interface OrganizationType extends OrganizationBasicType { updated_at: string plugins_access_level: PluginsAccessLevel teams: TeamBasicType[] - available_product_features: BillingV2FeatureType[] + available_product_features: BillingFeatureType[] is_member_join_email_enabled: boolean customer_id: string | null enforce_2fa: boolean | null @@ -1490,7 +1490,7 @@ export interface CurrentBillCycleType { current_period_end: number } -export type BillingV2FeatureType = { +export type BillingFeatureType = { key: AvailableFeatureUnion name: string description?: string | null @@ -1506,7 +1506,7 @@ export type BillingV2FeatureType = { type?: 'primary' | 'secondary' | null } -export interface BillingV2TierType { +export interface BillingTierType { flat_amount_usd: string unit_amount_usd: string current_amount_usd: string | null @@ -1529,7 +1529,7 @@ export interface BillingProductV2Type { docs_url: string free_allocation?: number | null subscribed: boolean | null - tiers?: BillingV2TierType[] | null + tiers?: BillingTierType[] | null tiered: boolean current_usage?: number projected_amount_usd?: string | null @@ -1541,10 +1541,10 @@ export interface BillingProductV2Type { has_exceeded_limit: boolean unit: string | null unit_amount_usd: string | null - plans: BillingV2PlanType[] + plans: BillingPlanType[] contact_support: boolean | null inclusion_only: any - features: BillingV2FeatureType[] + features: BillingFeatureType[] addons: BillingProductV2AddonType[] // addons-only: if this addon is included with the base product and not subscribed individually. for backwards compatibility. 
included_with_main_product?: boolean @@ -1558,7 +1558,7 @@ export interface BillingProductV2AddonType { icon_key?: string docs_url: string | null type: string - tiers: BillingV2TierType[] | null + tiers: BillingTierType[] | null tiered: boolean subscribed: boolean // sometimes addons are included with the base product, but they aren't subscribed individually @@ -1571,15 +1571,15 @@ export interface BillingProductV2AddonType { current_usage: number projected_usage: number | null projected_amount_usd: string | null - plans: BillingV2PlanType[] + plans: BillingPlanType[] usage_key?: string free_allocation?: number | null percentage_usage?: number - features: BillingV2FeatureType[] + features: BillingFeatureType[] included_if?: 'no_active_subscription' | 'has_subscription' | null usage_limit?: number | null } -export interface BillingV2Type { +export interface BillingType { customer_id: string has_active_subscription: boolean subscription_level: 'free' | 'paid' | 'custom' @@ -1601,15 +1601,15 @@ export interface BillingV2Type { license?: { plan: LicensePlan } - available_plans?: BillingV2PlanType[] + available_plans?: BillingPlanType[] discount_percent?: number discount_amount_usd?: string amount_off_expires_at?: Dayjs } -export interface BillingV2PlanType { +export interface BillingPlanType { free_allocation?: number | null - features: BillingV2FeatureType[] + features: BillingFeatureType[] name: string description: string is_free?: boolean @@ -1621,7 +1621,7 @@ export interface BillingV2PlanType { flat_rate: boolean product_key: ProductKeyUnion current_plan?: boolean | null - tiers?: BillingV2TierType[] | null + tiers?: BillingTierType[] | null unit_amount_usd: string | null included_if?: 'no_active_subscription' | 'has_subscription' | null initial_billing_limit?: number diff --git a/latest_migrations.manifest b/latest_migrations.manifest index 4e4b0e6c63d9d..c92bcbf594b88 100644 --- a/latest_migrations.manifest +++ b/latest_migrations.manifest @@ -5,7 +5,7 @@ contenttypes: 0002_remove_content_type_name ee: 0016_rolemembership_organization_member otp_static: 0002_throttling otp_totp: 0002_auto_20190420_0723 -posthog: 0431_externaldataschema_sync_type_payload +posthog: 0432_personlessdistinctid sessions: 0001_initial social_django: 0010_uid_db_index two_factor: 0007_auto_20201201_1019 diff --git a/mypy-baseline.txt b/mypy-baseline.txt index f39c4d09a66e9..ceacd890ecdf1 100644 --- a/mypy-baseline.txt +++ b/mypy-baseline.txt @@ -378,10 +378,6 @@ posthog/tasks/exports/test/test_export_utils.py:0: error: Function is missing a posthog/tasks/exports/test/test_csv_exporter_url_sanitising.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def] posthog/tasks/exports/test/test_csv_exporter_url_sanitising.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def] posthog/tasks/exports/test/test_csv_exporter_renders.py:0: error: Function is missing a type annotation [no-untyped-def] -posthog/session_recordings/queries/session_recording_list_from_replay_summary.py:0: error: Missing return statement [empty-body] -posthog/session_recordings/queries/session_recording_list_from_replay_summary.py:0: note: If the method is meant to be abstract, use @abc.abstractmethod -posthog/session_recordings/queries/session_recording_list_from_replay_summary.py:0: error: Missing return statement [empty-body] -posthog/session_recordings/queries/session_recording_list_from_replay_summary.py:0: note: If the method is meant to be abstract, use 
@abc.abstractmethod posthog/hogql_queries/test/test_query_runner.py:0: error: Variable "TestQueryRunner" is not valid as a type [valid-type] posthog/hogql_queries/test/test_query_runner.py:0: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases posthog/hogql_queries/test/test_query_runner.py:0: error: Invalid base class "TestQueryRunner" [misc] diff --git a/package.json b/package.json index d349c7fede5f6..20c91b7885743 100644 --- a/package.json +++ b/package.json @@ -62,7 +62,8 @@ "mobile-replay:web:schema:build:json": "ts-json-schema-generator -f tsconfig.json --path 'node_modules/@rrweb/types/dist/index.d.ts' --type 'eventWithTime' --expose all --no-top-ref --out ee/frontend/mobile-replay/schema/web/rr-web-schema.json && prettier --write ee/frontend/mobile-replay/schema/web/rr-web-schema.json", "mobile-replay:mobile:schema:build:json": "ts-json-schema-generator -f tsconfig.json --path 'ee/frontend/mobile-replay/mobile.types.ts' --type 'mobileEventWithTime' --expose all --no-top-ref --out ee/frontend/mobile-replay/schema/mobile/rr-mobile-schema.json && prettier --write ee/frontend/mobile-replay/schema/mobile/rr-mobile-schema.json", "mobile-replay:schema:build:json": "pnpm mobile-replay:web:schema:build:json && pnpm mobile-replay:mobile:schema:build:json", - "visualize-toolbar-bundle": "pnpm exec esbuild-visualizer --metadata ./toolbar-esbuild-meta.json --filename=toolbar-esbuild-bundle-visualization.html" + "visualize-toolbar-bundle": "pnpm exec esbuild-visualizer --metadata ./toolbar-esbuild-meta.json --filename=toolbar-esbuild-bundle-visualization.html", + "mypy-baseline-sync": "mypy -p posthog | mypy-baseline sync" }, "dependencies": { "@ant-design/icons": "^4.7.0", diff --git a/plugin-server/src/utils/db/db.ts b/plugin-server/src/utils/db/db.ts index de57b602725cc..89f9b060e268c 100644 --- a/plugin-server/src/utils/db/db.ts +++ b/plugin-server/src/utils/db/db.ts @@ -641,13 +641,20 @@ export class DB { isUserId: number | null, isIdentified: boolean, uuid: string, - distinctIds?: string[], - version = 0 + distinctIds?: { distinctId: string; version?: number }[], + tx?: TransactionClient ): Promise { distinctIds ||= [] + for (const distinctId of distinctIds) { + distinctId.version ||= 0 + } + + // The Person is being created, and so we can hardcode version 0! + const personVersion = 0 + const { rows } = await this.postgres.query( - PostgresUse.COMMON_WRITE, + tx ?? PostgresUse.COMMON_WRITE, `WITH inserted_person AS ( INSERT INTO posthog_person ( created_at, properties, properties_last_updated_at, @@ -662,7 +669,12 @@ export class DB { // `addDistinctIdPooled` (_, index) => `, distinct_id_${index} AS ( INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id, version) - VALUES ($${10 + index}, (SELECT id FROM inserted_person), $5, $9))` + VALUES ( + $${11 + index + distinctIds!.length - 1}, + (SELECT id FROM inserted_person), + $5, + $${10 + index}) + )` ) .join('') + `SELECT * FROM inserted_person;`, @@ -675,14 +687,21 @@ export class DB { isUserId, isIdentified, uuid, - version, + personVersion, // The copy and reverse here is to maintain compatability with pre-existing code // and tests. Postgres appears to assign IDs in reverse order of the INSERTs in the // CTEs above, so we need to reverse the distinctIds to match the old behavior where // we would do a round trip for each INSERT. 
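// (For reference on the placeholder arithmetic in the generated `distinct_id_${index}` CTEs above:
// $1-$9 are the posthog_person columns, with version hard-coded to 0 via `personVersion`;
// $10..$(9+N) carry the per-distinct-id versions and $(10+N)..$(9+2N) the distinct ids themselves,
// where N = distinctIds.length — hence `$${10 + index}` for the version column and
// `$${11 + index + distinctIds.length - 1}` for the distinct_id column.)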
We shouldn't actually depend on the // `id` column of distinct_ids, so this is just a simple way to keeps tests exactly // the same and prove behavior is the same as before. - ...distinctIds.slice().reverse(), + ...distinctIds + .slice() + .reverse() + .map(({ version }) => version), + ...distinctIds + .slice() + .reverse() + .map(({ distinctId }) => distinctId), ], 'insertPerson' ) @@ -698,8 +717,8 @@ export class DB { value: JSON.stringify({ person_id: person.uuid, team_id: teamId, - distinct_id: distinctId, - version, + distinct_id: distinctId.distinctId, + version: distinctId.version, is_deleted: 0, }), }, @@ -830,8 +849,66 @@ export class DB { return personDistinctIds.map((pdi) => pdi.distinct_id) } - public async addDistinctId(person: InternalPerson, distinctId: string, version: number): Promise { - const kafkaMessages = await this.addDistinctIdPooled(person, distinctId, version) + public async addPersonlessDistinctId(teamId: number, distinctId: string): Promise { + const result = await this.postgres.query( + PostgresUse.COMMON_WRITE, + ` + INSERT INTO posthog_personlessdistinctid (team_id, distinct_id, is_merged, created_at) + VALUES ($1, $2, false, now()) + ON CONFLICT (team_id, distinct_id) DO NOTHING + RETURNING is_merged + `, + [teamId, distinctId], + 'addPersonlessDistinctId' + ) + + if (result.rows.length === 1) { + return result.rows[0]['is_merged'] + } + + // ON CONFLICT ... DO NOTHING won't give us our RETURNING, so we have to do another SELECT + const existingResult = await this.postgres.query( + PostgresUse.COMMON_WRITE, + ` + SELECT is_merged + FROM posthog_personlessdistinctid + WHERE team_id = $1 AND distinct_id = $2 + `, + [teamId, distinctId], + 'addPersonlessDistinctId' + ) + + return existingResult.rows[0]['is_merged'] + } + + public async addPersonlessDistinctIdForMerge( + teamId: number, + distinctId: string, + tx?: TransactionClient + ): Promise { + const result = await this.postgres.query( + tx ?? 
PostgresUse.COMMON_WRITE, + ` + INSERT INTO posthog_personlessdistinctid (team_id, distinct_id, is_merged, created_at) + VALUES ($1, $2, true, now()) + ON CONFLICT (team_id, distinct_id) DO UPDATE + SET is_merged = true + RETURNING (xmax = 0) AS inserted + `, + [teamId, distinctId], + 'addPersonlessDistinctIdForMerge' + ) + + return result.rows[0].inserted + } + + public async addDistinctId( + person: InternalPerson, + distinctId: string, + version: number, + tx?: TransactionClient + ): Promise { + const kafkaMessages = await this.addDistinctIdPooled(person, distinctId, version, tx) if (kafkaMessages.length) { await this.kafkaProducer.queueMessages({ kafkaMessages, waitForAck: true }) } diff --git a/plugin-server/src/worker/ingestion/person-state.ts b/plugin-server/src/worker/ingestion/person-state.ts index b0fd16fbde625..3475bc669528a 100644 --- a/plugin-server/src/worker/ingestion/person-state.ts +++ b/plugin-server/src/worker/ingestion/person-state.ts @@ -1,10 +1,12 @@ import { PluginEvent, Properties } from '@posthog/plugin-scaffold' import * as Sentry from '@sentry/node' import { ProducerRecord } from 'kafkajs' +import LRU from 'lru-cache' import { DateTime } from 'luxon' import { Counter } from 'prom-client' import { KafkaProducerWrapper } from 'utils/db/kafka-producer-wrapper' +import { ONE_HOUR } from '../../config/constants' import { KAFKA_PERSON_OVERRIDE } from '../../config/kafka-topics' import { InternalPerson, Person, PropertyUpdateOperation, TimestampFormat } from '../../types' import { DB } from '../../utils/db/db' @@ -57,6 +59,16 @@ const BARE_CASE_INSENSITIVE_ILLEGAL_IDS = [ 'false', ] +// Tracks whether we know we've already inserted a `posthog_personlessdistinctid` for the given +// (team_id, distinct_id) pair. If we have, then we can skip the INSERT attempt. +// TODO: Move this out of module scope, we don't currently have a clean place (outside of the Hub) +// to stash longer lived objects like caches. For now it's not important. +const PERSONLESS_DISTINCT_ID_INSERTED_CACHE = new LRU({ + max: 10_000, + maxAge: ONE_HOUR * 24, // cache up to 24h + updateAgeOnGet: true, +}) + const BARE_CASE_SENSITIVE_ILLEGAL_IDS = ['[object Object]', 'NaN', 'None', 'none', 'null', '0', 'undefined'] const PERSON_EVENTS = new Set(['$identify', '$create_alias', '$merge_dangerously', '$set']) @@ -110,7 +122,34 @@ export class PersonState { async update(): Promise<[Person, Promise]> { if (!this.processPerson) { - const existingPerson = await this.db.fetchPerson(this.teamId, this.distinctId, { useReadReplica: true }) + let existingPerson = await this.db.fetchPerson(this.teamId, this.distinctId, { useReadReplica: true }) + + if (!existingPerson) { + // See the comment in `mergeDistinctIds`. We are inserting a row into `posthog_personlessdistinctid` + // to note that this Distinct ID has been used in "personless" mode. This is necessary + // so that later, during a merge, we can decide whether we need to write out an override + // or not. + + const personlessDistinctIdCacheKey = `${this.teamId}|${this.distinctId}` + if (!PERSONLESS_DISTINCT_ID_INSERTED_CACHE.get(personlessDistinctIdCacheKey)) { + const personIsMerged = await this.db.addPersonlessDistinctId(this.teamId, this.distinctId) + + // We know the row is in PG now, and so future events for this Distinct ID can + // skip the PG I/O. 
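// (The LRU above is purely an optimization: `addPersonlessDistinctId` is an idempotent
// INSERT ... ON CONFLICT (team_id, distinct_id) DO NOTHING, so a cache eviction or a process
// restart only costs one extra round trip to Postgres, never incorrect behavior.)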
+ PERSONLESS_DISTINCT_ID_INSERTED_CACHE.set(personlessDistinctIdCacheKey, true) + + if (personIsMerged) { + // If `personIsMerged` comes back `true`, it means the `posthog_personlessdistinctid` + // has been updated by a merge (either since we called `fetchPerson` above, plus + // replication lag). We need to check `fetchPerson` again (this time using the leader) + // so that we properly associate this event with the Person we got merged into. + existingPerson = await this.db.fetchPerson(this.teamId, this.distinctId, { + useReadReplica: false, + }) + } + } + } + if (existingPerson) { const person = existingPerson as Person @@ -204,7 +243,7 @@ export class PersonState { // :NOTE: This should never be set in this branch, but adding this for logical consistency this.updateIsIdentified, this.event.uuid, - [this.distinctId] + [{ distinctId: this.distinctId }] ) return [person, true] } @@ -217,13 +256,13 @@ export class PersonState { isUserId: number | null, isIdentified: boolean, creatorEventUuid: string, - distinctIds: string[], - version = 0 + distinctIds: { distinctId: string; version?: number }[], + tx?: TransactionClient ): Promise { if (distinctIds.length < 1) { throw new Error('at least 1 distinctId is required in `createPerson`') } - const uuid = uuidFromDistinctId(teamId, distinctIds[0]) + const uuid = uuidFromDistinctId(teamId, distinctIds[0].distinctId) const props = { ...propertiesOnce, ...properties, ...{ $creator_event_uuid: creatorEventUuid } } const propertiesLastOperation: Record = {} @@ -247,7 +286,7 @@ export class PersonState { isIdentified, uuid, distinctIds, - version + tx ) } @@ -450,57 +489,144 @@ export class PersonState { const otherPerson = await this.db.fetchPerson(teamId, otherPersonDistinctId) const mergeIntoPerson = await this.db.fetchPerson(teamId, mergeIntoDistinctId) + // A note about the `distinctIdVersion` logic you'll find below: + // // Historically, we always INSERT-ed new `posthog_persondistinctid` rows with `version=0`. // Overrides are only created when the version is > 0, see: // https://github.com/PostHog/posthog/blob/92e17ce307a577c4233d4ab252eebc6c2207a5ee/posthog/models/person/sql.py#L269-L287 // - // With the addition of optional person processing, we are no longer creating + // With the addition of optional person profile processing, we are no longer creating // `posthog_persondistinctid` and `posthog_person` rows when $process_person_profile=false. - // This means that: - // 1. At merge time, it's possible this `distinct_id` and its deterministically generated - // `person.uuid` has already been used for events in ClickHouse, but they have no - // corresponding rows in the `posthog_persondistinctid` or `posthog_person` tables - // 2. We need to assume the `distinct_id`/`person.uuid` have been used before (by - // `$process_person_profile=false` events) and create an override row for this - // `distinct_id` even though we're just now INSERT-ing it into Postgres/ClickHouse. 
We do - // this by starting with `version=1`, as if we had just deleted the old user and were - // updating the `distinct_id` row as part of the merge - const addDistinctIdVersion = 1 - - if (otherPerson && !mergeIntoPerson) { - await this.db.addDistinctId(otherPerson, mergeIntoDistinctId, addDistinctIdVersion) - return [otherPerson, Promise.resolve()] - } else if (!otherPerson && mergeIntoPerson) { - await this.db.addDistinctId(mergeIntoPerson, otherPersonDistinctId, addDistinctIdVersion) - return [mergeIntoPerson, Promise.resolve()] + // This means that at merge time, it's possible this `distinct_id` and its deterministically + // generated `person.uuid` has already been used for events in ClickHouse, but they have no + // corresponding rows in the `posthog_persondistinctid` or `posthog_person` tables. + // + // For this reason, $process_person_profile=false write to the `posthog_personlessdistinctid` + // table just to note that a given Distinct ID was used for "personless" mode. Then, during + // our merges transactions below, we do two things: + // 1. We check whether a row exists in `posthog_personlessdistinctid` for that Distinct ID, + // if so, we need to write out `posthog_persondistinctid` rows with `version=1` so that + // an override is created in ClickHouse which will associate the old "personless" events + // with the Person UUID they were merged into. + // 2. We insert and/or update the `posthog_personlessdistinctid` ourselves, to mark that + // the Distinct ID has been merged. This is important so that an event being processed + // concurrently for that Distinct ID doesn't emit an event and _miss_ that a different + // Person UUID needs to be used now. (See the `processPerson` code in `update` for more.) + + if ((otherPerson && !mergeIntoPerson) || (!otherPerson && mergeIntoPerson)) { + // Only one of the two Distinct IDs points at an existing Person + + const [existingPerson, distinctIdToAdd] = (() => { + if (otherPerson) { + return [otherPerson!, mergeIntoDistinctId] + } else { + return [mergeIntoPerson!, otherPersonDistinctId] + } + })() + + return await this.db.postgres.transaction( + PostgresUse.COMMON_WRITE, + 'mergeDistinctIds-OneExists', + async (tx) => { + // See comment above about `distinctIdVersion` + const _insertedDistinctId = await this.db.addPersonlessDistinctIdForMerge( + this.teamId, + distinctIdToAdd, + tx + ) + const distinctIdVersion = 1 // TODO: Once `posthog_personlessdistinctid` is backfilled: insertedDistinctId ? 
0 : 1 + + await this.db.addDistinctId(existingPerson, distinctIdToAdd, distinctIdVersion, tx) + return [existingPerson, Promise.resolve()] + } + ) } else if (otherPerson && mergeIntoPerson) { + // Both Distinct IDs point at an existing Person + if (otherPerson.id == mergeIntoPerson.id) { + // Nothing to do, they are the same Person return [mergeIntoPerson, Promise.resolve()] } + return await this.mergePeople({ mergeInto: mergeIntoPerson, mergeIntoDistinctId: mergeIntoDistinctId, otherPerson: otherPerson, otherPersonDistinctId: otherPersonDistinctId, }) - } + } else { + // Neither Distinct ID points at an existing Person + + let distinctId1 = mergeIntoDistinctId + let distinctId2 = otherPersonDistinctId + + return await this.db.postgres.transaction( + PostgresUse.COMMON_WRITE, + 'mergeDistinctIds-NeitherExist', + async (tx) => { + // See comment above about `distinctIdVersion` + const insertedDistinctId1 = await this.db.addPersonlessDistinctIdForMerge( + this.teamId, + distinctId1, + tx + ) - // The last case: (!oldPerson && !newPerson) - return [ - await this.createPerson( - // TODO: in this case we could skip the properties updates later - timestamp, - this.eventProperties['$set'] || {}, - this.eventProperties['$set_once'] || {}, - teamId, - null, - true, - this.event.uuid, - [mergeIntoDistinctId, otherPersonDistinctId], - addDistinctIdVersion - ), - Promise.resolve(), - ] + // See comment above about `distinctIdVersion` + const insertedDistinctId2 = await this.db.addPersonlessDistinctIdForMerge( + this.teamId, + distinctId2, + tx + ) + + // `createPerson` uses the first Distinct ID provided to generate the Person + // UUID. That means the first Distinct ID definitely doesn't need an override, + // and can always use version 0. Below, we exhaust all of the options to decide + // whether we can optimize away an override by doing a swap, or whether we + // need to actually write an override. (But mostly we're being verbose for + // documentation purposes) + let distinctId2Version = 1 // TODO: Once `posthog_personlessdistinctid` is backfilled, this should be = 0 + if (insertedDistinctId1 && insertedDistinctId2) { + // We were the first to insert both (neither was used for Personless), so we + // can use either as the primary Person UUID and create no overrides. + } else if (insertedDistinctId1 && !insertedDistinctId2) { + // We created 1, but 2 was already used for Personless. Let's swap so + // that 2 can be the primary Person UUID and no override is needed. + ;[distinctId1, distinctId2] = [distinctId2, distinctId1] + } else if (!insertedDistinctId1 && insertedDistinctId2) { + // We created 2, but 1 was already used for Personless, so we want to + // use 1 as the primary Person UUID so that no override is needed. + } else if (!insertedDistinctId1 && !insertedDistinctId2) { + // Both were used in Personless mode, so there is no more-correct choice of + // primary Person UUID to make here, and we need to drop an override by + // using version = 1 for Distinct ID 2. + distinctId2Version = 1 + } + + // The first Distinct ID is used to create the new Person's UUID, and so it + // never needs an override. 
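// (Summary of the branches above, for reference. `insertedDistinctId1`/`insertedDistinctId2` are
// the `RETURNING (xmax = 0) AS inserted` results from `addPersonlessDistinctIdForMerge` — xmax is
// only 0 on a freshly inserted row, so `true` means this transaction was the first to record that
// Distinct ID in `posthog_personlessdistinctid`:
//   inserted1=true,  inserted2=true  -> no personless history; distinctId1 stays the primary UUID source
//   inserted1=true,  inserted2=false -> swap, so distinctId2 (already used personless) becomes primary
//   inserted1=false, inserted2=true  -> distinctId1 (already used personless) stays primary
//   inserted1=false, inserted2=false -> both have personless history; an override for distinctId2 is unavoidable
// While the backfill TODO above is pending, distinctId2Version is 1 in every branch; the intended
// end state is 0 in all but the last branch.)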
+ const distinctId1Version = 0 + + return [ + await this.createPerson( + // TODO: in this case we could skip the properties updates later + timestamp, + this.eventProperties['$set'] || {}, + this.eventProperties['$set_once'] || {}, + teamId, + null, + true, + this.event.uuid, + [ + { distinctId: distinctId1, version: distinctId1Version }, + { distinctId: distinctId2, version: distinctId2Version }, + ], + tx + ), + Promise.resolve(), + ] + } + ) + } } public async mergePeople({ diff --git a/plugin-server/tests/main/db.test.ts b/plugin-server/tests/main/db.test.ts index 8d419b0b9fdb1..1ff74e418f20d 100644 --- a/plugin-server/tests/main/db.test.ts +++ b/plugin-server/tests/main/db.test.ts @@ -284,6 +284,23 @@ describe('DB', () => { return selectResult.rows[0] } + test('addPersonlessDistinctId', async () => { + const team = await getFirstTeam(hub) + await db.addPersonlessDistinctId(team.id, 'addPersonlessDistinctId') + + // This will conflict, but shouldn't throw an error + await db.addPersonlessDistinctId(team.id, 'addPersonlessDistinctId') + + const result = await db.postgres.query( + PostgresUse.COMMON_WRITE, + 'SELECT id FROM posthog_personlessdistinctid WHERE team_id = $1 AND distinct_id = $2', + [team.id, 'addPersonlessDistinctId'], + 'addPersonlessDistinctId' + ) + + expect(result.rows.length).toEqual(1) + }) + describe('createPerson', () => { let team: Team const uuid = new UUIDT().toString() @@ -294,7 +311,7 @@ describe('DB', () => { }) test('without properties', async () => { - const person = await db.createPerson(TIMESTAMP, {}, {}, {}, team.id, null, false, uuid, [distinctId]) + const person = await db.createPerson(TIMESTAMP, {}, {}, {}, team.id, null, false, uuid, [{ distinctId }]) const fetched_person = await fetchPersonByPersonId(team.id, person.id) expect(fetched_person!.is_identified).toEqual(false) @@ -306,7 +323,7 @@ describe('DB', () => { }) test('without properties indentified true', async () => { - const person = await db.createPerson(TIMESTAMP, {}, {}, {}, team.id, null, true, uuid, [distinctId]) + const person = await db.createPerson(TIMESTAMP, {}, {}, {}, team.id, null, true, uuid, [{ distinctId }]) const fetched_person = await fetchPersonByPersonId(team.id, person.id) expect(fetched_person!.is_identified).toEqual(true) expect(fetched_person!.properties).toEqual({}) @@ -326,7 +343,7 @@ describe('DB', () => { null, false, uuid, - [distinctId] + [{ distinctId }] ) const fetched_person = await fetchPersonByPersonId(team.id, person.id) expect(fetched_person!.is_identified).toEqual(false) @@ -354,7 +371,7 @@ describe('DB', () => { const distinctId = 'distinct_id1' // Note that we update the person badly in case of concurrent updates, but lets make sure we're consistent const personDbBefore = await db.createPerson(TIMESTAMP, { c: 'aaa' }, {}, {}, team.id, null, false, uuid, [ - distinctId, + { distinctId }, ]) const providedPersonTs = DateTime.fromISO('2000-04-04T11:42:06.502Z').toUTC() const personProvided = { ...personDbBefore, properties: { c: 'bbb' }, created_at: providedPersonTs } @@ -486,7 +503,7 @@ describe('DB', () => { const team = await getFirstTeam(hub) const uuid = new UUIDT().toString() const createdPerson = await db.createPerson(TIMESTAMP, { foo: 'bar' }, {}, {}, team.id, null, true, uuid, [ - 'some_id', + { distinctId: 'some_id' }, ]) const person = await db.fetchPerson(team.id, 'some_id') @@ -852,7 +869,7 @@ describe('DB', () => { null, false, new UUIDT().toString(), - ['source_person'] + [{ distinctId: 'source_person' }] ) const targetPerson = await 
db.createPerson( TIMESTAMP, @@ -863,7 +880,7 @@ describe('DB', () => { null, false, new UUIDT().toString(), - ['target_person'] + [{ distinctId: 'target_person' }] ) sourcePersonID = sourcePerson.id targetPersonID = targetPerson.id diff --git a/plugin-server/tests/main/process-event.test.ts b/plugin-server/tests/main/process-event.test.ts index 72bb5879945a9..9d9056ce8c380 100644 --- a/plugin-server/tests/main/process-event.test.ts +++ b/plugin-server/tests/main/process-event.test.ts @@ -50,7 +50,7 @@ export async function createPerson( null, false, new UUIDT().toString(), - distinctIds + distinctIds.map((distinctId) => ({ distinctId })) ) } @@ -1764,7 +1764,8 @@ describe('when handling $identify', () => { // completing before continuing with the first identify. const originalCreatePerson = hub.db.createPerson.bind(hub.db) const createPersonMock = jest.fn(async (...args) => { - const result = await originalCreatePerson(...args) + // We need to slice off the txn arg, or else we conflict with the `identify` below. + const result = await originalCreatePerson(...args.slice(0, -1)) if (createPersonMock.mock.calls.length === 1) { // On second invocation, make another identify call diff --git a/plugin-server/tests/worker/ingestion/action-matcher.test.ts b/plugin-server/tests/worker/ingestion/action-matcher.test.ts index a66a8d03bb2be..d4a5770355a4b 100644 --- a/plugin-server/tests/worker/ingestion/action-matcher.test.ts +++ b/plugin-server/tests/worker/ingestion/action-matcher.test.ts @@ -741,7 +741,7 @@ describe('ActionMatcher', () => { null, true, new UUIDT().toString(), - ['random'] + [{ distinctId: 'random' }] ) const cohortPerson = await hub.db.createPerson( @@ -753,7 +753,7 @@ describe('ActionMatcher', () => { null, true, new UUIDT().toString(), - ['cohort'] + [{ distinctId: 'cohort' }] ) await hub.db.addPersonToCohort(testCohort.id, cohortPerson.id, testCohort.version) diff --git a/plugin-server/tests/worker/ingestion/event-pipeline/prepareEventStep.test.ts b/plugin-server/tests/worker/ingestion/event-pipeline/prepareEventStep.test.ts index 4c1467653f324..d09a149d44c8b 100644 --- a/plugin-server/tests/worker/ingestion/event-pipeline/prepareEventStep.test.ts +++ b/plugin-server/tests/worker/ingestion/event-pipeline/prepareEventStep.test.ts @@ -59,7 +59,7 @@ describe('prepareEventStep()', () => { // :KLUDGE: We test below whether kafka messages are produced, so make sure the person exists beforehand. 
await hub.db.createPerson(person.created_at, {}, {}, {}, pluginEvent.team_id, null, false, person.uuid, [ - 'my_id', + { distinctId: 'my_id' }, ]) hub.db.kafkaProducer!.queueMessage = jest.fn() diff --git a/plugin-server/tests/worker/ingestion/person-state.test.ts b/plugin-server/tests/worker/ingestion/person-state.test.ts index bed64d243e6c5..06bd8086ead84 100644 --- a/plugin-server/tests/worker/ingestion/person-state.test.ts +++ b/plugin-server/tests/worker/ingestion/person-state.test.ts @@ -222,39 +222,97 @@ describe('PersonState.update()', () => { expect(distinctIds).toEqual(expect.arrayContaining([])) }) - it('merging creates an override and force_upgrade works', async () => { - await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, false, oldUserUuid, [oldUserDistinctId]) + it('overrides are created only when distinct_id is in posthog_personlessdistinctid', async () => { + // oldUserDistinctId exists, and 'old2' will merge into it, but not create an override + await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, false, oldUserUuid, [ + { distinctId: oldUserDistinctId }, + ]) + + // newUserDistinctId exists, and 'new2' will merge into it, and will create an override + await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, false, newUserUuid, [ + { distinctId: newUserDistinctId }, + ]) + await hub.db.addPersonlessDistinctId(teamId, 'new2') const hubParam = undefined - let processPerson = true + const processPerson = true const [_person, kafkaAcks] = await personState( + { + event: '$identify', + distinct_id: oldUserDistinctId, + properties: { + $anon_distinct_id: 'old2', + }, + }, + hubParam, + processPerson + ).update() + + const [_person2, kafkaAcks2] = await personState( { event: '$identify', distinct_id: newUserDistinctId, properties: { - $anon_distinct_id: oldUserDistinctId, + $anon_distinct_id: 'new2', }, }, hubParam, processPerson ).update() + await hub.db.kafkaProducer.flush() await kafkaAcks + await kafkaAcks2 - await delayUntilEventIngested(() => fetchOverridesForDistinctId(newUserDistinctId)) - const chOverrides = await fetchOverridesForDistinctId(newUserDistinctId) + // new2 has an override, because it was in posthog_personlessdistinctid + await delayUntilEventIngested(() => fetchOverridesForDistinctId('new2')) + const chOverrides = await fetchOverridesForDistinctId('new2') expect(chOverrides.length).toEqual(1) - - // Override created for Person that never existed in the DB expect(chOverrides).toEqual( expect.arrayContaining([ expect.objectContaining({ - distinct_id: newUserDistinctId, + distinct_id: 'new2', + person_id: newUserUuid, + version: 1, + }), + ]) + ) + + // old2 does have an override, because we are temporarily writing out unnecessary + // overrides while we backfill `posthog_personlessdistinctid` + const chOverridesOld = await fetchOverridesForDistinctId('old2') + expect(chOverridesOld.length).toEqual(1) + expect(chOverridesOld).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + distinct_id: 'old2', person_id: oldUserUuid, version: 1, }), ]) ) + }) + + it('force_upgrade works', async () => { + await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, false, oldUserUuid, [ + { distinctId: oldUserDistinctId }, + ]) + + const hubParam = undefined + let processPerson = true + const [_person, kafkaAcks] = await personState( + { + event: '$identify', + distinct_id: newUserDistinctId, + properties: { + $anon_distinct_id: oldUserDistinctId, + }, + }, + hubParam, + processPerson + ).update() + await 
hub.db.kafkaProducer.flush() + await kafkaAcks // Using the `distinct_id` again with `processPerson=false` results in // `force_upgrade=true` and real Person `uuid` and `created_at` @@ -378,7 +436,9 @@ describe('PersonState.update()', () => { }) it('handles person being created in a race condition', async () => { - await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, false, newUserUuid, [newUserDistinctId]) + await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, false, newUserUuid, [ + { distinctId: newUserDistinctId }, + ]) jest.spyOn(hub.db, 'fetchPerson').mockImplementationOnce(() => { return Promise.resolve(undefined) @@ -415,7 +475,7 @@ describe('PersonState.update()', () => { it('handles person being created in a race condition updates properties if needed', async () => { await hub.db.createPerson(timestamp, { b: 3, c: 4 }, {}, {}, teamId, null, false, newUserUuid, [ - newUserDistinctId, + { distinctId: newUserDistinctId }, ]) jest.spyOn(hub.db, 'fetchPerson').mockImplementationOnce(() => { @@ -503,7 +563,7 @@ describe('PersonState.update()', () => { null, false, newUserUuid, - [newUserDistinctId] + [{ distinctId: newUserDistinctId }] ) const [person, kafkaAcks] = await personState({ @@ -539,7 +599,7 @@ describe('PersonState.update()', () => { it('updates person properties - no update if not needed', async () => { await hub.db.createPerson(timestamp, { $current_url: 123 }, {}, {}, teamId, null, false, newUserUuid, [ - newUserDistinctId, + { distinctId: newUserDistinctId }, ]) const [person, kafkaAcks] = await personState({ @@ -581,7 +641,7 @@ describe('PersonState.update()', () => { it('updates person properties - always update for person events', async () => { await hub.db.createPerson(timestamp, { $current_url: 123 }, {}, {}, teamId, null, false, newUserUuid, [ - newUserDistinctId, + { distinctId: newUserDistinctId }, ]) const [person, kafkaAcks] = await personState({ @@ -614,7 +674,9 @@ describe('PersonState.update()', () => { }) it('updates person properties - always update if undefined before', async () => { - await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, false, newUserUuid, [newUserDistinctId]) + await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, false, newUserUuid, [ + { distinctId: newUserDistinctId }, + ]) const [person, kafkaAcks] = await personState({ event: '$pageview', @@ -655,7 +717,7 @@ describe('PersonState.update()', () => { null, false, newUserUuid, - [newUserDistinctId] + [{ distinctId: newUserDistinctId }] ) const [person, kafkaAcks] = await personState({ @@ -697,7 +759,7 @@ describe('PersonState.update()', () => { null, false, newUserUuid, - [newUserDistinctId] + [{ distinctId: newUserDistinctId }] ) const personS = personState({ @@ -736,7 +798,7 @@ describe('PersonState.update()', () => { it('does not update person if not needed', async () => { await hub.db.createPerson(timestamp, { b: 3, c: 4 }, {}, {}, teamId, null, false, newUserUuid, [ - newUserDistinctId, + { distinctId: newUserDistinctId }, ]) const [person, kafkaAcks] = await personState({ @@ -771,7 +833,9 @@ describe('PersonState.update()', () => { }) it('marks user as is_identified', async () => { - await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, false, newUserUuid, [newUserDistinctId]) + await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, false, newUserUuid, [ + { distinctId: newUserDistinctId }, + ]) const personS = personState({ event: '$pageview', distinct_id: newUserDistinctId, @@ -823,8 +887,8 @@ 
describe('PersonState.update()', () => { properties_last_operation: {}, } await hub.db.createPerson(timestamp, { a: 6, c: 8 }, {}, {}, teamId, null, true, newUserUuid, [ - newUserDistinctId, - oldUserDistinctId, + { distinctId: newUserDistinctId }, + { distinctId: oldUserDistinctId }, ]) // the merged Person const personS = personState({ @@ -901,7 +965,7 @@ describe('PersonState.update()', () => { uuid: newUserUuid, properties: { foo: 'bar' }, created_at: timestamp, - version: 1, + version: 0, is_identified: true, }) ) @@ -920,8 +984,8 @@ describe('PersonState.update()', () => { it(`marks is_identified to be updated when no changes to distinct_ids but $anon_distinct_id passe`, async () => { await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, false, newUserUuid, [ - newUserDistinctId, - oldUserDistinctId, + { distinctId: newUserDistinctId }, + { distinctId: oldUserDistinctId }, ]) const personS = personState({ @@ -954,7 +1018,9 @@ describe('PersonState.update()', () => { }) it(`add distinct id and marks user is_identified when passed $anon_distinct_id person does not exists and distinct_id does`, async () => { - await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, false, newUserUuid, [newUserDistinctId]) + await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, false, newUserUuid, [ + { distinctId: newUserDistinctId }, + ]) const personS = personState({ event: '$identify', @@ -990,7 +1056,9 @@ describe('PersonState.update()', () => { }) it(`add distinct id and marks user as is_identified when passed $anon_distinct_id person exists and distinct_id does not`, async () => { - await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, false, oldUserUuid, [oldUserDistinctId]) + await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, false, oldUserUuid, [ + { distinctId: oldUserDistinctId }, + ]) const personS = personState({ event: '$identify', @@ -1027,8 +1095,12 @@ describe('PersonState.update()', () => { }) it(`merge into distinct_id person and marks user as is_identified when both persons have is_identified false`, async () => { - await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, false, oldUserUuid, [oldUserDistinctId]) - await hub.db.createPerson(timestamp2, {}, {}, {}, teamId, null, false, newUserUuid, [newUserDistinctId]) + await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, false, oldUserUuid, [ + { distinctId: oldUserDistinctId }, + ]) + await hub.db.createPerson(timestamp2, {}, {}, {}, teamId, null, false, newUserUuid, [ + { distinctId: newUserDistinctId }, + ]) const [person, kafkaAcks] = await personState({ event: '$identify', @@ -1090,8 +1162,12 @@ describe('PersonState.update()', () => { }) it(`merge into distinct_id person and marks user as is_identified when distinct_id user is identified and $anon_distinct_id user is not`, async () => { - await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, false, oldUserUuid, [oldUserDistinctId]) - await hub.db.createPerson(timestamp2, {}, {}, {}, teamId, null, true, newUserUuid, [newUserDistinctId]) + await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, false, oldUserUuid, [ + { distinctId: oldUserDistinctId }, + ]) + await hub.db.createPerson(timestamp2, {}, {}, {}, teamId, null, true, newUserUuid, [ + { distinctId: newUserDistinctId }, + ]) const [person, kafkaAcks] = await personState({ event: '$identify', @@ -1153,8 +1229,12 @@ describe('PersonState.update()', () => { }) it(`does not merge people when distinct_id user is not identified and $anon_distinct_id 
user is`, async () => { - await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, true, oldUserUuid, [oldUserDistinctId]) - await hub.db.createPerson(timestamp2, {}, {}, {}, teamId, null, false, newUserUuid, [newUserDistinctId]) + await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, true, oldUserUuid, [ + { distinctId: oldUserDistinctId }, + ]) + await hub.db.createPerson(timestamp2, {}, {}, {}, teamId, null, false, newUserUuid, [ + { distinctId: newUserDistinctId }, + ]) const personS = personState({ event: '$identify', @@ -1202,8 +1282,12 @@ describe('PersonState.update()', () => { }) it(`does not merge people when both users are identified`, async () => { - await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, true, oldUserUuid, [oldUserDistinctId]) - await hub.db.createPerson(timestamp2, {}, {}, {}, teamId, null, true, newUserUuid, [newUserDistinctId]) + await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, true, oldUserUuid, [ + { distinctId: oldUserDistinctId }, + ]) + await hub.db.createPerson(timestamp2, {}, {}, {}, teamId, null, true, newUserUuid, [ + { distinctId: newUserDistinctId }, + ]) const [person, kafkaAcks] = await personState({ event: '$identify', @@ -1250,10 +1334,10 @@ describe('PersonState.update()', () => { it(`merge into distinct_id person and updates properties with $set/$set_once`, async () => { await hub.db.createPerson(timestamp, { a: 1, b: 2 }, {}, {}, teamId, null, false, oldUserUuid, [ - oldUserDistinctId, + { distinctId: oldUserDistinctId }, ]) await hub.db.createPerson(timestamp2, { b: 3, c: 4, d: 5 }, {}, {}, teamId, null, false, newUserUuid, [ - newUserDistinctId, + { distinctId: newUserDistinctId }, ]) const [person, kafkaAcks] = await personState({ @@ -1318,7 +1402,9 @@ describe('PersonState.update()', () => { }) it(`handles race condition when other thread creates the user`, async () => { - await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, false, oldUserUuid, [oldUserDistinctId]) + await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, false, oldUserUuid, [ + { distinctId: oldUserDistinctId }, + ]) // Fake the race by assuming createPerson was called before the addDistinctId creation above jest.spyOn(hub.db, 'addDistinctId').mockImplementation(async (person, distinctId) => { @@ -1331,7 +1417,7 @@ describe('PersonState.update()', () => { null, false, uuidFromDistinctId(teamId, distinctId), - [distinctId] + [{ distinctId }] ) await hub.db.addDistinctId(person, distinctId, 0) // this throws }) @@ -1435,8 +1521,12 @@ describe('PersonState.update()', () => { describe(`overrides: ${useOverridesMode}`, () => { // only difference between $merge_dangerously and $identify it(`merge_dangerously can merge people when alias id user is identified`, async () => { - await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, true, oldUserUuid, [oldUserDistinctId]) - await hub.db.createPerson(timestamp2, {}, {}, {}, teamId, null, true, newUserUuid, [newUserDistinctId]) + await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, true, oldUserUuid, [ + { distinctId: oldUserDistinctId }, + ]) + await hub.db.createPerson(timestamp2, {}, {}, {}, teamId, null, true, newUserUuid, [ + { distinctId: newUserDistinctId }, + ]) const [person, kafkaAcks] = await personState({ event: '$merge_dangerously', @@ -1569,7 +1659,7 @@ describe('PersonState.update()', () => { null, false, uuidFromDistinctId(teamId, 'anonymous_id'), - ['anonymous_id'] + [{ distinctId: 'anonymous_id' }] ) const identifiedPerson = await 
hub.db.createPerson( timestamp, @@ -1580,7 +1670,7 @@ describe('PersonState.update()', () => { null, false, uuidFromDistinctId(teamId, 'new_distinct_id'), - ['new_distinct_id'] + [{ distinctId: 'new_distinct_id' }] ) // existing overrides @@ -1646,7 +1736,7 @@ describe('PersonState.update()', () => { null, false, uuidFromDistinctId(teamId, 'anonymous_id'), - ['anonymous_id'] + [{ distinctId: 'anonymous_id' }] ) const identifiedPerson = await hub.db.createPerson( timestamp, @@ -1657,7 +1747,7 @@ describe('PersonState.update()', () => { null, false, uuidFromDistinctId(teamId, 'new_distinct_id'), - ['new_distinct_id'] + [{ distinctId: 'new_distinct_id' }] ) // existing overrides for both anonPerson and identifiedPerson @@ -1731,7 +1821,7 @@ describe('PersonState.update()', () => { null, false, uuidFromDistinctId(teamId, 'anonymous_id'), - ['anonymous_id'] + [{ distinctId: 'anonymous_id' }] ) const identifiedPerson = await hub.db.createPerson( timestamp, @@ -1742,7 +1832,7 @@ describe('PersonState.update()', () => { null, false, uuidFromDistinctId(teamId, 'new_distinct_id'), - ['new_distinct_id'] + [{ distinctId: 'new_distinct_id' }] ) await insertRow(hub.db.postgres, 'posthog_featureflaghashkeyoverride', { @@ -1814,8 +1904,8 @@ describe('PersonState.update()', () => { describe(`overrides: ${useOverridesMode}`, () => { it(`no-op if persons already merged`, async () => { await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, true, firstUserUuid, [ - firstUserDistinctId, - secondUserDistinctId, + { distinctId: firstUserDistinctId }, + { distinctId: secondUserDistinctId }, ]) const state: PersonState = personState({}, hub) jest.spyOn(hub.db.kafkaProducer, 'queueMessages') @@ -1852,7 +1942,7 @@ describe('PersonState.update()', () => { null, false, firstUserUuid, - [firstUserDistinctId] + [{ distinctId: firstUserDistinctId }] ) const second: InternalPerson = await hub.db.createPerson( timestamp, @@ -1863,7 +1953,7 @@ describe('PersonState.update()', () => { null, false, secondUserUuid, - [secondUserDistinctId] + [{ distinctId: secondUserDistinctId }] ) const state: PersonState = personState({}, hub) @@ -1945,7 +2035,7 @@ describe('PersonState.update()', () => { null, false, firstUserUuid, - [firstUserDistinctId] + [{ distinctId: firstUserDistinctId }] ) const second: InternalPerson = await hub.db.createPerson( timestamp, @@ -1956,7 +2046,7 @@ describe('PersonState.update()', () => { null, false, secondUserUuid, - [secondUserDistinctId] + [{ distinctId: secondUserDistinctId }] ) const state: PersonState = personState({}, hub) @@ -2005,10 +2095,10 @@ describe('PersonState.update()', () => { it(`retries merges up to retry limit if postgres down`, async () => { await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, false, firstUserUuid, [ - firstUserDistinctId, + { distinctId: firstUserDistinctId }, ]) await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, false, secondUserUuid, [ - secondUserDistinctId, + { distinctId: secondUserDistinctId }, ]) const state: PersonState = personState({}, hub) @@ -2054,10 +2144,10 @@ describe('PersonState.update()', () => { it(`handleIdentifyOrAlias does not throw on merge failure`, async () => { // TODO: This the current state, we should probably change it await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, false, firstUserUuid, [ - firstUserDistinctId, + { distinctId: firstUserDistinctId }, ]) await hub.db.createPerson(timestamp, {}, {}, {}, teamId, null, false, secondUserUuid, [ - secondUserDistinctId, + { distinctId: 
secondUserDistinctId }, ]) const state: PersonState = personState( @@ -2117,7 +2207,7 @@ describe('PersonState.update()', () => { null, false, firstUserUuid, - [firstUserDistinctId] + [{ distinctId: firstUserDistinctId }] ) const second: InternalPerson = await hub.db.createPerson( timestamp, @@ -2128,7 +2218,7 @@ describe('PersonState.update()', () => { null, false, secondUserUuid, - [secondUserDistinctId] + [{ distinctId: secondUserDistinctId }] ) const state: PersonState = personState({}, hub) @@ -2245,7 +2335,7 @@ describe('PersonState.update()', () => { null, false, firstUserUuid, - [firstUserDistinctId] + [{ distinctId: firstUserDistinctId }] ) const second: InternalPerson = await hub.db.createPerson( timestamp.plus({ minutes: 2 }), @@ -2256,7 +2346,7 @@ describe('PersonState.update()', () => { null, false, secondUserUuid, - [secondUserDistinctId] + [{ distinctId: secondUserDistinctId }] ) const third: InternalPerson = await hub.db.createPerson( timestamp.plus({ minutes: 5 }), @@ -2267,7 +2357,7 @@ describe('PersonState.update()', () => { null, false, new UUIDT().toString(), - ['third'] + [{ distinctId: 'third' }] ) // We want to simulate a concurrent update to person_overrides. We do @@ -2393,7 +2483,7 @@ describe('PersonState.update()', () => { null, false, firstUserUuid, - [firstUserDistinctId] + [{ distinctId: firstUserDistinctId }] ) const second: InternalPerson = await hub.db.createPerson( timestamp.plus({ minutes: 2 }), @@ -2404,7 +2494,7 @@ describe('PersonState.update()', () => { null, false, secondUserUuid, - [secondUserDistinctId] + [{ distinctId: secondUserDistinctId }] ) const third: InternalPerson = await hub.db.createPerson( timestamp.plus({ minutes: 5 }), @@ -2415,7 +2505,7 @@ describe('PersonState.update()', () => { null, false, new UUIDT().toString(), - ['third'] + [{ distinctId: 'third' }] ) await personState( diff --git a/plugin-server/tests/worker/ingestion/postgres-parity.test.ts b/plugin-server/tests/worker/ingestion/postgres-parity.test.ts index 2879328275478..632241251331d 100644 --- a/plugin-server/tests/worker/ingestion/postgres-parity.test.ts +++ b/plugin-server/tests/worker/ingestion/postgres-parity.test.ts @@ -78,7 +78,7 @@ describe('postgres parity', () => { null, true, uuid, - ['distinct1', 'distinct2'] + [{ distinctId: 'distinct1' }, { distinctId: 'distinct2' }] ) await delayUntilEventIngested(() => hub.db.fetchPersons(Database.ClickHouse)) await delayUntilEventIngested(() => hub.db.fetchDistinctIdValues(person, Database.ClickHouse), 2) @@ -170,7 +170,7 @@ describe('postgres parity', () => { null, false, uuid, - ['distinct1', 'distinct2'] + [{ distinctId: 'distinct1' }, { distinctId: 'distinct2' }] ) await delayUntilEventIngested(() => hub.db.fetchPersons(Database.ClickHouse)) await delayUntilEventIngested(() => hub.db.fetchDistinctIdValues(person, Database.ClickHouse), 2) @@ -251,7 +251,7 @@ describe('postgres parity', () => { null, true, uuid, - ['distinct1'] + [{ distinctId: 'distinct1' }] ) const anotherPerson = await hub.db.createPerson( DateTime.utc(), @@ -262,7 +262,7 @@ describe('postgres parity', () => { null, true, uuid2, - ['another_distinct_id'] + [{ distinctId: 'another_distinct_id' }] ) await delayUntilEventIngested(() => hub.db.fetchPersons(Database.ClickHouse)) const [postgresPerson] = await hub.db.fetchPersons(Database.Postgres) @@ -334,7 +334,7 @@ describe('postgres parity', () => { null, false, uuid, - ['distinct1'] + [{ distinctId: 'distinct1' }] ) const anotherPerson = await hub.db.createPerson( DateTime.utc(), @@ -345,7 +345,7 @@ 
describe('postgres parity', () => { null, true, uuid2, - ['another_distinct_id'] + [{ distinctId: 'another_distinct_id' }] ) await delayUntilEventIngested(() => hub.db.fetchPersons(Database.ClickHouse)) const [postgresPerson] = await hub.db.fetchPersons(Database.Postgres) diff --git a/plugin-server/tests/worker/ingestion/process-event.test.ts b/plugin-server/tests/worker/ingestion/process-event.test.ts index b9947bb7eec74..e4353bdfc36f0 100644 --- a/plugin-server/tests/worker/ingestion/process-event.test.ts +++ b/plugin-server/tests/worker/ingestion/process-event.test.ts @@ -63,7 +63,7 @@ describe('EventsProcessor#createEvent()', () => { null, false, personUuid, - ['my_id'] + [{ distinctId: 'my_id' }] ) }) diff --git a/plugin-server/tests/worker/ingestion/properties-updater.test.ts b/plugin-server/tests/worker/ingestion/properties-updater.test.ts index b5bc38d64d2d8..16cde1c7e84ab 100644 --- a/plugin-server/tests/worker/ingestion/properties-updater.test.ts +++ b/plugin-server/tests/worker/ingestion/properties-updater.test.ts @@ -29,7 +29,7 @@ describe('properties-updater', () => { db = hub.db team = await getFirstTeam(hub) - await db.createPerson(PAST_TIMESTAMP, {}, {}, {}, team.id, null, false, uuid, [distinctId]) + await db.createPerson(PAST_TIMESTAMP, {}, {}, {}, team.id, null, false, uuid, [{ distinctId }]) jest.spyOn(hub.db, 'updateGroup') jest.spyOn(hub.db, 'insertGroup') diff --git a/posthog/api/capture.py b/posthog/api/capture.py index 6081f6e646937..73f71fad5d593 100644 --- a/posthog/api/capture.py +++ b/posthog/api/capture.py @@ -1,5 +1,6 @@ import json import re +from random import random import sentry_sdk import structlog @@ -41,6 +42,7 @@ preprocess_replay_events_for_blob_ingestion, split_replay_events, ) +from posthog.storage import object_storage from posthog.utils import get_ip_address from posthog.utils_cors import cors_response @@ -639,7 +641,8 @@ def replace_with_warning(event: dict[str, Any]) -> dict[str, Any] | None: We do this so that when we're playing back the recording we can insert useful info in the UI. 
""" try: - # + sample_replay_data_to_object_storage(event, random()) + properties = event.pop("properties", {}) snapshot_items = properties.pop("$snapshot_items", []) # since we had message too large there really should be an item in the list @@ -677,6 +680,23 @@ def replace_with_warning(event: dict[str, Any]) -> dict[str, Any] | None: return None +def sample_replay_data_to_object_storage(event: dict[str, Any], random_number: float) -> None: + """ + the random number is passed in to make testing easier + both the random number and the sample rate must be between 0 and 0.01 + if the random number is less than the sample_rate then we write the event to S3 + """ + try: + sample_rate = settings.REPLAY_MESSAGE_TOO_LARGE_SAMPLE_RATE + if 0 < random_number < sample_rate <= 0.01: + object_key = f"session_id/{event.get('properties', {}).get('$session_id', 'unknown')}.json" + object_storage.write(object_key, json.dumps(event), bucket=settings.REPLAY_MESSAGE_TOO_LARGE_SAMPLE_BUCKET) + except Exception as ex: + with sentry_sdk.push_scope() as scope: + scope.set_tag("capture-pathway", "replay") + capture_exception(ex) + + def preprocess_events(events: list[dict[str, Any]]) -> Iterator[tuple[dict[str, Any], UUIDT, str]]: for event in events: event_uuid = UUIDT() diff --git a/posthog/api/feature_flag.py b/posthog/api/feature_flag.py index 029a3186d4365..62647ff923925 100644 --- a/posthog/api/feature_flag.py +++ b/posthog/api/feature_flag.py @@ -246,10 +246,17 @@ def properties_all_match(predicate): detail=f"Invalid date value: {prop.value}", code="invalid_date" ) - # make sure regex and icontains properties have string values - if prop.operator in ["regex", "icontains", "not_regex", "not_icontains"] and not isinstance( - prop.value, str - ): + # make sure regex, icontains, gte, lte, lt, and gt properties have string values + if prop.operator in [ + "regex", + "icontains", + "not_regex", + "not_icontains", + "gte", + "lte", + "gt", + "lt", + ] and not isinstance(prop.value, str): raise serializers.ValidationError( detail=f"Invalid value for operator {prop.operator}: {prop.value}", code="invalid_value" ) diff --git a/posthog/api/test/__snapshots__/test_feature_flag.ambr b/posthog/api/test/__snapshots__/test_feature_flag.ambr index 475b0ab956bb2..0eb5a83cdcc17 100644 --- a/posthog/api/test/__snapshots__/test_feature_flag.ambr +++ b/posthog/api/test/__snapshots__/test_feature_flag.ambr @@ -1801,26 +1801,6 @@ LIMIT 100 SETTINGS optimize_aggregation_in_order = 1 ''' # --- -# name: TestFeatureFlag.test_creating_static_cohort.16 - ''' - /* user_id:0 request:_snapshot_ */ - SELECT id - FROM person - INNER JOIN - (SELECT person_id - FROM person_static_cohort - WHERE team_id = 2 - AND cohort_id = 2 - GROUP BY person_id, - cohort_id, - team_id) cohort_persons ON cohort_persons.person_id = person.id - WHERE team_id = 2 - GROUP BY id - HAVING max(is_deleted) = 0 - ORDER BY argMax(person.created_at, version) DESC, id DESC - LIMIT 100 SETTINGS optimize_aggregation_in_order = 1 - ''' -# --- # name: TestFeatureFlag.test_creating_static_cohort.2 ''' SELECT "posthog_organizationmembership"."id", diff --git a/posthog/api/test/test_capture.py b/posthog/api/test/test_capture.py index c4f2c79e5e14b..153bc50ce1e1b 100644 --- a/posthog/api/test/test_capture.py +++ b/posthog/api/test/test_capture.py @@ -1,20 +1,27 @@ -from collections import Counter -from unittest import mock - import base64 import gzip import json -from django.test import override_settings -import lzstring import pathlib -import pytest import random import string 
+from collections import Counter +from datetime import UTC +from datetime import datetime, timedelta +from typing import Any, Union, cast +from unittest import mock +from unittest.mock import ANY, MagicMock, call +from unittest.mock import patch +from urllib.parse import quote + +import lzstring +import pytest import structlog import zlib -from datetime import datetime, timedelta -from datetime import UTC +from boto3 import resource +from botocore.client import Config +from botocore.exceptions import ClientError from django.http import HttpResponse +from django.test import override_settings from django.test.client import MULTIPART_CONTENT, Client from django.utils import timezone from freezegun import freeze_time @@ -25,9 +32,6 @@ from prance import ResolvingParser from rest_framework import status from token_bucket import Limiter, MemoryStorage -from typing import Any, Union, cast -from unittest.mock import ANY, MagicMock, call, patch -from urllib.parse import quote from ee.billing.quota_limiting import QuotaLimitingCaches from posthog.api import capture @@ -35,6 +39,7 @@ LIKELY_ANONYMOUS_IDS, get_distinct_id, is_randomly_partitioned, + sample_replay_data_to_object_storage, ) from posthog.api.test.mock_sentry import mock_sentry_context_for_tagging from posthog.api.test.openapi_validation import validate_response @@ -49,6 +54,13 @@ DATA_UPLOAD_MAX_MEMORY_SIZE, KAFKA_EVENTS_PLUGIN_INGESTION_TOPIC, ) +from posthog.settings import ( + OBJECT_STORAGE_ACCESS_KEY_ID, + OBJECT_STORAGE_ENDPOINT, + OBJECT_STORAGE_SECRET_ACCESS_KEY, +) +from posthog.storage import object_storage +from posthog.storage.object_storage import ObjectStorageError from posthog.test.base import BaseTest @@ -148,6 +160,51 @@ def mocked_get_ingest_context_from_token(_: Any) -> None: "uuid": "deaa7e00-e1a4-480d-9145-fb8461678dae", } +TEST_SAMPLES_BUCKET = "posthog-test-replay-samples" + +s3 = resource( + "s3", + endpoint_url=OBJECT_STORAGE_ENDPOINT, + aws_access_key_id=OBJECT_STORAGE_ACCESS_KEY_ID, + aws_secret_access_key=OBJECT_STORAGE_SECRET_ACCESS_KEY, + config=Config(signature_version="s3v4"), + region_name="us-east-1", +) + + +# snapshot events are processed and altered during capture processing +def make_processed_recording_event( + event_data: dict | list[dict] | None = None, + session_id="abc123", + window_id="def456", + distinct_id="ghi789", + timestamp=1658516991883, + snapshot_bytes=60, +) -> dict[str, Any]: + if event_data is None: + # event_data is an array of RRWeb events + event_data = [{"type": 3, "data": {"source": 1}}, {"type": 3, "data": {"source": 2}}] + + if isinstance(event_data, dict): + event_data = [event_data] + + return { + "event": "$snapshot_items", + "properties": { + # estimate of the size of the event data + "$snapshot_bytes": snapshot_bytes, + "$snapshot_items": event_data, + "$session_id": session_id, + "$window_id": window_id, + # snapshot events have the distinct id in the properties + # as well as at the top-level + "distinct_id": distinct_id, + "$snapshot_source": "web", + }, + "timestamp": timestamp, + "distinct_id": distinct_id, + } + class TestCapture(BaseTest): """ @@ -162,6 +219,16 @@ def setUp(self): # it is really important to know that /capture is CSRF exempt. 
Enforce checking in the client self.client = Client(enforce_csrf_checks=True) + try: + s3.meta.client.head_bucket(Bucket=TEST_SAMPLES_BUCKET) + except ClientError: + # probably the bucket doesn't exist + s3.create_bucket(Bucket=TEST_SAMPLES_BUCKET) + + def teardown_method(self, method) -> None: + bucket = s3.Bucket(TEST_SAMPLES_BUCKET) + bucket.objects.delete() + def _to_json(self, data: Union[dict, list]) -> str: return json.dumps(data) @@ -226,40 +293,6 @@ def _send_original_version_session_recording_event( return event - # snapshot events are processed and altered during capture processing - def _make_processed_recording_event( - self, - event_data: dict | list[dict] | None = None, - session_id="abc123", - window_id="def456", - distinct_id="ghi789", - timestamp=1658516991883, - snapshot_bytes=60, - ) -> dict[str, Any]: - if event_data is None: - # event_data is an array of RRWeb events - event_data = [{"type": 3, "data": {"source": 1}}, {"type": 3, "data": {"source": 2}}] - - if isinstance(event_data, dict): - event_data = [event_data] - - return { - "event": "$snapshot_items", - "properties": { - # estimate of the size of the event data - "$snapshot_bytes": snapshot_bytes, - "$snapshot_items": event_data, - "$session_id": session_id, - "$window_id": window_id, - # snapshot events have the distinct id in the properties - # as well as at the top-level - "distinct_id": distinct_id, - "$snapshot_source": "web", - }, - "timestamp": timestamp, - "distinct_id": distinct_id, - } - def _send_august_2023_version_session_recording_event( self, number_of_events: int = 1, @@ -467,7 +500,7 @@ def test_capture_snapshot_event_too_large(self, kafka_produce: MagicMock) -> Non ) assert response.status_code == 200 - expected_data = self._make_processed_recording_event( + expected_data = make_processed_recording_event( snapshot_bytes=0, event_data=[ { @@ -2171,3 +2204,65 @@ def test_capture_historical_analytics_events_opt_in(self, kafka_produce) -> None kafka_produce.call_args_list[0][1]["topic"], KAFKA_EVENTS_PLUGIN_INGESTION_HISTORICAL, ) + + def test_capture_replay_to_bucket_when_random_number_is_less_than_sample_rate(self): + sample_rate = 0.001 + random_number = sample_rate / 2 + + with self.settings( + REPLAY_MESSAGE_TOO_LARGE_SAMPLE_RATE=sample_rate, REPLAY_MESSAGE_TOO_LARGE_SAMPLE_BUCKET=TEST_SAMPLES_BUCKET + ): + event = make_processed_recording_event( + session_id="abcdefgh", + snapshot_bytes=0, + event_data=[ + { + "type": 4, + "data": {"href": "https://keepme.io"}, + "$window_id": "the window id", + "timestamp": 1234567890, + }, + { + "type": 5, + "data": {"tag": "Message too large"}, + "timestamp": 1234567890, + "$window_id": "the window id", + }, + ], + ) + sample_replay_data_to_object_storage(event, random_number) + contents = object_storage.read("session_id/abcdefgh.json", bucket=TEST_SAMPLES_BUCKET) + assert contents == json.dumps(event) + + @parameterized.expand( + [ + ["does not write when random number is more than sample rate", 0.0001, 0.0002], + ["does not write when random number is less than sample rate but over max limit", 0.011, 0.001], + ] + ) + def test_capture_replay_does_not_write_to_bucket(self, _name: str, sample_rate: float, random_number: float): + with self.settings( + REPLAY_MESSAGE_TOO_LARGE_SAMPLE_RATE=sample_rate, REPLAY_MESSAGE_TOO_LARGE_SAMPLE_BUCKET=TEST_SAMPLES_BUCKET + ): + event = make_processed_recording_event( + session_id="abcdefgh", + snapshot_bytes=0, + event_data=[ + { + "type": 4, + "data": {"href": "https://keepme.io"}, + "$window_id": "the window id", + 
"timestamp": 1234567890, + }, + { + "type": 5, + "data": {"tag": "Message too large"}, + "timestamp": 1234567890, + "$window_id": "the window id", + }, + ], + ) + sample_replay_data_to_object_storage(event, random_number) + + with pytest.raises(ObjectStorageError): + object_storage.read("session_id/abcdefgh.json", bucket=TEST_SAMPLES_BUCKET) diff --git a/posthog/api/test/test_decide.py b/posthog/api/test/test_decide.py index fd265ea60b98d..34009fdf4ea12 100644 --- a/posthog/api/test/test_decide.py +++ b/posthog/api/test/test_decide.py @@ -2200,6 +2200,149 @@ def test_flag_with_regular_cohorts(self, *args): self.assertEqual(response.json()["featureFlags"], {"cohort-flag": False}) self.assertEqual(response.json()["errorsWhileComputingFlags"], False) + def test_flag_with_invalid_cohort_filter_condition(self, *args): + self.team.app_urls = ["https://example.com"] + self.team.save() + self.client.logout() + + person1_distinct_id = "example_id" + Person.objects.create( + team=self.team, + distinct_ids=[person1_distinct_id], + properties={"registration_ts": 1716447600}, + ) + + # Create a cohort with an invalid filter condition (tis broken filter came from this issue: https://github.com/PostHog/posthog/issues/23213) + # The invalid condition is that the registration_ts property is compared against a list of values + # Since this filter must match everything, the flag should evaluate to False + cohort = Cohort.objects.create( + team=self.team, + filters={ + "properties": { + "type": "OR", + "values": [ + { + "type": "AND", + "values": [ + # This is the valid condition + { + "key": "registration_ts", + "type": "person", + "value": "1716274800", + "operator": "gte", + }, + # This is the invalid condition (lte operator comparing against a list of values) + { + "key": "registration_ts", + "type": "person", + "value": ["1716447600"], + "operator": "lte", + }, + ], + } + ], + } + }, + name="Test cohort", + ) + + # Create a feature flag that uses the cohort + FeatureFlag.objects.create( + team=self.team, + filters={ + "groups": [ + { + "properties": [ + { + "key": "id", + "type": "cohort", + "value": cohort.pk, + } + ], + } + ] + }, + name="This is a cohort-based flag", + key="cohort-flag", + created_by=self.user, + ) + + with self.assertNumQueries(5): + response = self._post_decide(api_version=3, distinct_id=person1_distinct_id) + self.assertEqual(response.json()["featureFlags"], {"cohort-flag": False}) + self.assertEqual(response.json()["errorsWhileComputingFlags"], False) + + def test_flag_with_invalid_but_safe_cohort_filter_condition(self, *args): + self.team.app_urls = ["https://example.com"] + self.team.save() + self.client.logout() + + person1_distinct_id = "example_id" + Person.objects.create( + team=self.team, + distinct_ids=[person1_distinct_id], + properties={"registration_ts": 1716447600}, + ) + + # Create a cohort with a safe OR filter that contains an invalid condition + # it should still evaluate the FeatureFlag to True + cohort = Cohort.objects.create( + team=self.team, + filters={ + "properties": { + "type": "OR", + "values": [ + { + "type": "OR", + "values": [ + # This is the valid condition + { + "key": "registration_ts", + "type": "person", + "value": "1716274800", + "operator": "gte", + }, + # This is the invalid condition (lte operator comparing against a list of values) + { + "key": "registration_ts", + "type": "person", + "value": ["1716447600"], + "operator": "lte", + }, + ], + } + ], + } + }, + name="Test cohort", + ) + + # Create a feature flag that uses the cohort + 
FeatureFlag.objects.create( + team=self.team, + filters={ + "groups": [ + { + "properties": [ + { + "key": "id", + "type": "cohort", + "value": cohort.pk, + } + ], + } + ] + }, + name="This is a cohort-based flag", + key="cohort-flag", + created_by=self.user, + ) + + with self.assertNumQueries(5): + response = self._post_decide(api_version=3, distinct_id=person1_distinct_id) + self.assertEqual(response.json()["featureFlags"], {"cohort-flag": True}) + self.assertEqual(response.json()["errorsWhileComputingFlags"], False) + def test_flag_with_unknown_cohort(self, *args): self.team.app_urls = ["https://example.com"] self.team.save() diff --git a/posthog/api/test/test_feature_flag.py b/posthog/api/test/test_feature_flag.py index 3536f0b8e7352..03fccd5f3b730 100644 --- a/posthog/api/test/test_feature_flag.py +++ b/posthog/api/test/test_feature_flag.py @@ -86,139 +86,45 @@ def test_cant_create_flag_with_duplicate_key(self): def test_cant_create_flag_with_invalid_filters(self): count = FeatureFlag.objects.count() - response = self.client.post( - f"/api/projects/{self.team.id}/feature_flags", - { - "name": "Beta feature", - "key": "beta-x", - "filters": { - "groups": [ - { - "rollout_percentage": 65, - "properties": [ - { - "key": "email", - "type": "person", - "value": ["@posthog.com"], - "operator": "icontains", - } - ], - } - ] - }, - }, - ) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response.json(), - { - "type": "validation_error", - "code": "invalid_value", - "detail": "Invalid value for operator icontains: ['@posthog.com']", - "attr": "filters", - }, - ) + invalid_operators = ["icontains", "regex", "not_icontains", "not_regex", "lt", "gt", "lte", "gte"] - response = self.client.post( - f"/api/projects/{self.team.id}/feature_flags", - { - "name": "Beta feature", - "key": "beta-x", - "filters": { - "groups": [ - { - "rollout_percentage": 65, - "properties": [ - { - "key": "email", - "type": "person", - "value": ["@posthog.com"], - "operator": "regex", - } - ], - } - ] + for operator in invalid_operators: + response = self.client.post( + f"/api/projects/{self.team.id}/feature_flags", + { + "name": "Beta feature", + "key": "beta-x", + "filters": { + "groups": [ + { + "rollout_percentage": 65, + "properties": [ + { + "key": "email", + "type": "person", + "value": ["@posthog.com"], + "operator": operator, + } + ], + } + ] + }, }, - }, - ) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response.json(), - { - "type": "validation_error", - "code": "invalid_value", - "detail": "Invalid value for operator regex: ['@posthog.com']", - "attr": "filters", - }, - ) - - response = self.client.post( - f"/api/projects/{self.team.id}/feature_flags", - { - "name": "Beta feature", - "key": "beta-x", - "filters": { - "groups": [ - { - "rollout_percentage": 65, - "properties": [ - { - "key": "email", - "type": "person", - "value": ["@posthog.com"], - "operator": "not_icontains", - } - ], - } - ] + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual( + response.json(), + { + "type": "validation_error", + "code": "invalid_value", + "detail": f"Invalid value for operator {operator}: ['@posthog.com']", + "attr": "filters", }, - }, - ) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response.json(), - { - "type": "validation_error", - "code": "invalid_value", - "detail": "Invalid value for operator not_icontains: ['@posthog.com']", - "attr": 
"filters", - }, - ) + ) - response = self.client.post( - f"/api/projects/{self.team.id}/feature_flags", - { - "name": "Beta feature", - "key": "beta-x", - "filters": { - "groups": [ - { - "rollout_percentage": 65, - "properties": [ - { - "key": "email", - "type": "person", - "value": ["@posthog.com"], - "operator": "not_regex", - } - ], - } - ] - }, - }, - ) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual( - response.json(), - { - "type": "validation_error", - "code": "invalid_value", - "detail": "Invalid value for operator not_regex: ['@posthog.com']", - "attr": "filters", - }, - ) self.assertEqual(FeatureFlag.objects.count(), count) + # Test that a string value is still acceptable response = self.client.post( f"/api/projects/{self.team.id}/feature_flags", { @@ -241,6 +147,7 @@ def test_cant_create_flag_with_invalid_filters(self): }, }, ) + self.assertEqual(response.status_code, status.HTTP_201_CREATED) def test_cant_update_flag_with_duplicate_key(self): diff --git a/posthog/clickhouse/client/execute.py b/posthog/clickhouse/client/execute.py index adcd9b726ac65..317fa66755e8a 100644 --- a/posthog/clickhouse/client/execute.py +++ b/posthog/clickhouse/client/execute.py @@ -41,8 +41,11 @@ @lru_cache(maxsize=1) def default_settings() -> dict: + # https://clickhouse.com/blog/clickhouse-fully-supports-joins-how-to-choose-the-right-algorithm-part5 + # We default to three memory bound join operations, in decreasing order of speed + # The merge algorithms are not memory bound, and can be selectively used in places where it makes sense return { - "join_algorithm": "direct,parallel_hash", + "join_algorithm": "direct,parallel_hash,hash", "distributed_replica_max_ignored_errors": 1000, } diff --git a/posthog/clickhouse/migrations/0066_sessions_group_by.py b/posthog/clickhouse/migrations/0066_sessions_group_by.py new file mode 100644 index 0000000000000..628803f4beba9 --- /dev/null +++ b/posthog/clickhouse/migrations/0066_sessions_group_by.py @@ -0,0 +1,6 @@ +from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions +from posthog.models.raw_sessions.sql import RAW_SESSION_TABLE_UPDATE_SQL + +operations = [ + run_sql_with_exceptions(RAW_SESSION_TABLE_UPDATE_SQL), +] diff --git a/posthog/clickhouse/test/__snapshots__/test_schema.ambr b/posthog/clickhouse/test/__snapshots__/test_schema.ambr index 83fb6f0831b8c..08d4d062afcdf 100644 --- a/posthog/clickhouse/test/__snapshots__/test_schema.ambr +++ b/posthog/clickhouse/test/__snapshots__/test_schema.ambr @@ -1745,11 +1745,12 @@ -- replay false as maybe_has_session_replay FROM posthog_test.sharded_events - WHERE and( - bitAnd(bitShiftRight(toUInt128(accurateCastOrNull(`$session_id`, 'UUID')), 76), 0xF) == 7, -- has a session id and is valid uuidv7 - toYYYYMMDD(timestamp) >= 0 - ) - GROUP BY session_id_v7, team_id + WHERE bitAnd(bitShiftRight(toUInt128(accurateCastOrNull(`$session_id`, 'UUID')), 76), 0xF) == 7 -- has a session id and is valid uuidv7) + GROUP BY + team_id, + toStartOfHour(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(session_id_v7, 80)), 1000))), + cityHash64(session_id_v7), + session_id_v7 ''' diff --git a/posthog/hogql/constants.py b/posthog/hogql/constants.py index a273f1da6919d..02e29cbfad351 100644 --- a/posthog/hogql/constants.py +++ b/posthog/hogql/constants.py @@ -105,3 +105,4 @@ class HogQLGlobalSettings(HogQLQuerySettings): max_expanded_ast_elements: Optional[int] = 2_000_000 max_query_size: Optional[int] = 262144 * 4 # default value 262144 (= 256 KiB) 
max_bytes_before_external_group_by: Optional[int] = 0 # default value means we don't swap ordering by to disk + allow_experimental_analyzer: Optional[bool] = None diff --git a/posthog/hogql/database/schema/sessions_v1.py b/posthog/hogql/database/schema/sessions_v1.py index 14ab2b8f2b9ae..c95076f603fe7 100644 --- a/posthog/hogql/database/schema/sessions_v1.py +++ b/posthog/hogql/database/schema/sessions_v1.py @@ -157,16 +157,16 @@ def arg_max_merge_field(field_name: str) -> ast.Call: ) ], ), - "$entry_current_url": arg_min_merge_field("entry_url"), - "$exit_current_url": arg_max_merge_field("exit_url"), - "$entry_utm_source": arg_min_merge_field("initial_utm_source"), - "$entry_utm_campaign": arg_min_merge_field("initial_utm_campaign"), - "$entry_utm_medium": arg_min_merge_field("initial_utm_medium"), - "$entry_utm_term": arg_min_merge_field("initial_utm_term"), - "$entry_utm_content": arg_min_merge_field("initial_utm_content"), - "$entry_referring_domain": arg_min_merge_field("initial_referring_domain"), - "$entry_gclid": arg_min_merge_field("initial_gclid"), - "$entry_gad_source": arg_min_merge_field("initial_gad_source"), + "$entry_current_url": null_if_empty(arg_min_merge_field("entry_url")), + "$exit_current_url": null_if_empty(arg_max_merge_field("exit_url")), + "$entry_utm_source": null_if_empty(arg_min_merge_field("initial_utm_source")), + "$entry_utm_campaign": null_if_empty(arg_min_merge_field("initial_utm_campaign")), + "$entry_utm_medium": null_if_empty(arg_min_merge_field("initial_utm_medium")), + "$entry_utm_term": null_if_empty(arg_min_merge_field("initial_utm_term")), + "$entry_utm_content": null_if_empty(arg_min_merge_field("initial_utm_content")), + "$entry_referring_domain": null_if_empty(arg_min_merge_field("initial_referring_domain")), + "$entry_gclid": null_if_empty(arg_min_merge_field("initial_gclid")), + "$entry_gad_source": null_if_empty(arg_min_merge_field("initial_gad_source")), "$event_count_map": ast.Call( name="sumMap", args=[ast.Field(chain=[table_name, "event_count_map"])], @@ -420,3 +420,7 @@ def get_lazy_session_table_values_v1(key: str, search_term: Optional[str], team: return [["1"], ["0"]] return [] + + +def null_if_empty(expr: ast.Expr) -> ast.Call: + return ast.Call(name="nullIf", args=[expr, ast.Constant(value="")]) diff --git a/posthog/hogql/database/schema/sessions_v2.py b/posthog/hogql/database/schema/sessions_v2.py index d83b8dd3d1a84..7c3fd554bf9f7 100644 --- a/posthog/hogql/database/schema/sessions_v2.py +++ b/posthog/hogql/database/schema/sessions_v2.py @@ -18,6 +18,7 @@ LazyJoinToAdd, ) from posthog.hogql.database.schema.channel_type import create_channel_type_expr, POSSIBLE_CHANNEL_TYPES +from posthog.hogql.database.schema.sessions_v1 import null_if_empty from posthog.hogql.database.schema.util.where_clause_extractor import SessionMinTimestampWhereClauseExtractorV2 from posthog.hogql.errors import ResolutionError from posthog.models.property_definition import PropertyType @@ -185,20 +186,20 @@ def arg_max_merge_field(field_name: str) -> ast.Call: ) ], ), - "$entry_current_url": arg_min_merge_field("entry_url"), - "$end_current_url": arg_max_merge_field("end_url"), - "$entry_utm_source": arg_min_merge_field("initial_utm_source"), - "$entry_utm_campaign": arg_min_merge_field("initial_utm_campaign"), - "$entry_utm_medium": arg_min_merge_field("initial_utm_medium"), - "$entry_utm_term": arg_min_merge_field("initial_utm_term"), - "$entry_utm_content": arg_min_merge_field("initial_utm_content"), - "$entry_referring_domain": 
arg_min_merge_field("initial_referring_domain"), - "$entry_gclid": arg_min_merge_field("initial_gclid"), - "$entry_gad_source": arg_min_merge_field("initial_gad_source"), + "$entry_current_url": null_if_empty(arg_min_merge_field("entry_url")), + "$end_current_url": null_if_empty(arg_max_merge_field("end_url")), + "$entry_utm_source": null_if_empty(arg_min_merge_field("initial_utm_source")), + "$entry_utm_campaign": null_if_empty(arg_min_merge_field("initial_utm_campaign")), + "$entry_utm_medium": null_if_empty(arg_min_merge_field("initial_utm_medium")), + "$entry_utm_term": null_if_empty(arg_min_merge_field("initial_utm_term")), + "$entry_utm_content": null_if_empty(arg_min_merge_field("initial_utm_content")), + "$entry_referring_domain": null_if_empty(arg_min_merge_field("initial_referring_domain")), + "$entry_gclid": null_if_empty(arg_min_merge_field("initial_gclid")), + "$entry_gad_source": null_if_empty(arg_min_merge_field("initial_gad_source")), "$pageview_count": ast.Call(name="sum", args=[ast.Field(chain=[table_name, "pageview_count"])]), "$screen_count": ast.Call(name="sum", args=[ast.Field(chain=[table_name, "screen_count"])]), "$autocapture_count": ast.Call(name="sum", args=[ast.Field(chain=[table_name, "autocapture_count"])]), - "$last_external_click_url": arg_max_merge_field("last_external_click_url"), + "$last_external_click_url": null_if_empty(arg_max_merge_field("last_external_click_url")), } # Alias aggregate_fields["id"] = aggregate_fields["session_id"] diff --git a/posthog/hogql/database/schema/util/test/test_session_where_clause_extractor.py b/posthog/hogql/database/schema/util/test/test_session_where_clause_extractor.py index 96230257b187d..4ed10becfd757 100644 --- a/posthog/hogql/database/schema/util/test/test_session_where_clause_extractor.py +++ b/posthog/hogql/database/schema/util/test/test_session_where_clause_extractor.py @@ -478,21 +478,21 @@ def test_session_replay_query(self): ) expected = f"""SELECT s.session_id AS session_id, - min(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s)) AS start_time + min(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s)) AS start_time FROM session_replay_events AS s LEFT JOIN (SELECT - path(nullIf(argMinMerge(sessions.entry_url), %(hogql_val_0)s)) AS `$entry_pathname`, + path(nullIf(nullIf(argMinMerge(sessions.entry_url), %(hogql_val_0)s), %(hogql_val_1)s)) AS `$entry_pathname`, sessions.session_id AS session_id FROM sessions WHERE - and(equals(sessions.team_id, {self.team.id}), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, %(hogql_val_1)s), toIntervalDay(3)), %(hogql_val_2)s), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, %(hogql_val_3)s), toIntervalDay(3)), now64(6, %(hogql_val_4)s)), 0)) + and(equals(sessions.team_id, {self.team.id}), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, %(hogql_val_2)s), toIntervalDay(3)), %(hogql_val_3)s), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, %(hogql_val_4)s), toIntervalDay(3)), now64(6, %(hogql_val_5)s)), 0)) GROUP BY sessions.session_id, sessions.session_id) AS s__session ON equals(s.session_id, s__session.session_id) WHERE - and(equals(s.team_id, {self.team.id}), ifNull(equals(s__session.`$entry_pathname`, %(hogql_val_6)s), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), %(hogql_val_8)s), 0), ifNull(less(toTimeZone(s.min_first_timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), 0)) + and(equals(s.team_id, {self.team.id}), ifNull(equals(s__session.`$entry_pathname`, 
%(hogql_val_7)s), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_8)s), %(hogql_val_9)s), 0), ifNull(less(toTimeZone(s.min_first_timestamp, %(hogql_val_10)s), now64(6, %(hogql_val_11)s)), 0)) GROUP BY s.session_id LIMIT 50000""" diff --git a/posthog/hogql/database/test/test_database.py b/posthog/hogql/database/test/test_database.py index f91187d1e091e..215677abe41ba 100644 --- a/posthog/hogql/database/test/test_database.py +++ b/posthog/hogql/database/test/test_database.py @@ -161,8 +161,8 @@ def test_serialize_database_warehouse_table_source(self): source=source, table=warehouse_table, should_sync=True, - status=ExternalDataSchema.Status.COMPLETED, last_synced_at="2024-01-01", + # No status but should be completed because a data warehouse table already exists ) database = create_hogql_database(team_id=self.team.pk) @@ -183,7 +183,7 @@ def test_serialize_database_warehouse_table_source(self): assert table.schema_.name == "table_1" assert table.schema_.should_sync is True assert table.schema_.incremental is False - assert table.schema_.status == "Completed" + assert table.schema_.status is None assert table.schema_.last_synced_at == "2024-01-01 00:00:00+00:00" field = table.fields.get("id") diff --git a/posthog/hogql/modifiers.py b/posthog/hogql/modifiers.py index 8deb1fffdae12..0fa3e390e1796 100644 --- a/posthog/hogql/modifiers.py +++ b/posthog/hogql/modifiers.py @@ -52,8 +52,8 @@ def set_default_modifier_values(modifiers: HogQLQueryModifiers, team: "Team"): if modifiers.bounceRatePageViewMode is None: modifiers.bounceRatePageViewMode = BounceRatePageViewMode.COUNT_PAGEVIEWS - if modifiers.sessionTableVersion is None or modifiers.sessionTableVersion == SessionTableVersion.AUTO: - modifiers.sessionTableVersion = SessionTableVersion.V1 + if modifiers.sessionTableVersion is None: + modifiers.sessionTableVersion = SessionTableVersion.AUTO def set_default_in_cohort_via(modifiers: HogQLQueryModifiers) -> HogQLQueryModifiers: diff --git a/posthog/hogql/property.py b/posthog/hogql/property.py index de99a9e4b085f..13bbf579c32e0 100644 --- a/posthog/hogql/property.py +++ b/posthog/hogql/property.py @@ -170,7 +170,11 @@ def property_to_expr( else: chain = ["properties"] - properties_field = ast.Field(chain=chain) + if property.type == "session": + properties_field = None + else: + properties_field = ast.Field(chain=chain) + field = ast.Field(chain=[*chain, property.key]) if isinstance(value, list): @@ -219,7 +223,7 @@ def property_to_expr( ] + ( [] - if properties_field == field + if not properties_field or properties_field == field else [ ast.Not( expr=ast.Call( diff --git a/posthog/hogql_queries/actors_query_runner.py b/posthog/hogql_queries/actors_query_runner.py index 78950b7da644e..4cda01240f19e 100644 --- a/posthog/hogql_queries/actors_query_runner.py +++ b/posthog/hogql_queries/actors_query_runner.py @@ -3,9 +3,11 @@ from collections.abc import Sequence, Iterator from posthog.hogql import ast +from posthog.hogql.constants import HogQLGlobalSettings from posthog.hogql.parser import parse_expr, parse_order_expr from posthog.hogql.property import has_aggregation from posthog.hogql_queries.actor_strategies import ActorStrategy, PersonStrategy, GroupStrategy +from posthog.hogql_queries.insights.funnels.funnels_query_runner import FunnelsQueryRunner from posthog.hogql_queries.insights.insight_actors_query_runner import InsightActorsQueryRunner from posthog.hogql_queries.insights.paginators import HogQLHasMorePaginator from posthog.hogql_queries.query_runner import 
QueryRunner, get_query_runner @@ -89,12 +91,21 @@ def prepare_recordings( return column_index_events, self.strategy.get_recordings(matching_events_list) def calculate(self) -> ActorsQueryResponse: + # Funnel queries require the experimental analyzer to run correctly + # Can remove once clickhouse moves to version 24.3 or above + settings = None + if isinstance(self.source_query_runner, InsightActorsQueryRunner) and isinstance( + self.source_query_runner.source_runner, FunnelsQueryRunner + ): + settings = HogQLGlobalSettings(allow_experimental_analyzer=True) + response = self.paginator.execute_hogql_query( query_type="ActorsQuery", query=self.to_query(), team=self.team, timings=self.timings, modifiers=self.modifiers, + settings=settings, ) input_columns = self.input_columns() missing_actors_count = None diff --git a/posthog/hogql_queries/insights/funnels/base.py b/posthog/hogql_queries/insights/funnels/base.py index 3389b4e4d26f1..c6bb8f5149e16 100644 --- a/posthog/hogql_queries/insights/funnels/base.py +++ b/posthog/hogql_queries/insights/funnels/base.py @@ -1022,9 +1022,7 @@ def _get_step_counts_query(self, outer_select: list[ast.Expr], inner_select: lis ) ), group_by=group_by_columns, - having=ast.CompareOperation( - left=ast.Field(chain=["steps"]), right=ast.Field(chain=["max_steps"]), op=ast.CompareOperationOp.Eq - ), + having=parse_expr("steps = max(max_steps)"), ) def actor_query( diff --git a/posthog/hogql_queries/insights/funnels/funnels_query_runner.py b/posthog/hogql_queries/insights/funnels/funnels_query_runner.py index 3ad62c08a1f66..a0e6404694502 100644 --- a/posthog/hogql_queries/insights/funnels/funnels_query_runner.py +++ b/posthog/hogql_queries/insights/funnels/funnels_query_runner.py @@ -97,8 +97,10 @@ def calculate(self): modifiers=self.modifiers, limit_context=self.limit_context, settings=HogQLGlobalSettings( - max_bytes_before_external_group_by=MAX_BYTES_BEFORE_EXTERNAL_GROUP_BY - ), # Make sure funnel queries never OOM + # Make sure funnel queries never OOM + max_bytes_before_external_group_by=MAX_BYTES_BEFORE_EXTERNAL_GROUP_BY, + allow_experimental_analyzer=True, + ), ) results = self.funnel_class._format_results(response.results) diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel.ambr index d53092966d4d1..9822999809a62 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel.ambr @@ -84,8 +84,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -93,7 +93,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestFOSSFunnel.test_funnel_conversion_window_seconds.1 @@ -180,8 +181,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [2, 3]), 0) ORDER BY aggregation_target ASC) 
AS source INNER JOIN @@ -219,8 +220,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [2, 3]), 0) ORDER BY aggregation_target ASC) AS source))) GROUP BY person.id @@ -234,7 +235,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=0 + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 ''' # --- # name: TestFOSSFunnel.test_funnel_events_with_person_on_events_v2 @@ -331,8 +333,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -340,7 +342,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestFOSSFunnel.test_funnel_with_precalculated_cohort_step_filter @@ -418,8 +421,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -427,7 +430,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestFOSSFunnel.test_funnel_with_property_groups @@ -527,8 +531,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -536,7 +540,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestFOSSFunnel.test_funnel_with_property_groups.1 @@ -635,8 +640,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2, 3]), 0) ORDER BY aggregation_target ASC) AS source INNER JOIN @@ -683,8 +688,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), 
toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-07-01 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$pageview', 'user signed up')), or(and(ifNull(ilike(e__pdi__person.properties___email, '%.com%'), 0), ifNull(equals(e__pdi__person.properties___age, '20'), 0)), or(ifNull(ilike(e__pdi__person.properties___email, '%.org%'), 0), ifNull(equals(e__pdi__person.properties___age, '28'), 0)))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2, 3]), 0) ORDER BY aggregation_target ASC) AS source))) GROUP BY person.id @@ -698,7 +703,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=0 + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 ''' # --- # name: TestFOSSFunnel.test_funnel_with_property_groups.2 @@ -797,8 +803,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [2, 3]), 0) ORDER BY aggregation_target ASC) AS source INNER JOIN @@ -845,8 +851,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-07-01 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$pageview', 'user signed up')), or(and(ifNull(ilike(e__pdi__person.properties___email, '%.com%'), 0), ifNull(equals(e__pdi__person.properties___age, '20'), 0)), or(ifNull(ilike(e__pdi__person.properties___email, '%.org%'), 0), ifNull(equals(e__pdi__person.properties___age, '28'), 0)))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [2, 3]), 0) ORDER BY aggregation_target ASC) AS source))) GROUP BY person.id @@ -860,7 +866,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=0 + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 ''' # --- # name: TestFOSSFunnel.test_funnel_with_property_groups.3 @@ -959,8 +966,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [3]), 0) ORDER BY aggregation_target ASC) AS source INNER JOIN @@ -1007,8 +1014,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-07-01 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$pageview', 'user signed up')), 
or(and(ifNull(ilike(e__pdi__person.properties___email, '%.com%'), 0), ifNull(equals(e__pdi__person.properties___age, '20'), 0)), or(ifNull(ilike(e__pdi__person.properties___email, '%.org%'), 0), ifNull(equals(e__pdi__person.properties___age, '28'), 0)))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [3]), 0) ORDER BY aggregation_target ASC) AS source))) GROUP BY person.id @@ -1022,7 +1029,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=0 + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 ''' # --- # name: TestFOSSFunnel.test_funnel_with_static_cohort_step_filter @@ -1089,8 +1097,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -1098,7 +1106,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestFOSSFunnel.test_timezones @@ -1153,8 +1162,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -1162,7 +1171,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestFunnelBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen @@ -1248,8 +1258,8 @@ GROUP BY aggregation_target, steps, prop - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) GROUP BY prop) GROUP BY final_prop LIMIT 100 SETTINGS readonly=2, @@ -1259,7 +1269,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestFunnelBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step @@ -1352,8 +1363,8 @@ GROUP BY aggregation_target, steps, prop - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) GROUP BY prop) GROUP BY final_prop LIMIT 100 SETTINGS readonly=2, @@ -1363,7 +1374,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: 
TestFunnelBreakdown.test_funnel_step_multiple_breakdown_snapshot @@ -1449,8 +1461,8 @@ GROUP BY aggregation_target, steps, prop - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) GROUP BY prop) GROUP BY final_prop LIMIT 100 SETTINGS readonly=2, @@ -1460,7 +1472,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events @@ -1587,8 +1600,8 @@ GROUP BY aggregation_target, steps, prop - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) GROUP BY prop) GROUP BY final_prop LIMIT 100 SETTINGS readonly=2, @@ -1598,7 +1611,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2 @@ -1725,8 +1739,8 @@ GROUP BY aggregation_target, steps, prop - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) GROUP BY prop) GROUP BY final_prop LIMIT 100 SETTINGS readonly=2, @@ -1736,7 +1750,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestFunnelGroupBreakdown.test_funnel_breakdown_group @@ -1870,8 +1885,8 @@ GROUP BY aggregation_target, steps, prop - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) GROUP BY prop) GROUP BY final_prop LIMIT 100 SETTINGS readonly=2, @@ -1881,7 +1896,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestFunnelGroupBreakdown.test_funnel_breakdown_group.1 diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_breakdowns_by_current_url.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_breakdowns_by_current_url.ambr index ec72759642633..968e9f59ec233 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_breakdowns_by_current_url.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_breakdowns_by_current_url.ambr @@ -82,8 +82,8 @@ GROUP BY aggregation_target, steps, prop - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) GROUP BY prop) GROUP BY final_prop LIMIT 100 SETTINGS readonly=2, @@ -93,7 +93,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + 
max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestFunnelBreakdownsByCurrentURL.test_breakdown_by_pathname @@ -179,8 +180,8 @@ GROUP BY aggregation_target, steps, prop - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) GROUP BY prop) GROUP BY final_prop LIMIT 100 SETTINGS readonly=2, @@ -190,6 +191,7 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation.ambr index ea5b333eec6d8..404380346de1c 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation.ambr @@ -70,8 +70,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors ON equals(event__pdi.person_id, funnel_actors.actor_id) WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), notIn(event.event, [])) @@ -139,8 +139,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, @@ -220,8 +220,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors JOIN @@ -302,8 +302,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, @@ -425,8 +425,8 @@ WHERE ifNull(equals(step_0, 
1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) @@ -474,8 +474,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__pdi__person.`properties___$browser`, 'Positive'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) @@ -619,8 +619,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) @@ -668,8 +668,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__pdi__person.`properties___$browser`, 'Positive'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) @@ -813,8 +813,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) @@ -862,8 +862,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__pdi__person.`properties___$browser`, 'Negative'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY 
aggregation_target ASC) AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) @@ -1007,8 +1007,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) @@ -1056,8 +1056,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__pdi__person.`properties___$browser`, 'Negative'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) @@ -1159,8 +1159,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors JOIN @@ -1241,8 +1241,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, @@ -1364,8 +1364,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) @@ -1413,8 +1413,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__pdi__person.`properties___$browser`, 'Positive'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) @@ -1558,8 +1558,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and 
isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) @@ -1607,8 +1607,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__pdi__person.`properties___$browser`, 'Positive'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) @@ -1752,8 +1752,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) @@ -1801,8 +1801,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__pdi__person.`properties___$browser`, 'Negative'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) @@ -1946,8 +1946,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) @@ -1995,8 +1995,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-14 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('paid', 'user signed up')), ifNull(equals(e__pdi__person.`properties___$browser`, 'Negative'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) @@ -2093,8 +2093,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - 
HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_1`) WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), in(event.event, ['positively_related', 'negatively_related']))) @@ -2156,8 +2156,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, @@ -2232,8 +2232,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_1`) WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), in(event.event, ['positively_related', 'negatively_related']))) @@ -2295,8 +2295,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, @@ -2366,8 +2366,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE 
ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), notIn(event.event, [])) @@ -2428,8 +2428,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, @@ -2532,8 +2532,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'positively_related'), ifNull(equals(funnel_actors.steps, 2), 0)) @@ -2648,8 +2648,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), 
greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'positively_related'), ifNull(notEquals(funnel_actors.steps, 2), 1)) @@ -2764,8 +2764,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'negatively_related'), ifNull(equals(funnel_actors.steps, 2), 0)) @@ -2880,8 +2880,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'negatively_related'), ifNull(notEquals(funnel_actors.steps, 2), 1)) @@ -2971,8 +2971,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors ON 
equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), notIn(event.event, [])) @@ -3041,8 +3041,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, @@ -3145,8 +3145,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'negatively_related'), ifNull(equals(funnel_actors.steps, 2), 0)) @@ -3261,8 +3261,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), 
less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'negatively_related'), ifNull(notEquals(funnel_actors.steps, 2), 1)) @@ -3344,8 +3344,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), notIn(event.event, [])) @@ -3406,8 +3406,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, @@ -3510,8 +3510,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'positively_related'), ifNull(equals(funnel_actors.steps, 2), 0)) @@ -3626,8 +3626,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and 
isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'positively_related'), ifNull(notEquals(funnel_actors.steps, 2), 1)) @@ -3742,8 +3742,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'negatively_related'), ifNull(equals(funnel_actors.steps, 2), 0)) @@ -3858,8 +3858,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, 
plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'negatively_related'), ifNull(notEquals(funnel_actors.steps, 2), 1)) @@ -3949,8 +3949,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), notIn(event.event, [])) @@ -4019,8 +4019,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, @@ -4123,8 +4123,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'negatively_related'), ifNull(equals(funnel_actors.steps, 2), 0)) @@ -4239,8 +4239,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE 
ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'negatively_related'), ifNull(notEquals(funnel_actors.steps, 2), 1)) @@ -4325,8 +4325,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LEFT JOIN @@ -4399,8 +4399,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, @@ -4510,8 +4510,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) @@ -4633,8 +4633,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) @@ -4756,8 +4756,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) @@ -4879,8 +4879,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) @@ -4965,8 +4965,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), 
isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LEFT JOIN @@ -5039,8 +5039,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, @@ -5113,8 +5113,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LEFT JOIN @@ -5187,8 +5187,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, @@ -5298,8 +5298,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) @@ -5421,8 +5421,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) @@ -5544,8 +5544,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) @@ -5667,8 +5667,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) @@ -5753,8 +5753,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LEFT JOIN @@ -5827,8 +5827,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), 
isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, @@ -5901,8 +5901,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LEFT JOIN @@ -5975,8 +5975,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, @@ -6086,8 +6086,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) @@ -6209,8 +6209,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) @@ -6332,8 +6332,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) @@ -6455,8 +6455,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) @@ -6541,8 +6541,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LEFT JOIN @@ -6615,8 +6615,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, @@ -6689,8 +6689,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE 
ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LEFT JOIN @@ -6763,8 +6763,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, @@ -6874,8 +6874,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) @@ -6997,8 +6997,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) @@ -7120,8 +7120,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) @@ -7243,8 +7243,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) @@ -7329,8 +7329,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LEFT JOIN @@ -7403,8 +7403,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, @@ -7477,8 +7477,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LEFT JOIN @@ -7551,8 +7551,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 
100 SETTINGS readonly=2, @@ -7662,8 +7662,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) @@ -7785,8 +7785,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) @@ -7908,8 +7908,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) @@ -8031,8 +8031,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) @@ -8117,8 +8117,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LEFT JOIN @@ -8191,8 +8191,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors LIMIT 100 SETTINGS readonly=2, diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlations_persons.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlations_persons.ambr index 2e49e8d40d3c9..e939cd2b30bfa 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlations_persons.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlations_persons.ambr @@ -105,8 +105,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors ON equals(event__pdi.person_id, funnel_actors.actor_id) WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), 
assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-08 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-08 23:59:59', 6, 'UTC')))), notIn(event.event, ['$pageview', 'insight analyzed']), equals(event.event, 'insight loaded'), ifNull(equals(funnel_actors.steps, 2), 0)) @@ -152,8 +152,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$pageview', 'insight analyzed'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors ON equals(event__pdi.person_id, funnel_actors.actor_id) WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-08 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-08 23:59:59', 6, 'UTC')))), notIn(event.event, ['$pageview', 'insight analyzed']), equals(event.event, 'insight loaded'), ifNull(equals(funnel_actors.steps, 2), 0)) @@ -360,8 +360,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2, 3]), 0) ORDER BY aggregation_target ASC) AS funnel_actors ON equals(event__pdi.person_id, funnel_actors.actor_id) WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-08 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-08 23:59:59', 6, 'UTC')))), notIn(event.event, 
['$pageview', 'insight analyzed', 'insight updated']), equals(event.event, 'insight loaded'), ifNull(notEquals(funnel_actors.steps, 3), 1)) @@ -419,8 +419,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$pageview', 'insight analyzed', 'insight updated'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2, 3]), 0) ORDER BY aggregation_target ASC) AS funnel_actors ON equals(event__pdi.person_id, funnel_actors.actor_id) WHERE and(equals(event.team_id, 2), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-08 23:59:59', 6, 'UTC'))), equals(event.team_id, 2), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-08 23:59:59', 6, 'UTC')))), notIn(event.event, ['$pageview', 'insight analyzed', 'insight updated']), equals(event.event, 'insight loaded'), ifNull(notEquals(funnel_actors.steps, 3), 1)) @@ -564,8 +564,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) @@ -613,8 +613,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('$pageview', 'insight analyzed')), ifNull(equals(e__pdi__person.properties___foo, 'bar'), 0)), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) @@ -758,8 +758,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) @@ -807,8 +807,8 @@ 
WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC'))), ifNull(equals(e__pdi__person.properties___foo, 'bar'), 0)))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) @@ -952,8 +952,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) @@ -1001,8 +1001,8 @@ WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC'))), ifNull(equals(e__pdi__person.properties___foo, 'bar'), 0)))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2]), 0) ORDER BY aggregation_target ASC) AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons.ambr index 02c3ed119fcad..2044dcbed3b96 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons.ambr @@ -150,8 +150,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2, 3]), 0) ORDER BY aggregation_target ASC) AS source INNER JOIN @@ -197,8 +197,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2, 3]), 0) ORDER BY aggregation_target ASC) AS source))) GROUP BY person.id @@ -212,7 +212,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=0 + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 ''' # --- # name: 
TestFunnelPersons.test_funnel_person_recordings.1 @@ -381,8 +382,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [2, 3]), 0) ORDER BY aggregation_target ASC) AS source INNER JOIN @@ -428,8 +429,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [2, 3]), 0) ORDER BY aggregation_target ASC) AS source))) GROUP BY person.id @@ -443,7 +444,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=0 + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 ''' # --- # name: TestFunnelPersons.test_funnel_person_recordings.3 @@ -612,8 +614,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(equals(steps, 2), 0) ORDER BY aggregation_target ASC) AS source INNER JOIN @@ -659,8 +661,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(equals(steps, 2), 0) ORDER BY aggregation_target ASC) AS source))) GROUP BY person.id @@ -674,7 +676,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=0 + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 ''' # --- # name: TestFunnelPersons.test_funnel_person_recordings.5 diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr index 4b80811bceb9f..453e2164d2150 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr @@ -81,8 +81,8 @@ GROUP BY aggregation_target, steps, prop - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) GROUP BY prop) GROUP BY final_prop LIMIT 100 SETTINGS 
readonly=2, @@ -92,7 +92,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step @@ -184,8 +185,8 @@ GROUP BY aggregation_target, steps, prop - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) GROUP BY prop) GROUP BY final_prop LIMIT 100 SETTINGS readonly=2, @@ -195,7 +196,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestFunnelStrictStepsBreakdown.test_funnel_step_multiple_breakdown_snapshot @@ -280,8 +282,8 @@ GROUP BY aggregation_target, steps, prop - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) GROUP BY prop) GROUP BY final_prop LIMIT 100 SETTINGS readonly=2, @@ -291,7 +293,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events @@ -395,8 +398,8 @@ GROUP BY aggregation_target, steps, prop - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) GROUP BY prop) GROUP BY final_prop LIMIT 100 SETTINGS readonly=2, @@ -406,7 +409,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2 @@ -510,8 +514,8 @@ GROUP BY aggregation_target, steps, prop - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) GROUP BY prop) GROUP BY final_prop LIMIT 100 SETTINGS readonly=2, @@ -521,7 +525,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group @@ -632,8 +637,8 @@ GROUP BY aggregation_target, steps, prop - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) GROUP BY prop) GROUP BY final_prop LIMIT 100 SETTINGS readonly=2, @@ -643,7 +648,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.1 diff --git 
a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons.ambr index e16a91afc1da2..ce3fa166cffc8 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons.ambr @@ -110,8 +110,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2, 3]), 0) ORDER BY aggregation_target ASC) AS source INNER JOIN @@ -149,8 +149,8 @@ WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC')))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2, 3]), 0) ORDER BY aggregation_target ASC) AS source))) GROUP BY person.id @@ -164,7 +164,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=0 + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 ''' # --- # name: TestFunnelStrictStepsPersons.test_strict_funnel_person_recordings.1 @@ -293,8 +294,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [2, 3]), 0) ORDER BY aggregation_target ASC) AS source INNER JOIN @@ -332,8 +333,8 @@ WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC')))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [2, 3]), 0) ORDER BY aggregation_target ASC) AS source))) GROUP BY person.id @@ -347,7 +348,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=0 + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 ''' # --- # name: TestFunnelStrictStepsPersons.test_strict_funnel_person_recordings.3 @@ -476,8 +478,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(equals(steps, 2), 0) ORDER BY aggregation_target ASC) AS source INNER JOIN @@ -515,8 +517,8 @@ WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 
6, 'UTC')))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(equals(steps, 2), 0) ORDER BY aggregation_target ASC) AS source))) GROUP BY person.id @@ -530,7 +532,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=0 + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 ''' # --- # name: TestFunnelStrictStepsPersons.test_strict_funnel_person_recordings.5 diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_time_to_convert.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_time_to_convert.ambr index 64e95bb36e38f..22a3489348248 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_time_to_convert.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_time_to_convert.ambr @@ -89,8 +89,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) AS step_runs + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params) AS average_conversion_time FROM (SELECT plus( @@ -124,8 +124,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) AS step_runs + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params), multiply(floor(divide(minus(step_runs.step_1_average_conversion_time_inner, (SELECT histogram_params.from_seconds AS from_seconds FROM @@ -157,8 +157,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) AS step_runs + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params)), (SELECT histogram_params.bin_width_seconds AS bin_width_seconds FROM @@ -190,8 +190,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), 
in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) AS step_runs + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params))), (SELECT histogram_params.bin_width_seconds AS bin_width_seconds FROM @@ -223,8 +223,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) AS step_runs + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params))) AS bin_from_seconds, count() AS person_count FROM @@ -303,8 +303,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) AS step_runs + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs GROUP BY bin_from_seconds) AS results RIGHT OUTER JOIN (SELECT plus( @@ -338,8 +338,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) AS step_runs + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params), multiply(numbers.number, (SELECT histogram_params.bin_width_seconds AS bin_width_seconds FROM @@ -371,8 +371,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) AS step_runs + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params))) AS bin_from_seconds FROM numbers(plus(ifNull( (SELECT histogram_params.bin_count AS bin_count @@ -405,8 +405,8 @@ WHERE 
and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) AS step_runs + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params), 0), 1)) AS numbers) AS fill ON equals(results.bin_from_seconds, fill.bin_from_seconds) ORDER BY fill.bin_from_seconds ASC LIMIT 100 SETTINGS readonly=2, @@ -416,7 +416,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestFunnelTimeToConvert.test_basic_strict @@ -490,8 +491,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) AS step_runs + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params) AS average_conversion_time FROM (SELECT plus( @@ -520,8 +521,8 @@ WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC')))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) AS step_runs + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params), multiply(floor(divide(minus(step_runs.step_1_average_conversion_time_inner, (SELECT histogram_params.from_seconds AS from_seconds FROM @@ -548,8 +549,8 @@ WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC')))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) AS step_runs + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params)), (SELECT histogram_params.bin_width_seconds AS bin_width_seconds FROM @@ -576,8 +577,8 @@ WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC')))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) AS step_runs + HAVING ifNull(equals(steps, max(max_steps)), 
isNull(steps) + and isNull(max(max_steps)))) AS step_runs WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params))), (SELECT histogram_params.bin_width_seconds AS bin_width_seconds FROM @@ -604,8 +605,8 @@ WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC')))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) AS step_runs + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params))) AS bin_from_seconds, count() AS person_count FROM @@ -665,8 +666,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) AS step_runs + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs GROUP BY bin_from_seconds) AS results RIGHT OUTER JOIN (SELECT plus( @@ -695,8 +696,8 @@ WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC')))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) AS step_runs + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params), multiply(numbers.number, (SELECT histogram_params.bin_width_seconds AS bin_width_seconds FROM @@ -723,8 +724,8 @@ WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC')))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) AS step_runs + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params))) AS bin_from_seconds FROM numbers(plus(ifNull( (SELECT histogram_params.bin_count AS bin_count @@ -752,8 +753,8 @@ WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC')))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) AS step_runs + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params), 0), 1)) AS numbers) AS fill ON equals(results.bin_from_seconds, fill.bin_from_seconds) ORDER BY fill.bin_from_seconds ASC LIMIT 100 SETTINGS readonly=2, @@ -763,7 +764,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, 
max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestFunnelTimeToConvert.test_basic_unordered @@ -925,8 +927,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) AS step_runs + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params) AS average_conversion_time FROM (SELECT plus( @@ -987,8 +989,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) AS step_runs + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params), multiply(floor(divide(minus(step_runs.step_1_average_conversion_time_inner, (SELECT histogram_params.from_seconds AS from_seconds FROM @@ -1047,8 +1049,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) AS step_runs + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params)), (SELECT histogram_params.bin_width_seconds AS bin_width_seconds FROM @@ -1107,8 +1109,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) AS step_runs + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params))), (SELECT histogram_params.bin_width_seconds AS bin_width_seconds FROM @@ -1167,8 +1169,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 
'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) AS step_runs + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params))) AS bin_from_seconds, count() AS person_count FROM @@ -1316,8 +1318,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) AS step_runs + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs GROUP BY bin_from_seconds) AS results RIGHT OUTER JOIN (SELECT plus( @@ -1378,8 +1380,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) AS step_runs + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params), multiply(numbers.number, (SELECT histogram_params.bin_width_seconds AS bin_width_seconds FROM @@ -1438,8 +1440,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) AS step_runs + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params))) AS bin_from_seconds FROM numbers(plus(ifNull( (SELECT histogram_params.bin_count AS bin_count @@ -1499,8 +1501,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-07 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-06-13 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) AS step_runs + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) AS step_runs WHERE isNotNull(step_runs.step_1_average_conversion_time_inner)) AS histogram_params), 0), 1)) AS numbers) AS fill ON equals(results.bin_from_seconds, fill.bin_from_seconds) ORDER BY fill.bin_from_seconds ASC LIMIT 
100 SETTINGS readonly=2, @@ -1510,6 +1512,7 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends.ambr index 0d031da2b03a6..e02eb64d9a216 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends.ambr @@ -88,7 +88,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestFunnelTrends.test_timezones_trends.1 @@ -180,7 +181,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestFunnelTrends.test_week_interval @@ -272,7 +274,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestFunnelTrends.test_week_interval.1 diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_persons.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_persons.ambr index 42faebadeaf62..5b7df2345a412 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_persons.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_persons.ambr @@ -195,7 +195,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=0 + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 ''' # --- # name: TestFunnelTrendsPersons.test_funnel_trend_persons_returns_recordings.1 @@ -409,7 +410,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=0 + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 ''' # --- # name: TestFunnelTrendsPersons.test_funnel_trend_persons_with_drop_off.1 @@ -623,7 +625,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=0 + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 ''' # --- # name: TestFunnelTrendsPersons.test_funnel_trend_persons_with_no_to_step.1 diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered.ambr index f5cf1a29bdffa..778829c29b4da 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered.ambr @@ -135,8 +135,8 @@ GROUP BY aggregation_target, steps, prop - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) GROUP BY prop) GROUP BY 
final_prop LIMIT 100 SETTINGS readonly=2, @@ -146,7 +146,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestFunnelUnorderedStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step @@ -299,8 +300,8 @@ GROUP BY aggregation_target, steps, prop - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) GROUP BY prop) GROUP BY final_prop LIMIT 100 SETTINGS readonly=2, @@ -310,7 +311,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestFunnelUnorderedStepsBreakdown.test_funnel_step_multiple_breakdown_snapshot @@ -449,8 +451,8 @@ GROUP BY aggregation_target, steps, prop - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) GROUP BY prop) GROUP BY final_prop LIMIT 100 SETTINGS readonly=2, @@ -460,7 +462,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestUnorderedFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events @@ -694,8 +697,8 @@ GROUP BY aggregation_target, steps, prop - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) GROUP BY prop) GROUP BY final_prop LIMIT 100 SETTINGS readonly=2, @@ -705,7 +708,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestUnorderedFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2 @@ -939,8 +943,8 @@ GROUP BY aggregation_target, steps, prop - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) GROUP BY prop) GROUP BY final_prop LIMIT 100 SETTINGS readonly=2, @@ -950,7 +954,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group @@ -1205,8 +1210,8 @@ GROUP BY aggregation_target, steps, prop - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) GROUP BY prop) GROUP BY final_prop LIMIT 100 SETTINGS readonly=2, @@ -1216,7 +1221,8 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=23622320128 + max_bytes_before_external_group_by=23622320128, + allow_experimental_analyzer=1 ''' # --- # name: 
TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.1 diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered_persons.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered_persons.ambr index a0e57d8fac881..b610dfb3f779d 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered_persons.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered_persons.ambr @@ -254,8 +254,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2, 3]), 0) ORDER BY aggregation_target ASC) AS source INNER JOIN @@ -337,8 +337,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [1, 2, 3]), 0) ORDER BY aggregation_target ASC) AS source))) GROUP BY person.id @@ -352,6 +352,7 @@ max_ast_elements=2000000, max_expanded_ast_elements=2000000, max_query_size=1048576, - max_bytes_before_external_group_by=0 + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 ''' # --- diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel.py b/posthog/hogql_queries/insights/funnels/test/test_funnel.py index 6b9d9e74a7bbf..2085579b5ba0c 100644 --- a/posthog/hogql_queries/insights/funnels/test/test_funnel.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel.py @@ -3816,7 +3816,7 @@ def test_smoke(self): aggregation_target, steps HAVING - equals(steps, max_steps) + equals(steps, max(max_steps)) LIMIT 100""", ) @@ -3887,6 +3887,6 @@ def test_smoke(self): aggregation_target, steps HAVING - equals(steps, max_steps)) + equals(steps, max(max_steps))) LIMIT 100""", ) diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py index 0fb294c6a64a4..7b86e305ca89e 100644 --- a/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py @@ -1192,15 +1192,17 @@ def test_funnel_step_breakdown_empty(self): {"id": "step three", "order": 2}, ], "breakdown_type": "hogql", - "breakdown": "IF(rowNumberInAllBlocks() % 2 = 0, NULL, 'foo')", # Simulate some empty breakdown values + "breakdown": "IF(distinct_id = 'user_two', NULL, 'foo')", # Simulate some empty breakdown values } query = cast(FunnelsQuery, filter_to_query(filters)) results = FunnelsQueryRunner(query=query, team=self.team).calculate().results self.assertEqual(len(results), 2) - self.assertEqual(results[0]["breakdown_value"], ["None"]) + self.assertEqual(results[0]["breakdown_value"], [""]) + self.assertEqual(results[0]["data"], [0.0, 100.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) 
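The snapshot churn above boils down to two coupled changes: the funnel HAVING clause now compares steps against max(max_steps), aggregating the window-function column explicitly instead of referencing it bare, and every affected query gains allow_experimental_analyzer=1 in its SETTINGS. Per the runner comment further down in this diff, funnel queries need ClickHouse's experimental analyzer until version 24.3 or above. A minimal sketch, not part of the patch, of how that setting is threaded into query execution, assuming query (an ast.SelectQuery) and team are already in scope:

from posthog.hogql.constants import HogQLGlobalSettings
from posthog.hogql.query import execute_hogql_query


def run_funnel_actors_query(query, team):
    # Force the experimental analyzer, mirroring the
    # insight_actors_query_runner.py change later in this diff.
    settings = HogQLGlobalSettings(allow_experimental_analyzer=True)
    return execute_hogql_query(
        query_type="InsightActorsQuery",
        query=query,  # ast.SelectQuery built by the funnel runner (assumed in scope)
        team=team,
        settings=settings,
    )
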
self.assertEqual(results[1]["breakdown_value"], ["foo"]) + self.assertEqual(results[1]["data"], [100.0, 0.0, 100.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) def test_funnel_step_breakdown_event_with_breakdown_limit(self): journeys_for( diff --git a/posthog/hogql_queries/insights/insight_actors_query_runner.py b/posthog/hogql_queries/insights/insight_actors_query_runner.py index 8b3e27df514a5..c07c1e3687ab0 100644 --- a/posthog/hogql_queries/insights/insight_actors_query_runner.py +++ b/posthog/hogql_queries/insights/insight_actors_query_runner.py @@ -1,6 +1,7 @@ from typing import cast, Optional from posthog.hogql import ast +from posthog.hogql.constants import HogQLGlobalSettings from posthog.hogql.query import execute_hogql_query from posthog.hogql_queries.insights.funnels.funnel_correlation_query_runner import FunnelCorrelationQueryRunner from posthog.hogql_queries.insights.funnels.funnels_query_runner import FunnelsQueryRunner @@ -107,6 +108,13 @@ def group_type_index(self) -> int | None: return None def calculate(self) -> HogQLQueryResponse: + settings = None + + # Funnel queries require the experimental analyzer to run correctly + # Can remove once clickhouse moves to version 24.3 or above + if isinstance(self.source_runner, FunnelsQueryRunner): + settings = HogQLGlobalSettings(allow_experimental_analyzer=True) + return execute_hogql_query( query_type="InsightActorsQuery", query=self.to_query(), @@ -114,4 +122,5 @@ def calculate(self) -> HogQLQueryResponse: timings=self.timings, modifiers=self.modifiers, limit_context=self.limit_context, + settings=settings, ) diff --git a/posthog/hogql_queries/insights/test/__snapshots__/test_insight_actors_query_runner.ambr b/posthog/hogql_queries/insights/test/__snapshots__/test_insight_actors_query_runner.ambr index 0600e7e58e3b9..43bd72ebe2b82 100644 --- a/posthog/hogql_queries/insights/test/__snapshots__/test_insight_actors_query_runner.ambr +++ b/posthog/hogql_queries/insights/test/__snapshots__/test_insight_actors_query_runner.ambr @@ -52,8 +52,8 @@ WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [2]), 0) ORDER BY aggregation_target ASC) AS source INNER JOIN @@ -88,8 +88,8 @@ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'US/Pacific')), lessOrEquals(toTimeZone(e.timestamp, 'US/Pacific'), toDateTime64('2020-01-19 23:59:59.999999', 6, 'US/Pacific'))), in(e.event, tuple('$pageview'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))) WHERE ifNull(equals(step_0, 1), 0))) GROUP BY aggregation_target, steps - HAVING ifNull(equals(steps, max_steps), isNull(steps) - and isNull(max_steps))) + HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) + and isNull(max(max_steps)))) WHERE ifNull(in(steps, [2]), 0) ORDER BY aggregation_target ASC) AS source))) GROUP BY person.id diff --git a/posthog/hogql_queries/test/test_query_runner.py b/posthog/hogql_queries/test/test_query_runner.py index 4db5cc85d39c0..c4fc6ecc34a0c 100644 --- a/posthog/hogql_queries/test/test_query_runner.py +++ b/posthog/hogql_queries/test/test_query_runner.py @@ -90,7 +90,7 @@ def test_cache_payload(self): "optimizeJoinedFilters": False, "personsOnEventsMode": "disabled", "bounceRatePageViewMode": "count_pageviews", - "sessionTableVersion": 
"v1", + "sessionTableVersion": "auto", }, "limit_context": "query", "query": {"kind": "TestQuery", "some_attr": "bla"}, @@ -109,7 +109,7 @@ def test_cache_key(self): runner = TestQueryRunner(query={"some_attr": "bla"}, team=team) cache_key = runner.get_cache_key() - self.assertEqual(cache_key, "cache_19dbcf2dbe2bd72239f37007f2eb6224") + self.assertEqual(cache_key, "cache_c4e20e19f3cad552478257f71f80b52f") def test_cache_key_runner_subclass(self): TestQueryRunner = self.setup_test_query_runner_class() @@ -123,7 +123,7 @@ class TestSubclassQueryRunner(TestQueryRunner): runner = TestSubclassQueryRunner(query={"some_attr": "bla"}, team=team) cache_key = runner.get_cache_key() - self.assertEqual(cache_key, "cache_325bbd17cd27dd556d765984ba993da0") + self.assertEqual(cache_key, "cache_db0fcd4797812983cbf9df57cd9f3032") def test_cache_key_different_timezone(self): TestQueryRunner = self.setup_test_query_runner_class() @@ -134,7 +134,7 @@ def test_cache_key_different_timezone(self): runner = TestQueryRunner(query={"some_attr": "bla"}, team=team) cache_key = runner.get_cache_key() - self.assertEqual(cache_key, "cache_c1d666350117520920dbc3cc9286195c") + self.assertEqual(cache_key, "cache_8c92e69a656cc68522e5b48a7304b97d") @mock.patch("django.db.transaction.on_commit") def test_cache_response(self, mock_on_commit): diff --git a/posthog/hogql_queries/web_analytics/stats_table.py b/posthog/hogql_queries/web_analytics/stats_table.py index 14b236da042dc..41f057f0ebb1e 100644 --- a/posthog/hogql_queries/web_analytics/stats_table.py +++ b/posthog/hogql_queries/web_analytics/stats_table.py @@ -24,6 +24,8 @@ PersonPropertyFilter, ) +BREAKDOWN_NULL_DISPLAY = "(none)" + class WebStatsTableQueryRunner(WebAnalyticsQueryRunner): query: WebStatsTableQuery @@ -468,8 +470,17 @@ def _counts_breakdown_value(self): case WebStatsBreakdown.INITIAL_CHANNEL_TYPE: return ast.Field(chain=["session", "$channel_type"]) case WebStatsBreakdown.INITIAL_UTM_SOURCE_MEDIUM_CAMPAIGN: - return parse_expr( - "concatWithSeparator(' / ', coalesce(nullIf(session.$entry_utm_source, ''), nullIf(session.$entry_referring_domain, ''), '(null)'), coalesce(nullIf(session.$entry_utm_medium, ''), '(null)'), coalesce(nullIf(session.$entry_utm_campaign, ''), '(null)'))" + return ast.Call( + name="concatWithSeparator", + args=[ + ast.Constant(value=" / "), + coalesce_with_null_display( + ast.Field(chain=["session", "$entry_utm_source"]), + ast.Field(chain=["session", "$entry_referring_domain"]), + ), + coalesce_with_null_display(ast.Field(chain=["session", "$entry_utm_medium"])), + coalesce_with_null_display(ast.Field(chain=["session", "$entry_utm_campaign"])), + ], ) case WebStatsBreakdown.BROWSER: return ast.Field(chain=["properties", "$browser"]) @@ -530,3 +541,7 @@ def _apply_path_cleaning(self, path_expr: ast.Expr) -> ast.Expr: ) return path_expr + + +def coalesce_with_null_display(*exprs: ast.Expr) -> ast.Expr: + return ast.Call(name="coalesce", args=[*exprs, ast.Constant(value=BREAKDOWN_NULL_DISPLAY)]) diff --git a/posthog/hogql_queries/web_analytics/test/test_web_stats_table.py b/posthog/hogql_queries/web_analytics/test/test_web_stats_table.py index d3c9a7c49b961..4034d07a82d26 100644 --- a/posthog/hogql_queries/web_analytics/test/test_web_stats_table.py +++ b/posthog/hogql_queries/web_analytics/test/test_web_stats_table.py @@ -768,6 +768,109 @@ def test_source_medium_campaign(self, session_table_version: SessionTableVersion ).results self.assertEqual( - [["google / (null) / (null)", 1, 1], ["news.ycombinator.com / referral / (null)", 1, 
1]], + [["google / (none) / (none)", 1, 1], ["news.ycombinator.com / referral / (none)", 1, 1]], + results, + ) + + @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]]) + def test_null_in_utm_tags(self, session_table_version: SessionTableVersion): + d1 = "d1" + s1 = str(uuid7("2024-06-26")) + + _create_person( + team_id=self.team.pk, + distinct_ids=[d1], + properties={ + "name": d1, + }, + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id=d1, + timestamp="2024-06-26", + properties={"$session_id": s1, "utm_source": "google"}, + ) + + d2 = "d2" + s2 = str(uuid7("2024-06-26")) + _create_person( + team_id=self.team.pk, + distinct_ids=[d2], + properties={ + "name": d2, + }, + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id=d2, + timestamp="2024-06-26", + properties={ + "$session_id": s2, + }, + ) + + results = self._run_web_stats_table_query( + "all", + "2024-06-27", + breakdown_by=WebStatsBreakdown.INITIAL_UTM_SOURCE, + session_table_version=session_table_version, + ).results + + self.assertEqual( + [["google", 1.0, 1.0], [None, 1.0, 1.0]], + results, + ) + + @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]]) + def test_is_not_set_filter(self, session_table_version: SessionTableVersion): + d1 = "d1" + s1 = str(uuid7("2024-06-26")) + + _create_person( + team_id=self.team.pk, + distinct_ids=[d1], + properties={ + "name": d1, + }, + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id=d1, + timestamp="2024-06-26", + properties={"$session_id": s1, "utm_source": "google"}, + ) + + d2 = "d2" + s2 = str(uuid7("2024-06-26")) + _create_person( + team_id=self.team.pk, + distinct_ids=[d2], + properties={ + "name": d2, + }, + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id=d2, + timestamp="2024-06-26", + properties={ + "$session_id": s2, + }, + ) + + results = self._run_web_stats_table_query( + "all", + "2024-06-27", + breakdown_by=WebStatsBreakdown.INITIAL_UTM_SOURCE, + properties=[EventPropertyFilter(key="utm_source", operator=PropertyOperator.IS_NOT_SET)], + session_table_version=session_table_version, + ).results + + self.assertEqual( + [[None, 1.0, 1.0]], results, ) diff --git a/posthog/migrations/0432_personlessdistinctid.py b/posthog/migrations/0432_personlessdistinctid.py new file mode 100644 index 0000000000000..96c6bcfc8717b --- /dev/null +++ b/posthog/migrations/0432_personlessdistinctid.py @@ -0,0 +1,32 @@ +# Generated by Django 4.2.11 on 2024-07-01 16:50 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0431_externaldataschema_sync_type_payload"), + ] + + operations = [ + migrations.CreateModel( + name="PersonlessDistinctId", + fields=[ + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ("distinct_id", models.CharField(max_length=400)), + ("is_merged", models.BooleanField(default=False)), + ("created_at", models.DateTimeField(auto_now_add=True)), + ( + "team", + models.ForeignKey(db_index=False, on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), + ), + ], + ), + migrations.AddConstraint( + model_name="personlessdistinctid", + constraint=models.UniqueConstraint( + fields=("team", "distinct_id"), name="unique personless distinct_id for team" + ), + ), + ] diff --git a/posthog/models/action/util.py b/posthog/models/action/util.py index 1e09758c43803..8fb0339075ed8 100644 --- a/posthog/models/action/util.py +++ 
b/posthog/models/action/util.py @@ -2,9 +2,9 @@ from typing import Literal, Optional from collections import Counter as TCounter -from posthog.constants import AUTOCAPTURE_EVENT, TREND_FILTER_TYPE_ACTIONS +from posthog.constants import AUTOCAPTURE_EVENT from posthog.hogql.hogql import HogQLContext -from posthog.models import Entity, Filter +from posthog.models import Filter from posthog.models.action import Action from posthog.models.action.action import ActionStepJSON from posthog.models.property import Property, PropertyIdentifier @@ -149,35 +149,6 @@ def filter_event( return conditions, params -def format_entity_filter( - team_id: int, - entity: Entity, - hogql_context: HogQLContext, - person_id_joined_alias: str, - prepend: str = "action", - filter_by_team=True, -) -> tuple[str, dict]: - if entity.type == TREND_FILTER_TYPE_ACTIONS: - action = entity.get_action() - entity_filter, params = format_action_filter( - team_id=team_id, - action=action, - prepend=prepend, - filter_by_team=filter_by_team, - person_id_joined_alias=person_id_joined_alias, - hogql_context=hogql_context, - ) - elif entity.id is None: - entity_filter = "1 = 1" - params = {} - else: - key = f"{prepend}_event" - entity_filter = f"event = %({key})s" - params = {key: entity.id} - - return entity_filter, params - - def get_action_tables_and_properties(action: Action) -> TCounter[PropertyIdentifier]: from posthog.models.property.util import extract_tables_and_properties diff --git a/posthog/models/filters/test/__snapshots__/test_filter.ambr b/posthog/models/filters/test/__snapshots__/test_filter.ambr index 04594a21b0f0e..99b4b95de8e77 100644 --- a/posthog/models/filters/test/__snapshots__/test_filter.ambr +++ b/posthog/models/filters/test/__snapshots__/test_filter.ambr @@ -357,16 +357,6 @@ AND "posthog_person"."team_id" = 2) ''' # --- -# name: TestDjangoPropertiesToQ.test_icontains_with_array_value.2 - ''' - SELECT "posthog_person"."uuid" - FROM "posthog_person" - WHERE (("posthog_person"."properties" -> '$key') > '["2"]'::jsonb - AND "posthog_person"."properties" ? 
'$key' - AND NOT (("posthog_person"."properties" -> '$key') = 'null'::jsonb) - AND "posthog_person"."team_id" = 2) - ''' -# --- # name: TestDjangoPropertiesToQ.test_person_relative_date_parsing_with_invalid_date ''' SELECT 1 AS "a" diff --git a/posthog/models/filters/test/test_filter.py b/posthog/models/filters/test/test_filter.py index eb99a3ac42941..d7901c721e96a 100644 --- a/posthog/models/filters/test/test_filter.py +++ b/posthog/models/filters/test/test_filter.py @@ -796,6 +796,218 @@ def test_person_relative_date_parsing(self): ) self.assertTrue(matched_person) + def test_person_matching_greater_than_filter(self): + person1_distinct_id = "example_id" + Person.objects.create( + team=self.team, + distinct_ids=[person1_distinct_id], + properties={"registration_ts": 5}, + ) + filter = Filter( + data={"properties": [{"key": "registration_ts", "value": "4", "type": "person", "operator": "gt"}]} + ) + + with self.assertNumQueries(1): + matched_person = ( + Person.objects.annotate( + **{ + "properties_registrationts_68f210b8c014e1b_type": Func( + F("properties__registration_ts"), + function="JSONB_TYPEOF", + output_field=CharField(), + ) + } + ) + .filter( + team_id=self.team.pk, + persondistinctid__distinct_id=person1_distinct_id, + ) + .filter(properties_to_Q(self.team.pk, filter.property_groups.flat)) + .exists() + ) + self.assertTrue(matched_person) + + def test_broken_person_filter_never_matching(self): + person1_distinct_id = "example_id" + Person.objects.create( + team=self.team, + distinct_ids=[person1_distinct_id], + properties={"registration_ts": 1716447600}, + ) + # This broken filter came from this issue: https://github.com/PostHog/posthog/issues/23213 + filter = Filter( + data={ + "properties": { + "type": "OR", + "values": [ + { + "type": "AND", + "values": [ + # This is the valid condition + { + "key": "registration_ts", + "type": "person", + "value": "1716274800", + "negation": False, + "operator": "gte", + }, + # This is the invalid condition (lte operator comparing against a list of values) + { + "key": "registration_ts", + "type": "person", + "value": ["1716447600"], + "negation": False, + "operator": "lte", + }, + ], + } + ], + } + } + ) + + with self.assertNumQueries(1): + matched_person = ( + Person.objects.annotate( + **{ + "properties_registrationts_68f210b8c014e1b_type": Func( + F("properties__registration_ts"), + function="JSONB_TYPEOF", + output_field=CharField(), + ) + } + ) + .filter( + team_id=self.team.pk, + persondistinctid__distinct_id=person1_distinct_id, + ) + .filter(properties_to_Q(self.team.pk, filter.property_groups.flat)) + .exists() + ) + # This shouldn't pass because we have an AND condition with a broken lte operator + # (we should never have a lte operator comparing against a list of values) + # So this should never match + self.assertFalse(matched_person) + + def test_broken_condition_does_not_break_entire_filter(self): + person1_distinct_id = "example_id" + Person.objects.create( + team=self.team, + distinct_ids=[person1_distinct_id], + properties={"registration_ts": 1716447600}, + ) + # Create a cohort with an OR filter that has an invalid condition + # (a lte operator comparing against a list of values) + # This should still evaluate to True, though, because the other condition is valid + cohort = Cohort.objects.create( + team=self.team, + name="Test OR Cohort", + filters={ + "properties": { + "type": "OR", + "values": [ + { + "type": "OR", + # This is the valid condition + "values": [ + { + "key": "registration_ts", + "type": "person", + 
"value": "1716274800", + "negation": False, + "operator": "gte", + }, + # This is the invalid condition + { + "key": "registration_ts", + "type": "person", + "value": ["1716447600"], + "negation": False, + "operator": "lte", + }, + ], + } + ], + } + }, + ) + filter = Filter(data={"properties": [{"key": "id", "value": cohort.pk, "type": "cohort"}]}) + with self.assertNumQueries(2): + matched_person = ( + Person.objects.annotate( + **{ + "properties_registrationts_68f210b8c014e1b_type": Func( + F("properties__registration_ts"), + function="JSONB_TYPEOF", + output_field=CharField(), + ) + } + ) + .filter( + team_id=self.team.pk, + persondistinctid__distinct_id=person1_distinct_id, + ) + .filter(properties_to_Q(self.team.pk, filter.property_groups.flat)) + .exists() + ) + # This should now pass because the cohort filter still has one valid condition + self.assertTrue(matched_person) + + def test_person_matching_real_filter(self): + person1_distinct_id = "example_id" + Person.objects.create( + team=self.team, + distinct_ids=[person1_distinct_id], + properties={"registration_ts": 1716447600}, + ) + filter = Filter( + data={ + "properties": { + "type": "OR", + "values": [ + { + "type": "AND", + "values": [ + { + "key": "registration_ts", + "type": "person", + "value": "1716274800", + "negation": False, + "operator": "gt", + }, + { + "key": "registration_ts", + "type": "person", + "value": ["1716447600"], + "negation": False, + "operator": "exact", + }, + ], + } + ], + } + } + ) + with self.assertNumQueries(1): + matched_person = ( + Person.objects.annotate( + **{ + "properties_registrationts_68f210b8c014e1b_type": Func( + F("properties__registration_ts"), + function="JSONB_TYPEOF", + output_field=CharField(), + ) + } + ) + .filter( + team_id=self.team.pk, + persondistinctid__distinct_id=person1_distinct_id, + ) + .filter(properties_to_Q(self.team.pk, filter.property_groups.flat)) + .exists() + ) + self.assertTrue(matched_person) + def test_person_relative_date_parsing_with_override_property(self): person1_distinct_id = "example_id" Person.objects.create( @@ -978,20 +1190,6 @@ def filter_persons_with_annotation(filter: Filter, team: Team): ) self.assertEqual(len(filter_persons_with_annotation(filter, self.team)), 1) - filter = Filter( - data={ - "properties": [ - { - "type": "person", - "key": "$key", - "value": ["2"], - "operator": "gt", - } - ] - } - ) - self.assertEqual(len(filter_persons_with_annotation(filter, self.team)), 0) - def filter_persons_with_property_group( filter: Filter, team: Team, property_overrides: Optional[dict[str, Any]] = None diff --git a/posthog/models/person/person.py b/posthog/models/person/person.py index 72a5bd7c79948..7ccd7acabc650 100644 --- a/posthog/models/person/person.py +++ b/posthog/models/person/person.py @@ -128,6 +128,19 @@ class Meta: version: models.BigIntegerField = models.BigIntegerField(null=True, blank=True) +class PersonlessDistinctId(models.Model): + class Meta: + constraints = [ + models.UniqueConstraint(fields=["team", "distinct_id"], name="unique personless distinct_id for team") + ] + + id: models.BigAutoField = models.BigAutoField(primary_key=True) + team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE, db_index=False) + distinct_id: models.CharField = models.CharField(max_length=400) + is_merged: models.BooleanField = models.BooleanField(default=False) + created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True) + + class PersonOverrideMapping(models.Model): """A model of persons to be 
overriden in merge or merge-like events.""" diff --git a/posthog/models/raw_sessions/sql.py b/posthog/models/raw_sessions/sql.py index 184ff625cfd00..9a144f278c2e8 100644 --- a/posthog/models/raw_sessions/sql.py +++ b/posthog/models/raw_sessions/sql.py @@ -5,13 +5,6 @@ ReplicationScheme, AggregatingMergeTree, ) -from posthog.settings import TEST - -# the date of the day after this PR will be merged, this allows us to run the backfill script on complete days -# with a condition like toYYYYMMDD(timestamp) < X -INGEST_FROM_DATE = "toYYYYMMDD(timestamp) >= 20240626" -if TEST: - INGEST_FROM_DATE = "toYYYYMMDD(timestamp) >= 0" TABLE_BASE_NAME = "raw_sessions" RAW_SESSIONS_DATA_TABLE = lambda: f"sharded_{TABLE_BASE_NAME}" @@ -329,11 +322,12 @@ def source_int_column(column_name: str) -> str: -- replay false as maybe_has_session_replay FROM {database}.sharded_events -WHERE and( - bitAnd(bitShiftRight(toUInt128(accurateCastOrNull(`$session_id`, 'UUID')), 76), 0xF) == 7, -- has a session id and is valid uuidv7 - {INGEST_FROM_DATE} -) -GROUP BY session_id_v7, team_id +WHERE bitAnd(bitShiftRight(toUInt128(accurateCastOrNull(`$session_id`, 'UUID')), 76), 0xF) == 7 -- has a session id and is valid uuidv7) +GROUP BY + team_id, + toStartOfHour(fromUnixTimestamp(intDiv(toUInt64(bitShiftRight(session_id_v7, 80)), 1000))), + cityHash64(session_id_v7), + session_id_v7 """.format( database=settings.CLICKHOUSE_DATABASE, current_url=source_url_column("$current_url"), @@ -370,7 +364,6 @@ def source_int_column(column_name: str) -> str: mc_cid=source_string_column("mc_cid"), igshid=source_string_column("igshid"), ttclid=source_string_column("ttclid"), - INGEST_FROM_DATE=INGEST_FROM_DATE, ) ) diff --git a/posthog/queries/base.py b/posthog/queries/base.py index e5cf6e717444b..3d7f52e7b98d8 100644 --- a/posthog/queries/base.py +++ b/posthog/queries/base.py @@ -240,9 +240,16 @@ def empty_or_null_with_value_q( else: parsed_value = None if operator in ("gt", "gte", "lt", "lte"): + if isinstance(value, list): + # If the value is a list for these operators, + # we should not return any results, as we can't compare a list to a single value + # TODO: should we try and parse each value in the list and return results based on that? 
+ return Q(pk__isnull=True) + + # At this point, we know that the value is not a list, so we can safely parse it + # There might still be exceptions, but we're catching them below try: - # try to parse even if arrays can't be parsed, the catch will handle it - parsed_value = float(value) # type: ignore + parsed_value = float(value) except Exception: pass diff --git a/posthog/schema.py b/posthog/schema.py index 38e3643d3a3b6..c46df6df52699 100644 --- a/posthog/schema.py +++ b/posthog/schema.py @@ -241,7 +241,7 @@ class DatabaseSchemaSchema(BaseModel): last_synced_at: Optional[str] = None name: str should_sync: bool - status: str + status: Optional[str] = None class DatabaseSchemaSource(BaseModel): diff --git a/posthog/session_recordings/queries/session_recording_list_from_filters.py b/posthog/session_recordings/queries/session_recording_list_from_filters.py index 97ab77c755036..24d16e0f2b415 100644 --- a/posthog/session_recordings/queries/session_recording_list_from_filters.py +++ b/posthog/session_recordings/queries/session_recording_list_from_filters.py @@ -6,6 +6,7 @@ from posthog.hogql.ast import Constant, CompareOperation from posthog.hogql.parser import parse_select from posthog.hogql.property import entity_to_expr, property_to_expr +from posthog.hogql.query import execute_hogql_query from posthog.hogql_queries.insights.paginators import HogQLHasMorePaginator from posthog.models import Team, Property from posthog.models.filters.session_recordings_filter import SessionRecordingsFilter @@ -152,7 +153,8 @@ def _where_predicates(self) -> Union[ast.And, ast.Or]: if person_id_compare_operation: exprs.append(person_id_compare_operation) - if self._filter.session_ids: + # we check for session_ids type not for truthiness since we want to allow empty lists + if isinstance(self._filter.session_ids, list): exprs.append( ast.CompareOperation( op=ast.CompareOperationOp.In, @@ -181,7 +183,7 @@ def _where_predicates(self) -> Union[ast.And, ast.Or]: optional_exprs: list[ast.Expr] = [] # if in PoE mode then we should be pushing person property queries into here - events_sub_query = EventsSubQuery(self._team, self._filter, self.ttl_days).get_query() + events_sub_query = ReplayFiltersEventsSubQuery(self._team, self._filter).get_query_for_session_id_matching() if events_sub_query: optional_exprs.append( ast.CompareOperation( @@ -193,7 +195,7 @@ def _where_predicates(self) -> Union[ast.And, ast.Or]: # we want to avoid a join to persons since we don't ever need to select from them, # so we create our own persons sub query here - # if PoE mode is on then this will be handled in the events subquery and we don't need to do anything here + # if PoE mode is on then this will be handled in the events subquery, and we don't need to do anything here person_subquery = PersonsPropertiesSubQuery(self._team, self._filter, self.ttl_days).get_query() if person_subquery: optional_exprs.append( @@ -426,15 +428,23 @@ def get_query(self) -> ast.SelectQuery | ast.SelectUnionQuery | None: ) -class EventsSubQuery: +class ReplayFiltersEventsSubQuery: _team: Team _filter: SessionRecordingsFilter - _ttl_days: int - def __init__(self, team: Team, filter: SessionRecordingsFilter, ttl_days: int): + @property + def ttl_days(self): + return ttl_days(self._team) + + def __init__( + self, + team: Team, + filter: SessionRecordingsFilter, + hogql_query_modifiers: Optional[HogQLQueryModifiers] = None, + ): self._team = team self._filter = filter - self._ttl_days = ttl_days + self._hogql_query_modifiers = hogql_query_modifiers 
@cached_property def _event_predicates(self): @@ -459,19 +469,43 @@ def _event_predicates(self): return event_exprs, list(event_names) - def get_query(self) -> ast.SelectQuery | ast.SelectUnionQuery | None: + def _select_from_events(self, select_expr: ast.Expr) -> ast.SelectQuery: + return ast.SelectQuery( + select=[select_expr], + select_from=ast.JoinExpr(table=ast.Field(chain=["events"])), + where=self._where_predicates(), + having=self._having_predicates(), + group_by=[ast.Field(chain=["$session_id"])], + ) + + def get_query_for_session_id_matching(self) -> ast.SelectQuery | ast.SelectUnionQuery | None: use_poe = poe_is_active(self._team) and self.person_properties if self._filter.entities or self.event_properties or use_poe: - return ast.SelectQuery( - select=[ast.Alias(alias="session_id", expr=ast.Field(chain=["$session_id"]))], - select_from=ast.JoinExpr(table=ast.Field(chain=["events"])), - where=self._where_predicates(), - having=self._having_predicates(), - group_by=[ast.Field(chain=["$session_id"])], - ) + return self._select_from_events(ast.Alias(alias="session_id", expr=ast.Field(chain=["$session_id"]))) else: return None + def get_query_for_event_id_matching(self) -> ast.SelectQuery | ast.SelectUnionQuery: + return self._select_from_events(ast.Call(name="groupUniqArray", args=[ast.Field(chain=["uuid"])])) + + def get_event_ids_for_session(self) -> SessionRecordingQueryResult: + query = self.get_query_for_event_id_matching() + + hogql_query_response = execute_hogql_query( + query=query, + team=self._team, + query_type="SessionRecordingMatchingEventsForSessionQuery", + modifiers=self._hogql_query_modifiers, + ) + + flattened_results = [str(uuid) for row in hogql_query_response.results for uuid in row[0]] + + return SessionRecordingQueryResult( + results=flattened_results, + has_more_recording=False, + timings=hogql_query_response.timings, + ) + def _where_predicates(self) -> ast.Expr: exprs: list[ast.Expr] = [ ast.Call( @@ -482,7 +516,7 @@ def _where_predicates(self) -> ast.Expr: ast.CompareOperation( op=ast.CompareOperationOp.GtEq, left=ast.Field(chain=["timestamp"]), - right=ast.Constant(value=datetime.now() - timedelta(days=self._ttl_days)), + right=ast.Constant(value=datetime.now() - timedelta(days=self.ttl_days)), ), ast.CompareOperation( op=ast.CompareOperationOp.LtEq, diff --git a/posthog/session_recordings/queries/session_recording_list_from_replay_summary.py b/posthog/session_recordings/queries/session_recording_list_from_replay_summary.py deleted file mode 100644 index 82db40b4e660b..0000000000000 --- a/posthog/session_recordings/queries/session_recording_list_from_replay_summary.py +++ /dev/null @@ -1,785 +0,0 @@ -import dataclasses -import re -from datetime import datetime, timedelta -from typing import Any, Literal, NamedTuple, Union - -from django.conf import settings -from sentry_sdk import capture_exception - -from posthog.client import sync_execute -from posthog.constants import TREND_FILTER_TYPE_ACTIONS, PropertyOperatorType -from posthog.models import Entity, Team -from posthog.models.action.util import format_entity_filter -from posthog.models.filters.mixins.utils import cached_property -from posthog.models.filters.session_recordings_filter import SessionRecordingsFilter -from posthog.models.property import PropertyGroup -from posthog.models.property.util import parse_prop_grouped_clauses -from posthog.models.team import PersonsOnEventsMode -from posthog.queries.event_query import EventQuery -from posthog.queries.util import PersonPropertiesMode -from 
posthog.session_recordings.queries.session_replay_events import ttl_days - - -@dataclasses.dataclass(frozen=True) -class SummaryEventFiltersSQL: - having_conditions: str - having_select: str - where_conditions: str - params: dict[str, Any] - - -class SessionRecordingQueryResult(NamedTuple): - results: list - has_more_recording: bool - - -def _get_recording_start_time_clause(recording_filters: SessionRecordingsFilter) -> tuple[str, dict[str, Any]]: - start_time_clause = "" - start_time_params = {} - if recording_filters.date_from: - start_time_clause += "\nAND start_time >= %(start_time)s" - start_time_params["start_time"] = recording_filters.date_from - if recording_filters.date_to: - start_time_clause += "\nAND start_time <= %(end_time)s" - start_time_params["end_time"] = recording_filters.date_to - return start_time_clause, start_time_params - - -def _get_order_by_clause(filter_order: str | None) -> str: - order_by = filter_order or "start_time" - return f"ORDER BY {order_by} DESC" - - -def _get_filter_by_log_text_session_ids_clause( - team: Team, recording_filters: SessionRecordingsFilter, column_name="session_id" -) -> tuple[str, dict[str, Any]]: - if not recording_filters.console_search_query: - return "", {} - - log_query, log_params = LogQuery(team=team, filter=recording_filters).get_query() - - # we return this _even_ if there are no matching ids since if there are no matching ids - # then no sessions can match... - # sorted so that snapshots are consistent - return f'AND "{column_name}" in ({log_query}) as log_text_matching', log_params - - -def _get_filter_by_provided_session_ids_clause( - recording_filters: SessionRecordingsFilter, column_name="session_id" -) -> tuple[str, dict[str, Any]]: - if recording_filters.session_ids is None: - return "", {} - - return f'AND "{column_name}" in %(session_ids)s', {"session_ids": recording_filters.session_ids} - - -class LogQuery: - _filter: SessionRecordingsFilter - _team_id: int - _team: Team - - def __init__( - self, - team: Team, - filter: SessionRecordingsFilter, - ): - self._filter = filter - self._team = team - self._team_id = team.pk - - _rawQuery = """ - SELECT distinct log_source_id as session_id - FROM log_entries - PREWHERE team_id = %(team_id)s - -- regardless of what other filters are applied - -- limit by storage TTL - AND timestamp >= %(clamped_to_storage_ttl)s - -- make sure we don't get the occasional unexpected future event - AND timestamp <= now() - -- and then any time filter for the events query - {events_timestamp_clause} - WHERE 1=1 - {console_log_clause} - AND positionCaseInsensitive(message, %(console_search_query)s) > 0 - """ - - @property - def ttl_days(self): - return ttl_days(self._team) - - @cached_property - def _get_events_timestamp_clause(self) -> tuple[str, dict[str, Any]]: - timestamp_clause = "" - timestamp_params = {} - if self._filter.date_from: - timestamp_clause += "\nAND timestamp >= %(event_start_time)s" - timestamp_params["event_start_time"] = self._filter.date_from - timedelta(minutes=2) - if self._filter.date_to: - timestamp_clause += "\nAND timestamp <= %(event_end_time)s" - timestamp_params["event_end_time"] = self._filter.date_to - return timestamp_clause, timestamp_params - - @staticmethod - def _get_console_log_clause( - console_logs_filter: list[Literal["error", "warn", "info"]], - ) -> tuple[str, dict[str, Any]]: - return ( - ( - f"AND level in %(console_logs_levels)s", - {"console_logs_levels": console_logs_filter}, - ) - if console_logs_filter - else ("", {}) - ) - - def get_query(self) 
-> tuple[str, dict]: - if not self._filter.console_search_query: - return "", {} - - ( - events_timestamp_clause, - events_timestamp_params, - ) = self._get_events_timestamp_clause - console_log_clause, console_log_params = self._get_console_log_clause(self._filter.console_logs_filter) - - return self._rawQuery.format( - events_timestamp_clause=events_timestamp_clause, - console_log_clause=console_log_clause, - ), { - "team_id": self._team_id, - "clamped_to_storage_ttl": (datetime.now() - timedelta(days=self.ttl_days)), - "console_search_query": self._filter.console_search_query, - **events_timestamp_params, - **console_log_params, - } - - -class ActorsQuery(EventQuery): - _filter: SessionRecordingsFilter - - def __init__( - self, - team: Team, - **kwargs, - ): - person_on_events_mode = team.person_on_events_mode - super().__init__( - **kwargs, - team=team, - person_on_events_mode=person_on_events_mode, - ) - - # we have to implement this from EventQuery but don't need it - def _determine_should_join_distinct_ids(self) -> None: - pass - - # we have to implement this from EventQuery but don't need it - def _data_to_return(self, results: list[Any]) -> list[dict[str, Any]]: - pass - - _raw_persons_query = """ - SELECT distinct_id, argMax(person_id, version) as current_person_id - {select_person_props} - FROM person_distinct_id2 as pdi - {filter_persons_clause} - WHERE team_id = %(team_id)s - {prop_filter_clause} - {all_distinct_ids_that_might_match_a_person} - GROUP BY distinct_id - HAVING - argMax(is_deleted, version) = 0 - {prop_having_clause} - {filter_by_person_uuid_condition} - """ - - def get_query(self) -> tuple[str, dict[str, Any]]: - # we don't support PoE V1 - hopefully that's ok - if self._person_on_events_mode == PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS: - return "", {} - - prop_query, prop_params = self._get_prop_groups( - PropertyGroup( - type=PropertyOperatorType.AND, - values=[g for g in self._filter.property_groups.flat if g.type == "person" or "cohort" in g.type], - ), - person_id_joined_alias=f"{self.DISTINCT_ID_TABLE_ALIAS}.person_id", - ) - - # hogql person props queries rely on an aggregated column and so have to go in the having clause - # not the where clause - having_prop_query, having_prop_params = self._get_prop_groups( - PropertyGroup( - type=PropertyOperatorType.AND, - values=[ - g for g in self._filter.property_groups.flat if g.type == "hogql" and "person.properties" in g.key - ], - ), - person_id_joined_alias=f"{self.DISTINCT_ID_TABLE_ALIAS}.current_person_id", - ) - - person_query, person_query_params = self._get_person_query() - should_join_persons = self._filter.person_uuid or person_query - - if not should_join_persons: - return "", {} - else: - filter_persons_clause = person_query or "" - filter_by_person_uuid_condition = ( - "and current_person_id = %(person_uuid)s" if self._filter.person_uuid else "" - ) - all_distinct_ids_that_might_match_a_person = ( - """ - AND distinct_id IN ( - SELECT distinct_id - FROM person_distinct_id2 - WHERE team_id = %(team_id)s - AND person_id = %(person_uuid)s) as all_distinct_ids_that_might_match_a_person - """ - if self._filter.person_uuid - else "" - ) - - return self._raw_persons_query.format( - filter_persons_clause=filter_persons_clause, - select_person_props=( - ", argMax(person_props, version) as person_props" if "person_props" in filter_persons_clause else "" - ), - prop_filter_clause=prop_query, - prop_having_clause=having_prop_query, - 
filter_by_person_uuid_condition=filter_by_person_uuid_condition, - all_distinct_ids_that_might_match_a_person=all_distinct_ids_that_might_match_a_person, - ), { - "team_id": self._team_id, - **person_query_params, - "person_uuid": self._filter.person_uuid, - **prop_params, - **having_prop_params, - } - - -class SessionIdEventsQuery(EventQuery): - _filter: SessionRecordingsFilter - - def __init__( - self, - team: Team, - **kwargs, - ): - person_on_events_mode = team.person_on_events_mode - super().__init__( - **kwargs, - team=team, - person_on_events_mode=person_on_events_mode, - ) - - # we have to implement this from EventQuery but don't need it - def _determine_should_join_distinct_ids(self) -> None: - pass - - # we have to implement this from EventQuery but don't need it - def _data_to_return(self, results: list[Any]) -> list[dict[str, Any]]: - pass - - def _determine_should_join_events(self): - filters_by_event_or_action = self._filter.entities and len(self._filter.entities) > 0 - # for e.g. test account filters might have event properties without having an event or action filter - has_event_property_filters = ( - len( - [ - pg - for pg in self._filter.property_groups.flat - # match when it is an event property filter - # or if its hogql and the key contains "properties." but not "person.properties." - # it's ok to match if there's both "properties." and "person.properties." in the key - # but not when its only "person.properties." - if pg.type == "event" or pg.type == "hogql" and re.search(r"(? 0 - ) - - has_poe_filters = ( - self._person_on_events_mode == PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS - and len( - [ - pg - for pg in self._filter.property_groups.flat - if pg.type == "person" or (pg.type == "hogql" and "person.properties" in pg.key) - ] - ) - > 0 - ) - - has_poe_person_filter = ( - self._person_on_events_mode == PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS - and self._filter.person_uuid - ) - - return filters_by_event_or_action or has_event_property_filters or has_poe_filters or has_poe_person_filter - - @property - def ttl_days(self): - return ttl_days(self._team) - - _raw_events_query = """ - {context_comment} - SELECT - {select_event_ids} - {event_filter_having_events_select} - `$session_id` - FROM events e - {groups_query} - -- sometimes we have to join on persons so we can access e.g. 
person_props in filters - {persons_join} - PREWHERE - team_id = %(team_id)s - -- regardless of what other filters are applied - -- limit by storage TTL - AND e.timestamp >= %(clamped_to_storage_ttl)s - -- make sure we don't get the occasional unexpected future event - AND e.timestamp <= now() - -- and then any time filter for the events query - {events_timestamp_clause} - WHERE - notEmpty(`$session_id`) - {event_filter_where_conditions} - {prop_filter_clause} - {provided_session_ids_clause} - -- other times we can check distinct id against a sub query which should be faster than joining - {persons_sub_query} - GROUP BY `$session_id` - HAVING 1=1 {event_filter_having_events_condition} - """ - - def format_event_filter(self, entity: Entity, prepend: str, team_id: int) -> tuple[str, dict[str, Any]]: - filter_sql, params = format_entity_filter( - team_id=team_id, - entity=entity, - prepend=prepend, - filter_by_team=False, - person_id_joined_alias=f"{self.DISTINCT_ID_TABLE_ALIAS}.person_id", - hogql_context=self._filter.hogql_context, - ) - - filters, filter_params = parse_prop_grouped_clauses( - team_id=team_id, - property_group=entity.property_groups, - prepend=prepend, - allow_denormalized_props=True, - has_person_id_joined=True, - person_properties_mode=( - PersonPropertiesMode.DIRECT_ON_EVENTS_WITH_POE_V2 - if self._person_on_events_mode == PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS - else PersonPropertiesMode.USING_PERSON_PROPERTIES_COLUMN - ), - hogql_context=self._filter.hogql_context, - ) - filter_sql += f" {filters}" - params = {**params, **filter_params} - - return filter_sql, params - - @cached_property - def build_event_filters(self) -> SummaryEventFiltersSQL: - event_names_to_filter: list[Union[int, str]] = [] - params: dict = {} - condition_sql = "" - - for index, entity in enumerate(self._filter.entities): - if entity.type == TREND_FILTER_TYPE_ACTIONS: - action = entity.get_action() - # NOTE: Do we need a short circuit here for "none" - i.e. all events? 
- event_names_to_filter.extend( - [ae for ae in action.get_step_events() if ae and ae not in event_names_to_filter] - ) - else: - if entity.id and entity.id not in event_names_to_filter: - event_names_to_filter.append(entity.id) - - ( - this_entity_condition_sql, - this_entity_filter_params, - ) = self.format_event_filter(entity, prepend=f"event_matcher_{index}", team_id=self._team_id) - joining = "OR" if index > 0 else "" - condition_sql += f"{joining} {this_entity_condition_sql}" - # wrap in smooths to constrain the scope of the OR - condition_sql = f"( {condition_sql} )" - params = {**params, **this_entity_filter_params} - - params = {**params, "event_names": list(event_names_to_filter)} - - if len(event_names_to_filter) == 0: - # using "All events" - having_conditions = "" - having_select = "" - else: - having_conditions = "AND hasAll(event_names, %(event_names)s)" - having_select = """ - -- select the unique events in this session to support filtering sessions by presence of an event - groupUniqArray(event) as event_names,""" - - if self._person_on_events_mode == PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS: - person_id_clause, person_id_params = self._get_person_id_clause - condition_sql += person_id_clause - params = {**params, **person_id_params} - - condition_sql = ( - f" AND {condition_sql}" if condition_sql and not condition_sql.startswith("AND") else condition_sql - ) - - return SummaryEventFiltersSQL( - having_conditions=having_conditions, - having_select=having_select, - where_conditions=f"{condition_sql}" if condition_sql else "", - params=params, - ) - - def _get_groups_query(self) -> tuple[str, dict]: - try: - from ee.clickhouse.queries.groups_join_query import GroupsJoinQuery - except ImportError: - # if EE not available then we use a no-op version - from posthog.queries.groups_join_query import GroupsJoinQuery - - return GroupsJoinQuery( - self._filter, - self._team_id, - self._column_optimizer, - person_on_events_mode=self._person_on_events_mode, - ).get_join_query() - - # We want to select events beyond the range of the recording to handle the case where - # a recording spans the time boundaries - @cached_property - def _get_events_timestamp_clause(self) -> tuple[str, dict[str, Any]]: - timestamp_clause = "" - timestamp_params = {} - if self._filter.date_from: - timestamp_clause += "\nAND timestamp >= %(event_start_time)s" - timestamp_params["event_start_time"] = self._filter.date_from - timedelta(hours=12) - if self._filter.date_to: - timestamp_clause += "\nAND timestamp <= %(event_end_time)s" - timestamp_params["event_end_time"] = self._filter.date_to + timedelta(hours=12) - return timestamp_clause, timestamp_params - - def get_query(self, select_event_ids: bool = False) -> tuple[str, dict[str, Any]]: - if not self._determine_should_join_events(): - return "", {} - - base_params = { - "team_id": self._team_id, - "clamped_to_storage_ttl": (datetime.now() - timedelta(days=self.ttl_days)), - } - - _, recording_start_time_params = _get_recording_start_time_clause(self._filter) - ( - provided_session_ids_clause, - provided_session_ids_params, - ) = _get_filter_by_provided_session_ids_clause(recording_filters=self._filter, column_name="$session_id") - - event_filters = self.build_event_filters - event_filters_params = event_filters.params - ( - events_timestamp_clause, - events_timestamp_params, - ) = self._get_events_timestamp_clause - - groups_query, groups_params = self._get_groups_query() - - # these will be applied to the events table, - # so we only 
want property filters that make sense in that context - prop_query, prop_params = self._get_prop_groups( - PropertyGroup( - type=PropertyOperatorType.AND, - values=[ - g - for g in self._filter.property_groups.flat - if ( - self._person_on_events_mode == PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS - and g.type == "person" - ) - or ( - (g.type == "hogql" and "person.properties" not in g.key) - or (g.type != "hogql" and "cohort" not in g.type and g.type != "person") - ) - ], - ), - person_id_joined_alias=f"{self.DISTINCT_ID_TABLE_ALIAS}.person_id", - # TRICKY: we saw unusual memory usage behavior in EU clickhouse cluster - # when allowing use of denormalized properties in this query - # it is likely this can be returned to the default of True in future - # but would need careful monitoring - allow_denormalized_props=settings.ALLOW_DENORMALIZED_PROPS_IN_LISTING, - person_properties_mode=( - PersonPropertiesMode.DIRECT_ON_EVENTS_WITH_POE_V2 - if self._person_on_events_mode == PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS - else PersonPropertiesMode.USING_PERSON_PROPERTIES_COLUMN - ), - ) - - ( - persons_join, - persons_select_params, - persons_sub_query, - ) = self._persons_join_or_subquery(event_filters, prop_query) - - return ( - self._raw_events_query.format( - select_event_ids="groupArray(uuid) as event_ids," if select_event_ids else "", - event_filter_where_conditions=event_filters.where_conditions, - event_filter_having_events_condition=event_filters.having_conditions, - event_filter_having_events_select=event_filters.having_select, - events_timestamp_clause=events_timestamp_clause, - prop_filter_clause=prop_query, - provided_session_ids_clause=provided_session_ids_clause, - persons_join=persons_join, - persons_sub_query=persons_sub_query, - groups_query=groups_query, - context_comment=f"-- running in PoE Mode: {self._person_on_events_mode}", - ), - { - **base_params, - **recording_start_time_params, - **provided_session_ids_params, - **events_timestamp_params, - **event_filters_params, - **prop_params, - **persons_select_params, - **groups_params, - }, - ) - - def _persons_join_or_subquery(self, event_filters, prop_query): - persons_select, persons_select_params = ActorsQuery(filter=self._filter, team=self._team).get_query() - persons_join = "" - persons_sub_query = "" - if persons_select: - # we want to join as infrequently as possible so only join if there are filters that expect it - if ( - "person_props" in prop_query - or "pdi.person_id" in prop_query - or "person_props" in event_filters.where_conditions - ): - persons_join = f"JOIN ({persons_select}) as pdi on pdi.distinct_id = e.distinct_id" - else: - persons_sub_query = ( - f"AND e.distinct_id in (select distinct_id from ({persons_select}) as events_persons_sub_query)" - ) - return persons_join, persons_select_params, persons_sub_query - - @cached_property - def _get_person_id_clause(self) -> tuple[str, dict[str, Any]]: - person_id_clause = "" - person_id_params = {} - if self._filter.person_uuid: - person_id_clause = "AND person_id = %(person_uuid)s" - person_id_params = {"person_uuid": self._filter.person_uuid} - return person_id_clause, person_id_params - - def matching_events(self) -> list[str]: - self._filter.hogql_context.modifiers.personsOnEventsMode = self._person_on_events_mode - query, query_params = self.get_query(select_event_ids=True) - query_results = sync_execute(query, {**query_params, **self._filter.hogql_context.values}) - results = [row[0] for row in query_results] - # flatten and 
return results - return [item for sublist in results for item in sublist] - - -class SessionRecordingListFromReplaySummary(EventQuery): - # we have to implement this from EventQuery but don't need it - def _determine_should_join_distinct_ids(self) -> None: - pass - - _filter: SessionRecordingsFilter - SESSION_RECORDINGS_DEFAULT_LIMIT = 50 - - def __init__( - self, - team=Team, - **kwargs, - ): - person_on_events_mode = team.person_on_events_mode - super().__init__( - **kwargs, - team=team, - person_on_events_mode=person_on_events_mode, - ) - - @property - def ttl_days(self): - return ttl_days(self._team) - - _session_recordings_query: str = """ - {context_comment} - SELECT - s.session_id, - any(s.team_id), - any(s.distinct_id), - min(s.min_first_timestamp) as start_time, - max(s.max_last_timestamp) as end_time, - dateDiff('SECOND', start_time, end_time) as duration, - argMinMerge(s.first_url) as first_url, - sum(s.click_count), - sum(s.keypress_count), - sum(s.mouse_activity_count), - sum(s.active_milliseconds)/1000 as active_seconds, - duration-active_seconds as inactive_seconds, - sum(s.console_log_count) as console_log_count, - sum(s.console_warn_count) as console_warn_count, - sum(s.console_error_count) as console_error_count - FROM session_replay_events s - WHERE s.team_id = %(team_id)s - -- regardless of what other filters are applied - -- limit by storage TTL - AND s.min_first_timestamp >= %(clamped_to_storage_ttl)s - -- we can filter on the pre-aggregated timestamp columns - -- because any not-the-lowest min value is _more_ greater than the min value - -- and any not-the-highest max value is _less_ lower than the max value - AND s.min_first_timestamp >= %(start_time)s - AND s.min_first_timestamp <= %(end_time)s - {persons_sub_query} - {events_sub_query} - {provided_session_ids_clause} - {log_matching_session_ids_clause} - GROUP BY session_id - HAVING 1=1 {duration_clause} {console_log_clause} - {order_by_clause} - LIMIT %(limit)s OFFSET %(offset)s - """ - - @staticmethod - def _data_to_return(results: list[Any]) -> list[dict[str, Any]]: - default_columns = [ - "session_id", - "team_id", - "distinct_id", - "start_time", - "end_time", - "duration", - "first_url", - "click_count", - "keypress_count", - "mouse_activity_count", - "active_seconds", - "inactive_seconds", - "console_log_count", - "console_warn_count", - "console_error_count", - ] - - return [ - { - **dict(zip(default_columns, row[: len(default_columns)])), - } - for row in results - ] - - def _paginate_results(self, session_recordings) -> SessionRecordingQueryResult: - more_recordings_available = False - if len(session_recordings) > self.limit: - more_recordings_available = True - session_recordings = session_recordings[0 : self.limit] - return SessionRecordingQueryResult(session_recordings, more_recordings_available) - - def run(self) -> SessionRecordingQueryResult: - try: - self._filter.hogql_context.modifiers.personsOnEventsMode = self._person_on_events_mode - query, query_params = self.get_query() - - query_results = sync_execute(query, {**query_params, **self._filter.hogql_context.values}) - session_recordings = self._data_to_return(query_results) - return self._paginate_results(session_recordings) - except Exception as ex: - # error here weren't making it to sentry, let's be explicit - capture_exception(ex, tags={"team_id": self._team.pk}) - raise - - @property - def limit(self): - return self._filter.limit or self.SESSION_RECORDINGS_DEFAULT_LIMIT - - def get_query(self) -> tuple[str, dict[str, Any]]: - offset = 
self._filter.offset or 0 - - base_params = { - "team_id": self._team_id, - "limit": self.limit + 1, - "offset": offset, - "clamped_to_storage_ttl": (datetime.now() - timedelta(days=self.ttl_days)), - } - - _, recording_start_time_params = _get_recording_start_time_clause(self._filter) - ( - provided_session_ids_clause, - provided_session_ids_params, - ) = _get_filter_by_provided_session_ids_clause(recording_filters=self._filter) - - ( - log_matching_session_ids_clause, - log_matching_session_ids_params, - ) = _get_filter_by_log_text_session_ids_clause(team=self._team, recording_filters=self._filter) - - order_by_clause = _get_order_by_clause(self._filter.target_entity_order) - - duration_clause, duration_params = self.duration_clause(self._filter.duration_type_filter) - console_log_clause = self._get_console_log_clause(self._filter.console_logs_filter) - - events_select, events_join_params = SessionIdEventsQuery( - team=self._team, - filter=self._filter, - ).get_query() - if events_select: - events_select = f"AND s.session_id in (select `$session_id` as session_id from ({events_select}) as session_events_sub_query)" - - persons_select, persons_select_params = ActorsQuery(filter=self._filter, team=self._team).get_query() - if persons_select: - persons_select = ( - f"AND s.distinct_id in (select distinct_id from ({persons_select}) as session_persons_sub_query)" - ) - - return ( - self._session_recordings_query.format( - duration_clause=duration_clause, - provided_session_ids_clause=provided_session_ids_clause, - console_log_clause=console_log_clause, - persons_sub_query=persons_select, - events_sub_query=events_select, - log_matching_session_ids_clause=log_matching_session_ids_clause, - order_by_clause=order_by_clause, - context_comment=f"-- running in PoE Mode: {self._person_on_events_mode}", - ), - { - **base_params, - **events_join_params, - **recording_start_time_params, - **duration_params, - **provided_session_ids_params, - **persons_select_params, - **log_matching_session_ids_params, - }, - ) - - def duration_clause( - self, - duration_filter_type: Literal["duration", "active_seconds", "inactive_seconds"], - ) -> tuple[str, dict[str, Any]]: - duration_clause = "" - duration_params = {} - if self._filter.recording_duration_filter: - if self._filter.recording_duration_filter.operator == "gt": - operator = ">" - else: - operator = "<" - duration_clause = "\nAND {duration_type} {operator} %(recording_duration)s".format( - duration_type=duration_filter_type, operator=operator - ) - duration_params = { - "recording_duration": self._filter.recording_duration_filter.value, - } - return duration_clause, duration_params - - @staticmethod - def _get_console_log_clause(console_logs_filter: list[Literal["error", "warn", "info"]]) -> str: - # to avoid a CH migration we map from info to log when constructing the query here - filters = [f"console_{'log' if log == 'info' else log}_count > 0" for log in console_logs_filter] - return f"AND ({' OR '.join(filters)})" if filters else "" diff --git a/posthog/session_recordings/queries/test/test_session_recording_list_from_session_replay.py b/posthog/session_recordings/queries/test/test_session_recording_list_from_session_replay.py deleted file mode 100644 index 09b1c68cfe122..0000000000000 --- a/posthog/session_recordings/queries/test/test_session_recording_list_from_session_replay.py +++ /dev/null @@ -1,3115 +0,0 @@ -from datetime import datetime -from uuid import uuid4 - -from dateutil.relativedelta import relativedelta -from django.utils.timezone import 
now -from freezegun import freeze_time - -from posthog.clickhouse.client import sync_execute -from posthog.clickhouse.log_entries import TRUNCATE_LOG_ENTRIES_TABLE_SQL -from posthog.constants import AvailableFeature -from posthog.models import Cohort, GroupTypeMapping, Person -from posthog.models.action import Action -from posthog.models.filters.session_recordings_filter import SessionRecordingsFilter -from posthog.models.group.util import create_group -from posthog.models.team import Team -from posthog.session_recordings.queries.session_recording_list_from_replay_summary import ( - SessionRecordingListFromReplaySummary, - SessionRecordingQueryResult, -) -from posthog.session_recordings.queries.session_replay_events import ttl_days -from posthog.session_recordings.queries.test.session_replay_sql import ( - produce_replay_summary, -) -from posthog.session_recordings.sql.session_replay_event_sql import ( - TRUNCATE_SESSION_REPLAY_EVENTS_TABLE_SQL, -) -from posthog.test.base import ( - APIBaseTest, - ClickhouseTestMixin, - _create_event, - also_test_with_materialized_columns, - flush_persons_and_events, - snapshot_clickhouse_queries, -) - - -@freeze_time("2021-01-01T13:46:23") -class TestClickhouseSessionRecordingsListFromSessionReplay(ClickhouseTestMixin, APIBaseTest): - @classmethod - def teardown_class(cls): - sync_execute(TRUNCATE_SESSION_REPLAY_EVENTS_TABLE_SQL()) - sync_execute(TRUNCATE_LOG_ENTRIES_TABLE_SQL) - - def create_action(self, name, team_id=None, properties=None): - if team_id is None: - team_id = self.team.pk - if properties is None: - properties = [] - action = Action.objects.create( - team_id=team_id, name=name, steps_json=[{"event": name, "properties": properties}] - ) - return action - - def create_event( - self, - distinct_id, - timestamp, - team=None, - event_name="$pageview", - properties=None, - ): - if team is None: - team = self.team - if properties is None: - properties = {"$os": "Windows 95", "$current_url": "aloha.com/2"} - return _create_event( - team=team, - event=event_name, - timestamp=timestamp, - distinct_id=distinct_id, - properties=properties, - ) - - def _filter_recordings_by(self, recordings_filter: dict) -> SessionRecordingQueryResult: - the_filter = SessionRecordingsFilter(team=self.team, data=recordings_filter) - session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=the_filter, team=self.team) - return session_recording_list_instance.run() - - @property - def an_hour_ago(self): - return (now() - relativedelta(hours=1)).replace(microsecond=0, second=0) - - @snapshot_clickhouse_queries - def test_basic_query(self): - user = "test_basic_query-user" - Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) - - session_id_one = f"test_basic_query-{str(uuid4())}" - session_id_two = f"test_basic_query-{str(uuid4())}" - - produce_replay_summary( - session_id=session_id_one, - team_id=self.team.pk, - # can CH handle a timestamp with no T - first_timestamp=self.an_hour_ago.isoformat().replace("T", " "), - last_timestamp=(self.an_hour_ago + relativedelta(seconds=20)).isoformat().replace("T", " "), - distinct_id=user, - first_url="https://example.io/home", - click_count=2, - keypress_count=2, - mouse_activity_count=2, - active_milliseconds=50 * 1000 * 0.5, # 50% of the total expected duration - ) - - produce_replay_summary( - session_id=session_id_one, - team_id=self.team.pk, - # can CH handle a timestamp with no T - first_timestamp=(self.an_hour_ago + relativedelta(seconds=10)), - 
last_timestamp=(self.an_hour_ago + relativedelta(seconds=50)), - distinct_id=user, - first_url="https://a-different-url.com", - click_count=2, - keypress_count=2, - mouse_activity_count=2, - active_milliseconds=0, # 30% of the total expected duration - ) - - produce_replay_summary( - session_id=session_id_two, - team_id=self.team.pk, - # can CH handle a timestamp with no T - first_timestamp=(self.an_hour_ago + relativedelta(seconds=20)), - last_timestamp=(self.an_hour_ago + relativedelta(seconds=2000)), - distinct_id=user, - first_url=None, - click_count=2, - keypress_count=2, - mouse_activity_count=2, - active_milliseconds=1980 * 1000 * 0.4, # 40% of the total expected duration - ) - - session_recordings, more_recordings_available = self._filter_recordings_by({"no_filter": None}) - - assert session_recordings == [ - { - "session_id": session_id_two, - "team_id": self.team.pk, - "distinct_id": user, - "click_count": 2, - "keypress_count": 2, - "mouse_activity_count": 2, - "duration": 1980, - "active_seconds": 792.0, - "inactive_seconds": 1188.0, - "start_time": self.an_hour_ago + relativedelta(seconds=20), - "end_time": self.an_hour_ago + relativedelta(seconds=2000), - "first_url": None, - "console_log_count": 0, - "console_warn_count": 0, - "console_error_count": 0, - }, - { - "session_id": session_id_one, - "team_id": self.team.pk, - "distinct_id": user, - "click_count": 4, - "keypress_count": 4, - "mouse_activity_count": 4, - "duration": 50, - "active_seconds": 25.0, - "inactive_seconds": 25.0, - "start_time": self.an_hour_ago, - "end_time": self.an_hour_ago + relativedelta(seconds=50), - "first_url": "https://example.io/home", - "console_log_count": 0, - "console_warn_count": 0, - "console_error_count": 0, - }, - ] - - assert more_recordings_available is False - - @snapshot_clickhouse_queries - def test_basic_query_active_sessions( - self, - ): - user = "test_basic_query-user" - Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) - - session_id_total_is_61 = f"test_basic_query_active_sessions-total-{str(uuid4())}" - session_id_active_is_61 = f"test_basic_query_active_sessions-active-{str(uuid4())}" - session_id_inactive_is_61 = f"test_basic_query_active_sessions-inactive-{str(uuid4())}" - - produce_replay_summary( - session_id=session_id_total_is_61, - team_id=self.team.pk, - # can CH handle a timestamp with no T - first_timestamp=self.an_hour_ago.isoformat().replace("T", " "), - last_timestamp=(self.an_hour_ago + relativedelta(seconds=61)).isoformat().replace("T", " "), - distinct_id=user, - first_url="https://example.io/home", - click_count=2, - keypress_count=2, - mouse_activity_count=2, - active_milliseconds=59000, - ) - - produce_replay_summary( - session_id=session_id_active_is_61, - team_id=self.team.pk, - # can CH handle a timestamp with no T - first_timestamp=self.an_hour_ago, - last_timestamp=(self.an_hour_ago + relativedelta(seconds=59)), - distinct_id=user, - first_url="https://a-different-url.com", - click_count=2, - keypress_count=2, - mouse_activity_count=2, - active_milliseconds=61000, - ) - - produce_replay_summary( - session_id=session_id_inactive_is_61, - team_id=self.team.pk, - # can CH handle a timestamp with no T - first_timestamp=self.an_hour_ago, - last_timestamp=(self.an_hour_ago + relativedelta(seconds=61)), - distinct_id=user, - first_url="https://a-different-url.com", - click_count=0, - keypress_count=0, - mouse_activity_count=0, - active_milliseconds=0, - ) - - ( - session_recordings, - more_recordings_available, - ) = 
self._filter_recordings_by( - { - "duration_type_filter": "duration", - "session_recording_duration": '{"type":"recording","key":"duration","value":60,"operator":"gt"}', - } - ) - - assert sorted( - [(s["session_id"], s["duration"], s["active_seconds"]) for s in session_recordings], - key=lambda x: x[0], - ) == [ - (session_id_inactive_is_61, 61, 0.0), - (session_id_total_is_61, 61, 59.0), - ] - - ( - session_recordings, - more_recordings_available, - ) = self._filter_recordings_by( - { - "duration_type_filter": "active_seconds", - "session_recording_duration": '{"type":"recording","key":"duration","value":60,"operator":"gt"}', - } - ) - - assert [(s["session_id"], s["duration"], s["active_seconds"]) for s in session_recordings] == [ - (session_id_active_is_61, 59, 61.0) - ] - - ( - session_recordings, - more_recordings_available, - ) = self._filter_recordings_by( - { - "duration_type_filter": "inactive_seconds", - "session_recording_duration": '{"type":"recording","key":"duration","value":60,"operator":"gt"}', - } - ) - - assert [(s["session_id"], s["duration"], s["inactive_seconds"]) for s in session_recordings] == [ - (session_id_inactive_is_61, 61, 61.0) - ] - - @snapshot_clickhouse_queries - def test_basic_query_with_paging(self): - user = "test_basic_query_with_paging-user" - Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) - - session_id_one = f"test_basic_query_with_paging-{str(uuid4())}" - session_id_two = f"test_basic_query_with_paging-{str(uuid4())}" - - produce_replay_summary( - session_id=session_id_one, - team_id=self.team.pk, - # can CH handle a timestamp with no T - first_timestamp=self.an_hour_ago.isoformat().replace("T", " "), - last_timestamp=(self.an_hour_ago + relativedelta(seconds=20)).isoformat().replace("T", " "), - distinct_id=user, - first_url="https://example.io/home", - click_count=2, - keypress_count=2, - mouse_activity_count=2, - active_milliseconds=50 * 1000 * 0.5, # 50% of the total expected duration - ) - - produce_replay_summary( - session_id=session_id_one, - team_id=self.team.pk, - # can CH handle a timestamp with no T - first_timestamp=(self.an_hour_ago + relativedelta(seconds=10)), - last_timestamp=(self.an_hour_ago + relativedelta(seconds=50)), - distinct_id=user, - first_url="https://a-different-url.com", - click_count=2, - keypress_count=2, - mouse_activity_count=2, - active_milliseconds=0, # 30% of the total expected duration - ) - - produce_replay_summary( - session_id=session_id_two, - team_id=self.team.pk, - # can CH handle a timestamp with no T - first_timestamp=(self.an_hour_ago + relativedelta(seconds=20)), - last_timestamp=(self.an_hour_ago + relativedelta(seconds=2000)), - distinct_id=user, - first_url=None, - click_count=2, - keypress_count=2, - mouse_activity_count=2, - active_milliseconds=1980 * 1000 * 0.4, # 40% of the total expected duration - ) - - ( - session_recordings, - more_recordings_available, - ) = self._filter_recordings_by({"no_filter": None, "limit": 1, "offset": 0}) - - assert session_recordings == [ - { - "session_id": session_id_two, - "team_id": self.team.pk, - "distinct_id": user, - "click_count": 2, - "keypress_count": 2, - "mouse_activity_count": 2, - "duration": 1980, - "active_seconds": 792.0, - "inactive_seconds": 1188.0, - "start_time": self.an_hour_ago + relativedelta(seconds=20), - "end_time": self.an_hour_ago + relativedelta(seconds=2000), - "first_url": None, - "console_log_count": 0, - "console_warn_count": 0, - "console_error_count": 0, - } - ] - - assert 
more_recordings_available is True - - ( - session_recordings, - more_recordings_available, - ) = self._filter_recordings_by({"no_filter": None, "limit": 1, "offset": 1}) - - assert session_recordings == [ - { - "session_id": session_id_one, - "team_id": self.team.pk, - "distinct_id": user, - "click_count": 4, - "keypress_count": 4, - "mouse_activity_count": 4, - "duration": 50, - "active_seconds": 25.0, - "inactive_seconds": 25.0, - "start_time": self.an_hour_ago, - "end_time": self.an_hour_ago + relativedelta(seconds=50), - "first_url": "https://example.io/home", - "console_log_count": 0, - "console_warn_count": 0, - "console_error_count": 0, - }, - ] - - assert more_recordings_available is False - - ( - session_recordings, - more_recordings_available, - ) = self._filter_recordings_by({"no_filter": None, "limit": 1, "offset": 2}) - - assert session_recordings == [] - - assert more_recordings_available is False - - @snapshot_clickhouse_queries - def test_basic_query_with_ordering(self): - user = "test_basic_query_with_ordering-user" - Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) - - session_id_one = f"test_basic_query_with_ordering-session-1-{str(uuid4())}" - session_id_two = f"test_basic_query_with_ordering-session-2-{str(uuid4())}" - - session_one_start = self.an_hour_ago + relativedelta(seconds=10) - produce_replay_summary( - session_id=session_id_one, - team_id=self.team.pk, - # can CH handle a timestamp with no T - first_timestamp=session_one_start, - last_timestamp=(self.an_hour_ago + relativedelta(seconds=50)), - distinct_id=user, - console_error_count=1000, - active_milliseconds=1, # most errors, but the least activity - ) - - produce_replay_summary( - session_id=session_id_one, - team_id=self.team.pk, - # can CH handle a timestamp with no T - first_timestamp=session_one_start, - last_timestamp=(self.an_hour_ago + relativedelta(seconds=50)), - distinct_id=user, - console_error_count=12, - active_milliseconds=1, # most errors, but the least activity - ) - - session_two_start = self.an_hour_ago - produce_replay_summary( - session_id=session_id_two, - team_id=self.team.pk, - # starts before session one - first_timestamp=session_two_start, - last_timestamp=(self.an_hour_ago + relativedelta(seconds=50)), - distinct_id=user, - console_error_count=430, - active_milliseconds=1000, # most activity, but the least errors - ) - - (session_recordings) = self._filter_recordings_by( - {"no_filter": None, "limit": 3, "offset": 0, "entity_order": "active_seconds"} - ) - - ordered_by_activity = [(r["session_id"], r["active_seconds"]) for r in session_recordings.results] - assert ordered_by_activity == [(session_id_two, 1.0), (session_id_one, 0.002)] - - (session_recordings) = self._filter_recordings_by( - {"no_filter": None, "limit": 3, "offset": 0, "entity_order": "console_error_count"} - ) - - ordered_by_errors = [(r["session_id"], r["console_error_count"]) for r in session_recordings.results] - assert ordered_by_errors == [(session_id_one, 1012), (session_id_two, 430)] - - (session_recordings) = self._filter_recordings_by( - {"no_filter": None, "limit": 3, "offset": 0, "entity_order": "start_time"} - ) - - ordered_by_default = [(r["session_id"], r["start_time"]) for r in session_recordings.results] - assert ordered_by_default == [(session_id_one, session_one_start), (session_id_two, session_two_start)] - - def test_first_url_selection(self): - user = "test_first_url_selection-user" - Person.objects.create(team=self.team, distinct_ids=[user], 
properties={"email": "bla"}) - - session_id_one = f"first-url-on-first-event-{str(uuid4())}" - session_id_two = f"first-url-not-on-first-event-{str(uuid4())}" - session_id_three = f"no-url-from-many-{str(uuid4())}" - session_id_four = f"events-inserted-out-of-order-{str(uuid4())}" - - # session one has the first url on the first event - produce_replay_summary( - session_id=session_id_one, - team_id=self.team.pk, - # can CH handle a timestamp with no T - first_timestamp=self.an_hour_ago, - last_timestamp=self.an_hour_ago + relativedelta(seconds=20), - first_url="https://on-first-event.com", - ) - - produce_replay_summary( - session_id=session_id_one, - team_id=self.team.pk, - first_timestamp=self.an_hour_ago + relativedelta(seconds=10), - last_timestamp=self.an_hour_ago + relativedelta(seconds=20), - first_url="https://on-second-event.com", - ) - - produce_replay_summary( - session_id=session_id_one, - team_id=self.team.pk, - first_timestamp=self.an_hour_ago + relativedelta(seconds=20), - last_timestamp=self.an_hour_ago + relativedelta(seconds=40), - first_url="https://on-third-event.com", - ) - - # session two has no URL on the first event - produce_replay_summary( - session_id=session_id_two, - team_id=self.team.pk, - first_timestamp=(self.an_hour_ago + relativedelta(seconds=10)), - last_timestamp=(self.an_hour_ago + relativedelta(seconds=50)), - first_url=None, - ) - - produce_replay_summary( - session_id=session_id_two, - team_id=self.team.pk, - first_timestamp=(self.an_hour_ago + relativedelta(seconds=20)), - last_timestamp=(self.an_hour_ago + relativedelta(seconds=30)), - first_url="https://first-is-on-second-event.com", - ) - - produce_replay_summary( - session_id=session_id_two, - team_id=self.team.pk, - first_timestamp=(self.an_hour_ago + relativedelta(seconds=25)), - last_timestamp=(self.an_hour_ago + relativedelta(seconds=30)), - first_url="https://another-on-the-session.com", - ) - - # session three has no URLs - produce_replay_summary( - session_id=session_id_three, - team_id=self.team.pk, - first_timestamp=self.an_hour_ago, - last_timestamp=self.an_hour_ago + relativedelta(seconds=50), - distinct_id=user, - first_url=None, - ) - - produce_replay_summary( - session_id=session_id_three, - team_id=self.team.pk, - first_timestamp=(self.an_hour_ago + relativedelta(seconds=10)), - last_timestamp=self.an_hour_ago + relativedelta(seconds=50), - distinct_id=user, - first_url=None, - ) - - produce_replay_summary( - session_id=session_id_three, - team_id=self.team.pk, - first_timestamp=(self.an_hour_ago + relativedelta(seconds=20)), - last_timestamp=self.an_hour_ago + relativedelta(seconds=60), - distinct_id=user, - first_url=None, - ) - - # session four events are received out of order - produce_replay_summary( - session_id=session_id_four, - team_id=self.team.pk, - first_timestamp=self.an_hour_ago + relativedelta(seconds=20), - last_timestamp=self.an_hour_ago + relativedelta(seconds=25), - first_url="https://on-first-received-event.com", - ) - produce_replay_summary( - session_id=session_id_four, - team_id=self.team.pk, - first_timestamp=self.an_hour_ago + relativedelta(seconds=10), - last_timestamp=self.an_hour_ago + relativedelta(seconds=25), - first_url="https://on-second-received-event-but-actually-first.com", - ) - - session_recordings, more_recordings_available = self._filter_recordings_by({"no_filter": None}) - - assert sorted( - [{"session_id": r["session_id"], "first_url": r["first_url"]} for r in session_recordings], - key=lambda x: x["session_id"], - ) == sorted( - [ - { - 
"session_id": session_id_one, - "first_url": "https://on-first-event.com", - }, - { - "session_id": session_id_two, - "first_url": "https://first-is-on-second-event.com", - }, - { - "session_id": session_id_three, - "first_url": None, - }, - { - "session_id": session_id_four, - "first_url": "https://on-second-received-event-but-actually-first.com", - }, - ], - # mypy unhappy about this lambda 🤷️ - key=lambda x: x["session_id"], # type: ignore - ) - - def test_recordings_dont_leak_data_between_teams(self): - another_team = Team.objects.create(organization=self.organization) - user = "test_recordings_dont_leak_data_between_teams-user" - Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) - Person.objects.create(team=another_team, distinct_ids=[user], properties={"email": "bla"}) - - session_id_one = f"test_recordings_dont_leak_data_between_teams-1-{str(uuid4())}" - session_id_two = f"test_recordings_dont_leak_data_between_teams-2-{str(uuid4())}" - - produce_replay_summary( - session_id=session_id_one, - team_id=another_team.pk, - distinct_id=user, - first_timestamp=self.an_hour_ago, - last_timestamp=self.an_hour_ago + relativedelta(seconds=20), - first_url=None, - click_count=2, - keypress_count=2, - mouse_activity_count=2, - active_milliseconds=20 * 1000 * 0.5, # 50% of the total expected duration - ) - - produce_replay_summary( - session_id=session_id_two, - team_id=self.team.pk, - distinct_id=user, - first_timestamp=self.an_hour_ago, - last_timestamp=self.an_hour_ago + relativedelta(seconds=20), - first_url=None, - click_count=2, - keypress_count=2, - mouse_activity_count=2, - active_milliseconds=20 * 1000 * 0.5, # 50% of the total expected duration - ) - - (session_recordings, _) = self._filter_recordings_by({"no_filter": None}) - - assert [{"session": r["session_id"], "user": r["distinct_id"]} for r in session_recordings] == [ - {"session": session_id_two, "user": user} - ] - - @snapshot_clickhouse_queries - def test_event_filter(self): - user = "test_event_filter-user" - Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) - session_id_one = f"test_event_filter-{str(uuid4())}" - produce_replay_summary( - distinct_id=user, - session_id=session_id_one, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - self.create_event( - user, - self.an_hour_ago, - properties={"$session_id": session_id_one, "$window_id": str(uuid4())}, - ) - produce_replay_summary( - distinct_id=user, - session_id=session_id_one, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - - (session_recordings, _) = self._filter_recordings_by( - { - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - } - ] - } - ) - - assert len(session_recordings) == 1 - assert session_recordings[0]["session_id"] == session_id_one - - (session_recordings, _) = self._filter_recordings_by( - { - "events": [ - { - "id": "$autocapture", - "type": "events", - "order": 0, - "name": "$autocapture", - } - ] - } - ) - assert session_recordings == [] - - @snapshot_clickhouse_queries - def test_event_filter_has_ttl_applied_too(self): - user = "test_event_filter_has_ttl_applied_too-user" - Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) - session_id_one = f"test_event_filter_has_ttl_applied_too-{str(uuid4())}" - - # this is artificially incorrect data, the session events are within TTL - produce_replay_summary( - distinct_id=user, - session_id=session_id_one, - 
first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - # but the page view event is outside TTL - self.create_event( - user, - self.an_hour_ago - - relativedelta(days=SessionRecordingListFromReplaySummary.SESSION_RECORDINGS_DEFAULT_LIMIT + 1), - properties={"$session_id": session_id_one, "$window_id": str(uuid4())}, - ) - - (session_recordings, _) = self._filter_recordings_by( - { - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - } - ] - } - ) - assert len(session_recordings) == 0 - - (session_recordings, _) = self._filter_recordings_by({}) - # without an event filter the recording is present, showing that the TTL was applied to the events table too - # we want this to limit the amount of event data we query - assert len(session_recordings) == 1 - assert session_recordings[0]["session_id"] == session_id_one - - @snapshot_clickhouse_queries - def test_ttl_days(self): - assert ttl_days(self.team) == 21 - - with self.is_cloud(True): - # Far enough in the future from `days_since_blob_ingestion` but not paid - with freeze_time("2023-09-01T12:00:01Z"): - assert ttl_days(self.team) == 30 - - self.team.organization.available_product_features = [ - {"name": AvailableFeature.RECORDINGS_PLAYLISTS, "key": AvailableFeature.RECORDINGS_PLAYLISTS} - ] - - # Far enough in the future from `days_since_blob_ingestion` but paid - with freeze_time("2023-12-01T12:00:01Z"): - assert ttl_days(self.team) == 90 - - # Not far enough in the future from `days_since_blob_ingestion` - with freeze_time("2023-09-05T12:00:01Z"): - assert ttl_days(self.team) == 35 - - @snapshot_clickhouse_queries - def test_event_filter_with_active_sessions( - self, - ): - user = "test_basic_query-user" - Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) - - session_id_total_is_61 = f"test_basic_query_active_sessions-total-{str(uuid4())}" - session_id_active_is_61 = f"test_basic_query_active_sessions-active-{str(uuid4())}" - - self.create_event( - user, - self.an_hour_ago, - properties={ - "$session_id": session_id_total_is_61, - "$window_id": str(uuid4()), - }, - ) - produce_replay_summary( - session_id=session_id_total_is_61, - team_id=self.team.pk, - # can CH handle a timestamp with no T - first_timestamp=self.an_hour_ago.isoformat().replace("T", " "), - last_timestamp=(self.an_hour_ago + relativedelta(seconds=61)).isoformat().replace("T", " "), - distinct_id=user, - first_url="https://example.io/home", - click_count=2, - keypress_count=2, - mouse_activity_count=2, - active_milliseconds=59000, - ) - - self.create_event( - user, - self.an_hour_ago, - properties={ - "$session_id": session_id_active_is_61, - "$window_id": str(uuid4()), - }, - ) - produce_replay_summary( - session_id=session_id_active_is_61, - team_id=self.team.pk, - # can CH handle a timestamp with no T - first_timestamp=self.an_hour_ago, - last_timestamp=(self.an_hour_ago + relativedelta(seconds=59)), - distinct_id=user, - first_url="https://a-different-url.com", - click_count=2, - keypress_count=2, - mouse_activity_count=2, - active_milliseconds=61000, - ) - - ( - session_recordings, - more_recordings_available, - ) = self._filter_recordings_by( - { - "duration_type_filter": "duration", - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - } - ], - "session_recording_duration": '{"type":"recording","key":"duration","value":60,"operator":"gt"}', - } - ) - - assert [(s["session_id"], s["duration"], s["active_seconds"]) for s in 
session_recordings] == [ - (session_id_total_is_61, 61, 59.0) - ] - - ( - session_recordings, - more_recordings_available, - ) = self._filter_recordings_by( - { - "duration_type_filter": "active_seconds", - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - } - ], - "session_recording_duration": '{"type":"recording","key":"duration","value":60,"operator":"gt"}', - } - ) - - assert [(s["session_id"], s["duration"], s["active_seconds"]) for s in session_recordings] == [ - (session_id_active_is_61, 59, 61.0) - ] - - @also_test_with_materialized_columns(["$current_url", "$browser"]) - @snapshot_clickhouse_queries - def test_event_filter_with_properties(self): - user = "test_event_filter_with_properties-user" - Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) - session_id_one = f"test_event_filter_with_properties-{str(uuid4())}" - produce_replay_summary( - distinct_id=user, - session_id=session_id_one, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - self.create_event( - user, - self.an_hour_ago, - properties={ - "$browser": "Chrome", - "$session_id": session_id_one, - "$window_id": str(uuid4()), - }, - ) - produce_replay_summary( - distinct_id=user, - session_id=session_id_one, - first_timestamp=(self.an_hour_ago + relativedelta(seconds=30)), - team_id=self.team.id, - ) - - (session_recordings, _) = self._filter_recordings_by( - { - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - "properties": [ - { - "key": "$browser", - "value": ["Chrome"], - "operator": "exact", - "type": "event", - } - ], - } - ] - } - ) - assert len(session_recordings) == 1 - assert session_recordings[0]["session_id"] == session_id_one - - (session_recordings, _) = self._filter_recordings_by( - { - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - "properties": [ - { - "key": "$browser", - "value": ["Firefox"], - "operator": "exact", - "type": "event", - } - ], - } - ] - } - ) - assert session_recordings == [] - - @snapshot_clickhouse_queries - def test_multiple_event_filters(self): - session_id = f"test_multiple_event_filters-{str(uuid4())}" - user = "test_multiple_event_filters-user" - Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) - produce_replay_summary( - distinct_id=user, - session_id=session_id, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - - self.create_event( - user, - self.an_hour_ago, - properties={"$session_id": session_id, "$window_id": "1"}, - ) - self.create_event( - user, - self.an_hour_ago, - properties={"$session_id": session_id, "$window_id": "1"}, - event_name="new-event", - ) - produce_replay_summary( - distinct_id=user, - session_id=session_id, - first_timestamp=(self.an_hour_ago + relativedelta(seconds=30)), - team_id=self.team.id, - ) - - (session_recordings, _) = self._filter_recordings_by( - { - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - }, - { - "id": "new-event", - "type": "events", - "order": 0, - "name": "new-event", - }, - ] - } - ) - - assert len(session_recordings) == 1 - assert session_recordings[0]["session_id"] == session_id - - (session_recordings, _) = self._filter_recordings_by( - { - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - }, - { - "id": "new-event2", - "type": "events", - "order": 0, - "name": "new-event2", - }, - ] - } - 
) - assert session_recordings == [] - - @snapshot_clickhouse_queries - @also_test_with_materialized_columns(["$session_id", "$browser"], person_properties=["email"]) - @freeze_time("2023-01-04") - def test_action_filter(self): - user = "test_action_filter-user" - Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) - session_id_one = f"test_action_filter-session-one" - window_id = "test_action_filter-window-id" - action_with_properties = self.create_action( - "custom-event", - properties=[ - {"key": "$browser", "value": "Firefox"}, - {"key": "$session_id", "value": session_id_one}, - {"key": "$window_id", "value": window_id}, - ], - ) - action_without_properties = self.create_action( - name="custom-event", - properties=[ - {"key": "$session_id", "value": session_id_one}, - {"key": "$window_id", "value": window_id}, - ], - ) - - produce_replay_summary( - distinct_id=user, - session_id=session_id_one, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - self.create_event( - user, - self.an_hour_ago, - event_name="custom-event", - properties={ - "$browser": "Chrome", - "$session_id": session_id_one, - "$window_id": window_id, - }, - ) - produce_replay_summary( - distinct_id=user, - session_id=session_id_one, - first_timestamp=(self.an_hour_ago + relativedelta(seconds=30)), - team_id=self.team.id, - ) - - (session_recordings, _) = self._filter_recordings_by( - { - "actions": [ - { - "id": action_with_properties.id, - "type": "actions", - "order": 1, - "name": "custom-event", - } - ] - } - ) - assert session_recordings == [] - - (session_recordings, _) = self._filter_recordings_by( - { - "actions": [ - { - "id": action_without_properties.id, - "type": "actions", - "order": 1, - "name": "custom-event", - } - ] - } - ) - - assert len(session_recordings) == 1 - assert session_recordings[0]["session_id"] == session_id_one - - # Adding properties to an action - (session_recordings, _) = self._filter_recordings_by( - { - "actions": [ - { - "id": action_without_properties.id, - "type": "actions", - "order": 1, - "name": "custom-event", - "properties": [ - { - "key": "$browser", - "value": ["Firefox"], - "operator": "exact", - "type": "event", - } - ], - } - ] - } - ) - assert session_recordings == [] - - # Adding matching properties to an action - (session_recordings, _) = self._filter_recordings_by( - { - "actions": [ - { - "id": action_without_properties.id, - "type": "actions", - "order": 1, - "name": "custom-event", - "properties": [ - { - "key": "$browser", - "value": ["Chrome"], - "operator": "exact", - "type": "event", - } - ], - } - ] - } - ) - - assert len(session_recordings) == 1 - assert session_recordings[0]["session_id"] == session_id_one - - def test_all_sessions_recording_object_keys_with_entity_filter(self): - user = "test_all_sessions_recording_object_keys_with_entity_filter-user" - Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) - session_id = f"test_all_sessions_recording_object_keys_with_entity_filter-{str(uuid4())}" - window_id = str(uuid4()) - - produce_replay_summary( - distinct_id=user, - session_id=session_id, - first_timestamp=self.an_hour_ago, - last_timestamp=(self.an_hour_ago + relativedelta(seconds=60)), - team_id=self.team.id, - ) - self.create_event( - user, - self.an_hour_ago, - properties={"$session_id": session_id, "$window_id": window_id}, - ) - produce_replay_summary( - distinct_id=user, - session_id=session_id, - first_timestamp=self.an_hour_ago, - 
last_timestamp=(self.an_hour_ago + relativedelta(seconds=30)), - team_id=self.team.id, - ) - - (session_recordings, _) = self._filter_recordings_by( - { - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - } - ] - } - ) - - assert session_recordings == [ - { - "session_id": session_id, - "distinct_id": user, - "duration": 60, - "start_time": self.an_hour_ago, - "end_time": self.an_hour_ago + relativedelta(seconds=60), - "active_seconds": 0.0, - "click_count": 0, - "first_url": "https://not-provided-by-test.com", - "inactive_seconds": 60.0, - "keypress_count": 0, - "mouse_activity_count": 0, - "team_id": self.team.id, - "console_log_count": 0, - "console_warn_count": 0, - "console_error_count": 0, - } - ] - - @snapshot_clickhouse_queries - def test_duration_filter(self): - user = "test_duration_filter-user" - Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) - - session_id_one = "session one is 29 seconds long" - produce_replay_summary( - distinct_id=user, - session_id=session_id_one, - first_timestamp=self.an_hour_ago, - last_timestamp=(self.an_hour_ago + relativedelta(seconds=29)), - team_id=self.team.id, - ) - - session_id_two = "session two is 61 seconds long" - produce_replay_summary( - distinct_id=user, - session_id=session_id_two, - first_timestamp=self.an_hour_ago, - last_timestamp=(self.an_hour_ago + relativedelta(seconds=61)), - team_id=self.team.id, - ) - - (session_recordings, _) = self._filter_recordings_by( - {"session_recording_duration": '{"type":"recording","key":"duration","value":60,"operator":"gt"}'} - ) - assert [r["session_id"] for r in session_recordings] == [session_id_two] - - (session_recordings, _) = self._filter_recordings_by( - {"session_recording_duration": '{"type":"recording","key":"duration","value":60,"operator":"lt"}'} - ) - assert [r["session_id"] for r in session_recordings] == [session_id_one] - - @snapshot_clickhouse_queries - def test_date_from_filter(self): - user = "test_date_from_filter-user" - Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) - - produce_replay_summary( - distinct_id=user, - session_id="three days before base time", - first_timestamp=(self.an_hour_ago - relativedelta(days=3, seconds=100)), - last_timestamp=(self.an_hour_ago - relativedelta(days=3)), - team_id=self.team.id, - ) - produce_replay_summary( - distinct_id=user, - session_id="two days before base time", - first_timestamp=(self.an_hour_ago - relativedelta(days=2, seconds=100)), - last_timestamp=(self.an_hour_ago - relativedelta(days=2)), - team_id=self.team.id, - ) - - (session_recordings, _) = self._filter_recordings_by({"date_from": self.an_hour_ago.strftime("%Y-%m-%d")}) - assert session_recordings == [] - - (session_recordings, _) = self._filter_recordings_by( - {"date_from": (self.an_hour_ago - relativedelta(days=2)).strftime("%Y-%m-%d")} - ) - assert len(session_recordings) == 1 - assert session_recordings[0]["session_id"] == "two days before base time" - - @snapshot_clickhouse_queries - def test_date_from_filter_cannot_search_before_ttl(self): - with freeze_time(self.an_hour_ago): - user = "test_date_from_filter_cannot_search_before_ttl-user" - Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) - - produce_replay_summary( - distinct_id=user, - session_id="storage is past ttl", - first_timestamp=(self.an_hour_ago - relativedelta(days=22)), - # an illegally long session but it started 22 days ago - 
last_timestamp=(self.an_hour_ago - relativedelta(days=3)), - team_id=self.team.id, - ) - produce_replay_summary( - distinct_id=user, - session_id="storage is not past ttl", - first_timestamp=(self.an_hour_ago - relativedelta(days=19)), - last_timestamp=(self.an_hour_ago - relativedelta(days=2)), - team_id=self.team.id, - ) - - (session_recordings, _) = self._filter_recordings_by( - {"date_from": (self.an_hour_ago - relativedelta(days=20)).strftime("%Y-%m-%d")} - ) - assert len(session_recordings) == 1 - assert session_recordings[0]["session_id"] == "storage is not past ttl" - - (session_recordings, _) = self._filter_recordings_by( - {"date_from": (self.an_hour_ago - relativedelta(days=21)).strftime("%Y-%m-%d")} - ) - assert len(session_recordings) == 1 - assert session_recordings[0]["session_id"] == "storage is not past ttl" - - (session_recordings, _) = self._filter_recordings_by( - {"date_from": (self.an_hour_ago - relativedelta(days=22)).strftime("%Y-%m-%d")} - ) - assert len(session_recordings) == 1 - assert session_recordings[0]["session_id"] == "storage is not past ttl" - - @snapshot_clickhouse_queries - def test_date_to_filter(self): - user = "test_date_to_filter-user" - Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) - produce_replay_summary( - distinct_id=user, - session_id="three days before base time", - first_timestamp=(self.an_hour_ago - relativedelta(days=3, seconds=100)), - last_timestamp=(self.an_hour_ago - relativedelta(days=3)), - team_id=self.team.id, - ) - produce_replay_summary( - distinct_id=user, - session_id="two days before base time", - first_timestamp=(self.an_hour_ago - relativedelta(days=2, seconds=100)), - last_timestamp=(self.an_hour_ago - relativedelta(days=2)), - team_id=self.team.id, - ) - - (session_recordings, _) = self._filter_recordings_by( - {"date_to": (self.an_hour_ago - relativedelta(days=4)).strftime("%Y-%m-%d")} - ) - assert session_recordings == [] - - (session_recordings, _) = self._filter_recordings_by( - {"date_to": (self.an_hour_ago - relativedelta(days=3)).strftime("%Y-%m-%d")} - ) - - assert len(session_recordings) == 1 - assert session_recordings[0]["session_id"] == "three days before base time" - - def test_recording_that_spans_time_bounds(self): - user = "test_recording_that_spans_time_bounds-user" - Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) - day_line = datetime(2021, 11, 5) - session_id = f"session-one-{user}" - produce_replay_summary( - distinct_id=user, - session_id=session_id, - first_timestamp=(day_line - relativedelta(hours=3)), - last_timestamp=(day_line + relativedelta(hours=3)), - team_id=self.team.id, - ) - - (session_recordings, _) = self._filter_recordings_by( - { - "date_to": day_line.strftime("%Y-%m-%d"), - "date_from": (day_line - relativedelta(days=10)).strftime("%Y-%m-%d"), - } - ) - - assert len(session_recordings) == 1 - assert session_recordings[0]["session_id"] == session_id - assert session_recordings[0]["duration"] == 6 * 60 * 60 - - @snapshot_clickhouse_queries - def test_person_id_filter(self): - three_user_ids = [str(uuid4()) for _ in range(3)] - session_id_one = f"test_person_id_filter-{str(uuid4())}" - session_id_two = f"test_person_id_filter-{str(uuid4())}" - p = Person.objects.create( - team=self.team, - distinct_ids=[three_user_ids[0], three_user_ids[1]], - properties={"email": "bla"}, - ) - produce_replay_summary( - distinct_id=three_user_ids[0], - session_id=session_id_one, - team_id=self.team.id, - ) - 
produce_replay_summary( - distinct_id=three_user_ids[1], - session_id=session_id_two, - team_id=self.team.id, - ) - produce_replay_summary( - distinct_id=three_user_ids[2], - session_id=str(uuid4()), - team_id=self.team.id, - ) - - (session_recordings, _) = self._filter_recordings_by({"person_uuid": str(p.uuid)}) - assert sorted([r["session_id"] for r in session_recordings]) == sorted([session_id_two, session_id_one]) - - @snapshot_clickhouse_queries - def test_all_filters_at_once(self): - three_user_ids = [str(uuid4()) for _ in range(3)] - target_session_id = f"test_all_filters_at_once-{str(uuid4())}" - - p = Person.objects.create( - team=self.team, - distinct_ids=[three_user_ids[0], three_user_ids[1]], - properties={"email": "bla"}, - ) - custom_event_action = self.create_action(name="custom-event") - - produce_replay_summary( - distinct_id=three_user_ids[0], - session_id=target_session_id, - first_timestamp=(self.an_hour_ago - relativedelta(days=3)), - team_id=self.team.id, - ) - produce_replay_summary( - # does not match because of user distinct id - distinct_id=three_user_ids[2], - session_id=target_session_id, - first_timestamp=(self.an_hour_ago - relativedelta(days=3)), - team_id=self.team.id, - ) - self.create_event( - three_user_ids[0], - self.an_hour_ago - relativedelta(days=3), - properties={"$session_id": target_session_id}, - ) - self.create_event( - three_user_ids[0], - self.an_hour_ago - relativedelta(days=3), - event_name="custom-event", - properties={"$browser": "Chrome", "$session_id": target_session_id}, - ) - produce_replay_summary( - distinct_id=three_user_ids[1], - session_id=target_session_id, - first_timestamp=(self.an_hour_ago - relativedelta(days=3) + relativedelta(hours=6)), - team_id=self.team.id, - ) - produce_replay_summary( - distinct_id=three_user_ids[1], - # does not match because of session id - session_id=str(uuid4()), - first_timestamp=(self.an_hour_ago - relativedelta(days=3) + relativedelta(hours=6)), - team_id=self.team.id, - ) - - flush_persons_and_events() - - (session_recordings, _) = self._filter_recordings_by( - { - "person_uuid": str(p.uuid), - "date_to": (self.an_hour_ago + relativedelta(days=3)).strftime("%Y-%m-%d"), - "date_from": (self.an_hour_ago - relativedelta(days=10)).strftime("%Y-%m-%d"), - "session_recording_duration": '{"type":"recording","key":"duration","value":60,"operator":"gt"}', - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - } - ], - "actions": [ - { - "id": custom_event_action.id, - "type": "actions", - "order": 1, - "name": "custom-event", - } - ], - } - ) - # TODO this test has no assertion🫠 - - def test_teams_dont_leak_event_filter(self): - user = "test_teams_dont_leak_event_filter-user" - Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) - another_team = Team.objects.create(organization=self.organization) - - session_id = f"test_teams_dont_leak_event_filter-{str(uuid4())}" - produce_replay_summary( - distinct_id=user, - session_id=session_id, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - self.create_event(1, self.an_hour_ago + relativedelta(seconds=15), team=another_team) - produce_replay_summary( - distinct_id=user, - session_id=session_id, - first_timestamp=(self.an_hour_ago + relativedelta(seconds=30)), - team_id=self.team.id, - ) - - (session_recordings, _) = self._filter_recordings_by( - { - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - } - ] - } - ) - assert 
session_recordings == [] - - @snapshot_clickhouse_queries - @also_test_with_materialized_columns(person_properties=["email"]) - def test_filter_with_person_properties_exact(self): - session_id_one, session_id_two = self._two_sessions_two_persons( - "test_filter_with_person_properties_exact", - session_one_person_properties={"email": "bla@gmail.com"}, - session_two_person_properties={"email": "bla2@hotmail.com"}, - ) - - query_results: SessionRecordingQueryResult = self._filter_recordings_by( - { - "properties": [ - { - "key": "email", - "value": ["bla@gmail.com"], - "operator": "exact", - "type": "person", - } - ] - } - ) - - assert [x["session_id"] for x in query_results.results] == [session_id_one] - - @snapshot_clickhouse_queries - @also_test_with_materialized_columns(person_properties=["email"]) - def test_filter_with_person_properties_not_contains(self): - session_id_one, session_id_two = self._two_sessions_two_persons( - "test_filter_with_person_properties_not_contains", - session_one_person_properties={"email": "bla@gmail.com"}, - session_two_person_properties={"email": "bla2@hotmail.com"}, - ) - - query_results: SessionRecordingQueryResult = self._filter_recordings_by( - {"properties": [{"key": "email", "value": "gmail.com", "operator": "not_icontains", "type": "person"}]} - ) - - assert [x["session_id"] for x in query_results.results] == [session_id_two] - - def _two_sessions_two_persons( - self, label: str, session_one_person_properties: dict, session_two_person_properties: dict - ) -> tuple[str, str]: - sessions = [] - - for i in range(2): - user = f"{label}-user-{i}" - session = f"{label}-session-{i}" - sessions.append(session) - - Person.objects.create( - team=self.team, - distinct_ids=[user], - properties=session_one_person_properties if i == 0 else session_two_person_properties, - ) - - produce_replay_summary( - distinct_id=user, - session_id=session, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - produce_replay_summary( - distinct_id=user, - session_id=session, - first_timestamp=(self.an_hour_ago + relativedelta(seconds=30)), - team_id=self.team.id, - ) - - return sessions[0], sessions[1] - - @snapshot_clickhouse_queries - @also_test_with_materialized_columns(person_properties=["$some_prop"]) - def test_filter_with_cohort_properties(self): - with self.settings(USE_PRECALCULATED_CH_COHORT_PEOPLE=True): - with freeze_time("2021-08-21T20:00:00.000Z"): - user_one = "test_filter_with_cohort_properties-user" - user_two = "test_filter_with_cohort_properties-user2" - session_id_one = f"test_filter_with_cohort_properties-1-{str(uuid4())}" - session_id_two = f"test_filter_with_cohort_properties-2-{str(uuid4())}" - - Person.objects.create(team=self.team, distinct_ids=[user_one], properties={"email": "bla"}) - Person.objects.create( - team=self.team, - distinct_ids=[user_two], - properties={"email": "bla2", "$some_prop": "some_val"}, - ) - cohort = Cohort.objects.create( - team=self.team, - name="cohort1", - groups=[ - { - "properties": [ - { - "key": "$some_prop", - "value": "some_val", - "type": "person", - } - ] - } - ], - ) - cohort.calculate_people_ch(pending_version=0) - - produce_replay_summary( - distinct_id=user_one, - session_id=session_id_one, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - # self.create_event(user_one, self.an_hour_ago, team=self.team) - produce_replay_summary( - distinct_id=user_one, - session_id=session_id_one, - first_timestamp=self.an_hour_ago + relativedelta(seconds=30), - team_id=self.team.id, - ) - 
produce_replay_summary( - distinct_id=user_two, - session_id=session_id_two, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - # self.create_event(user_two, self.an_hour_ago, team=self.team) - produce_replay_summary( - distinct_id=user_two, - session_id=session_id_two, - first_timestamp=self.an_hour_ago + relativedelta(seconds=30), - team_id=self.team.id, - ) - - (session_recordings, _) = self._filter_recordings_by( - { - "properties": [ - { - "key": "id", - "value": cohort.pk, - "operator": None, - "type": "cohort", - } - ] - } - ) - - assert len(session_recordings) == 1 - assert session_recordings[0]["session_id"] == session_id_two - - @snapshot_clickhouse_queries - @also_test_with_materialized_columns(person_properties=["$some_prop"]) - def test_filter_with_events_and_cohorts(self): - with self.settings(USE_PRECALCULATED_CH_COHORT_PEOPLE=True): - with freeze_time("2021-08-21T20:00:00.000Z"): - user_one = "test_filter_with_events_and_cohorts-user" - user_two = "test_filter_with_events_and_cohorts-user2" - session_id_one = f"test_filter_with_events_and_cohorts-1-{str(uuid4())}" - session_id_two = f"test_filter_with_events_and_cohorts-2-{str(uuid4())}" - - Person.objects.create(team=self.team, distinct_ids=[user_one], properties={"email": "bla"}) - Person.objects.create( - team=self.team, - distinct_ids=[user_two], - properties={"email": "bla2", "$some_prop": "some_val"}, - ) - cohort = Cohort.objects.create( - team=self.team, - name="cohort1", - groups=[ - { - "properties": [ - { - "key": "$some_prop", - "value": "some_val", - "type": "person", - } - ] - } - ], - ) - cohort.calculate_people_ch(pending_version=0) - - produce_replay_summary( - distinct_id=user_one, - session_id=session_id_one, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - self.create_event( - user_one, - self.an_hour_ago, - team=self.team, - event_name="custom_event", - properties={"$session_id": session_id_one}, - ) - produce_replay_summary( - distinct_id=user_one, - session_id=session_id_one, - first_timestamp=self.an_hour_ago + relativedelta(seconds=30), - team_id=self.team.id, - ) - produce_replay_summary( - distinct_id=user_two, - session_id=session_id_two, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - self.create_event( - user_two, - self.an_hour_ago, - team=self.team, - event_name="custom_event", - properties={"$session_id": session_id_two}, - ) - produce_replay_summary( - distinct_id=user_two, - session_id=session_id_two, - first_timestamp=self.an_hour_ago + relativedelta(seconds=30), - team_id=self.team.id, - ) - - (session_recordings, _) = self._filter_recordings_by( - { - # has to be in the cohort and pageview has to be in the events - # test data has one user in the cohort but no pageviews - "properties": [ - { - "key": "id", - "value": cohort.pk, - "operator": None, - "type": "cohort", - } - ], - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - } - ], - } - ) - - assert len(session_recordings) == 0 - - (session_recordings, _) = self._filter_recordings_by( - { - "properties": [ - { - "key": "id", - "value": cohort.pk, - "operator": None, - "type": "cohort", - } - ], - "events": [ - { - "id": "custom_event", - "type": "events", - "order": 0, - "name": "custom_event", - } - ], - } - ) - - assert len(session_recordings) == 1 - assert session_recordings[0]["session_id"] == session_id_two - - @snapshot_clickhouse_queries - @also_test_with_materialized_columns(["$current_url"]) - def 
test_event_filter_with_matching_on_session_id(self): - user_distinct_id = "test_event_filter_with_matching_on_session_id-user" - Person.objects.create(team=self.team, distinct_ids=[user_distinct_id], properties={"email": "bla"}) - session_id = f"test_event_filter_with_matching_on_session_id-1-{str(uuid4())}" - - self.create_event( - user_distinct_id, - self.an_hour_ago, - event_name="$pageview", - properties={"$session_id": session_id}, - ) - self.create_event( - user_distinct_id, - self.an_hour_ago, - event_name="$autocapture", - properties={"$session_id": str(uuid4())}, - ) - - produce_replay_summary( - distinct_id=user_distinct_id, - session_id=session_id, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - produce_replay_summary( - distinct_id=user_distinct_id, - session_id=session_id, - first_timestamp=self.an_hour_ago + relativedelta(seconds=30), - team_id=self.team.id, - ) - - (session_recordings, _) = self._filter_recordings_by( - { - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - } - ] - } - ) - - assert len(session_recordings) == 1 - assert session_recordings[0]["session_id"] == session_id - - (session_recordings, _) = self._filter_recordings_by( - { - "events": [ - { - "id": "$autocapture", - "type": "events", - "order": 0, - "name": "$autocapture", - } - ] - } - ) - assert session_recordings == [] - - @also_test_with_materialized_columns(event_properties=["$current_url", "$browser"], person_properties=["email"]) - @snapshot_clickhouse_queries - def test_event_filter_with_hogql_properties(self): - user = "test_event_filter_with_hogql_properties-user" - - Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) - - session_id = f"test_event_filter_with_hogql_properties-1-{str(uuid4())}" - self.create_event( - user, - self.an_hour_ago, - properties={ - "$browser": "Chrome", - "$session_id": session_id, - "$window_id": str(uuid4()), - }, - ) - - produce_replay_summary( - distinct_id=user, - session_id=session_id, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - produce_replay_summary( - distinct_id=user, - session_id=session_id, - first_timestamp=self.an_hour_ago + relativedelta(seconds=30), - team_id=self.team.id, - ) - - (session_recordings, _) = self._filter_recordings_by( - { - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - "properties": [ - {"key": "properties.$browser == 'Chrome'", "type": "hogql"}, - ], - } - ] - } - ) - - assert len(session_recordings) == 1 - assert session_recordings[0]["session_id"] == session_id - - (session_recordings, _) = self._filter_recordings_by( - { - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - "properties": [{"key": "properties.$browser == 'Firefox'", "type": "hogql"}], - } - ] - } - ) - - assert session_recordings == [] - - @snapshot_clickhouse_queries - def test_event_filter_with_hogql_person_properties(self): - user = "test_event_filter_with_hogql_properties-user" - - Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) - - session_id = f"test_event_filter_with_hogql_properties-1-{str(uuid4())}" - self.create_event( - user, - self.an_hour_ago, - properties={ - "$browser": "Chrome", - "$session_id": session_id, - "$window_id": str(uuid4()), - }, - ) - - produce_replay_summary( - distinct_id=user, - session_id=session_id, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - 
produce_replay_summary( - distinct_id=user, - session_id=session_id, - first_timestamp=self.an_hour_ago + relativedelta(seconds=30), - team_id=self.team.id, - ) - - (session_recordings, _) = self._filter_recordings_by( - { - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - "properties": [ - { - "key": "person.properties.email == 'bla'", - "type": "hogql", - }, - ], - } - ] - } - ) - - assert len(session_recordings) == 1 - assert session_recordings[0]["session_id"] == session_id - - (session_recordings, _) = self._filter_recordings_by( - { - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - "properties": [ - { - "key": "person.properties.email == 'something else'", - "type": "hogql", - }, - ], - } - ] - } - ) - - assert session_recordings == [] - - @also_test_with_materialized_columns(["$current_url", "$browser"]) - @snapshot_clickhouse_queries - @freeze_time("2021-01-21T20:00:00.000Z") - def test_any_event_filter_with_properties(self): - Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) - - page_view_session_id = f"pageview-session-{str(uuid4())}" - my_custom_event_session_id = f"my-custom-event-session-{str(uuid4())}" - non_matching__event_session_id = f"non-matching-event-session-{str(uuid4())}" - - self.create_event( - "user", - self.an_hour_ago, - properties={ - "$browser": "Chrome", - "$session_id": page_view_session_id, - "$window_id": "1", - }, - event_name="$pageview", - ) - - self.create_event( - "user", - self.an_hour_ago, - properties={ - "$browser": "Chrome", - "$session_id": my_custom_event_session_id, - "$window_id": "1", - }, - event_name="my-custom-event", - ) - - self.create_event( - "user", - self.an_hour_ago, - properties={ - "$browser": "Safari", - "$session_id": non_matching__event_session_id, - "$window_id": "1", - }, - event_name="my-non-matching-event", - ) - - produce_replay_summary( - distinct_id="user", - session_id=page_view_session_id, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - produce_replay_summary( - distinct_id="user", - session_id=my_custom_event_session_id, - first_timestamp=self.an_hour_ago + relativedelta(seconds=30), - team_id=self.team.id, - ) - produce_replay_summary( - distinct_id="user", - session_id=non_matching__event_session_id, - first_timestamp=self.an_hour_ago + relativedelta(seconds=30), - team_id=self.team.id, - ) - - (session_recordings, _) = self._filter_recordings_by( - { - "events": [ - { - # an id of null means "match any event" - "id": None, - "type": "events", - "order": 0, - "name": "All events", - "properties": [], - } - ] - } - ) - - assert sorted( - [sr["session_id"] for sr in session_recordings], - ) == [ - my_custom_event_session_id, - non_matching__event_session_id, - page_view_session_id, - ] - - (session_recordings, _) = self._filter_recordings_by( - { - "events": [ - { - # an id of null means "match any event" - "id": None, - "type": "events", - "order": 0, - "name": "All events", - "properties": [ - { - "key": "$browser", - "value": ["Chrome"], - "operator": "exact", - "type": "event", - } - ], - } - ] - } - ) - - assert sorted( - [sr["session_id"] for sr in session_recordings], - ) == [ - my_custom_event_session_id, - page_view_session_id, - ] - - (session_recordings, _) = self._filter_recordings_by( - { - "events": [ - { - "id": None, - "type": "events", - "order": 0, - "name": "All events", - "properties": [ - { - "key": "$browser", - "value": ["Firefox"], - 
"operator": "exact", - "type": "event", - } - ], - } - ] - } - ) - assert session_recordings == [] - - @snapshot_clickhouse_queries - @freeze_time("2021-01-21T20:00:00.000Z") - def test_filter_for_recordings_with_console_logs(self): - Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) - - with_logs_session_id = f"with-logs-session-{str(uuid4())}" - without_logs_session_id = f"no-logs-session-{str(uuid4())}" - - produce_replay_summary( - distinct_id="user", - session_id=with_logs_session_id, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - console_log_count=4, - ) - produce_replay_summary( - distinct_id="user", - session_id=without_logs_session_id, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - - (session_recordings, _) = self._filter_recordings_by({"console_logs": ["info"]}) - - assert sorted( - [(sr["session_id"], sr["console_log_count"]) for sr in session_recordings], - key=lambda x: x[0], - ) == [ - (with_logs_session_id, 4), - ] - - (session_recordings, _) = self._filter_recordings_by({"console_logs": ["warn"]}) - - assert session_recordings == [] - - @snapshot_clickhouse_queries - @freeze_time("2021-01-21T20:00:00.000Z") - def test_filter_for_recordings_with_console_warns(self): - Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) - - with_logs_session_id = f"with-logs-session-{str(uuid4())}" - without_logs_session_id = f"no-logs-session-{str(uuid4())}" - - produce_replay_summary( - distinct_id="user", - session_id=with_logs_session_id, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - console_warn_count=4, - ) - produce_replay_summary( - distinct_id="user", - session_id=without_logs_session_id, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - - (session_recordings, _) = self._filter_recordings_by({"console_logs": ["warn"]}) - - assert sorted( - [(sr["session_id"], sr["console_warn_count"]) for sr in session_recordings], - key=lambda x: x[0], - ) == [ - (with_logs_session_id, 4), - ] - - (session_recordings, _) = self._filter_recordings_by({"console_logs": ["info"]}) - - assert session_recordings == [] - - @snapshot_clickhouse_queries - @freeze_time("2021-01-21T20:00:00.000Z") - def test_filter_for_recordings_with_console_errors(self): - Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) - - with_logs_session_id = f"with-logs-session-{str(uuid4())}" - without_logs_session_id = f"no-logs-session-{str(uuid4())}" - - produce_replay_summary( - distinct_id="user", - session_id=with_logs_session_id, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - console_error_count=4, - ) - produce_replay_summary( - distinct_id="user", - session_id=without_logs_session_id, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - - (session_recordings, _) = self._filter_recordings_by({"console_logs": ["error"]}) - - assert sorted( - [(sr["session_id"], sr["console_error_count"]) for sr in session_recordings], - key=lambda x: x[0], - ) == [ - (with_logs_session_id, 4), - ] - - (session_recordings, _) = self._filter_recordings_by({"console_logs": ["info"]}) - - assert session_recordings == [] - - @snapshot_clickhouse_queries - @freeze_time("2021-01-21T20:00:00.000Z") - def test_filter_for_recordings_with_mixed_console_counts(self): - Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) - - with_logs_session_id = f"with-logs-session-{str(uuid4())}" - with_warns_session_id = 
f"with-warns-session-{str(uuid4())}" - with_errors_session_id = f"with-errors-session-{str(uuid4())}" - with_two_session_id = f"with-two-session-{str(uuid4())}" - - produce_replay_summary( - distinct_id="user", - session_id=with_logs_session_id, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - console_log_count=4, - ) - produce_replay_summary( - distinct_id="user", - session_id=with_warns_session_id, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - console_warn_count=4, - ) - produce_replay_summary( - distinct_id="user", - session_id=with_errors_session_id, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - console_error_count=4, - ) - produce_replay_summary( - distinct_id="user", - session_id=with_two_session_id, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - console_error_count=4, - console_log_count=3, - ) - - (session_recordings, _) = self._filter_recordings_by({"console_logs": ["warn", "error"]}) - - assert sorted([sr["session_id"] for sr in session_recordings]) == sorted( - [ - with_errors_session_id, - with_two_session_id, - with_warns_session_id, - ] - ) - - (session_recordings, _) = self._filter_recordings_by({"console_logs": ["info"]}) - - assert sorted([sr["session_id"] for sr in session_recordings]) == sorted( - [ - with_two_session_id, - with_logs_session_id, - ] - ) - - @snapshot_clickhouse_queries - @freeze_time("2021-01-21T20:00:00.000Z") - def test_filter_for_recordings_by_console_text(self): - Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) - - with_logs_session_id = "with-logs-session" - with_warns_session_id = "with-warns-session" - with_errors_session_id = "with-errors-session" - with_two_session_id = "with-two-session" - - produce_replay_summary( - distinct_id="user", - session_id=with_logs_session_id, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - console_log_count=4, - log_messages={ - "info": [ - "log message 1", - "log message 2", - "log message 3", - "log message 4", - ] - }, - ) - produce_replay_summary( - distinct_id="user", - session_id=with_warns_session_id, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - console_warn_count=5, - log_messages={ - "warn": [ - "warn message 1", - "warn message 2", - "warn message 3", - "warn message 4", - "warn message 5", - ] - }, - ) - produce_replay_summary( - distinct_id="user", - session_id=with_errors_session_id, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - console_error_count=4, - log_messages={ - "error": [ - "error message 1", - "error message 2", - "error message 3", - "error message 4", - ] - }, - ) - produce_replay_summary( - distinct_id="user", - session_id=with_two_session_id, - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - console_error_count=4, - console_log_count=3, - log_messages={ - "error": [ - "error message 1", - "error message 2", - "error message 3", - "error message 4", - ], - "info": ["log message 1", "log message 2", "log message 3"], - }, - ) - - (session_recordings, _) = self._filter_recordings_by( - { - # there are 5 warn and 4 error logs, message 4 matches in both - "console_logs": ["warn", "error"], - "console_search_query": "message 4", - } - ) - - assert sorted([sr["session_id"] for sr in session_recordings]) == sorted( - [ - with_errors_session_id, - with_two_session_id, - with_warns_session_id, - ] - ) - - (session_recordings, _) = self._filter_recordings_by( - { - # there are 5 warn and 4 error logs, message 5 matches only matches in warn - 
"console_logs": ["warn", "error"], - "console_search_query": "message 5", - } - ) - - assert sorted([sr["session_id"] for sr in session_recordings]) == sorted( - [ - with_warns_session_id, - ] - ) - - (session_recordings, _) = self._filter_recordings_by( - { - # match is case-insensitive - "console_logs": ["warn", "error"], - "console_search_query": "MESSAGE 5", - } - ) - - assert sorted([sr["session_id"] for sr in session_recordings]) == sorted( - [ - with_warns_session_id, - ] - ) - - (session_recordings, _) = self._filter_recordings_by( - { - # message 5 does not match log level "info" - "console_logs": ["info"], - "console_search_query": "message 5", - } - ) - - assert sorted([sr["session_id"] for sr in session_recordings]) == sorted([]) - - @also_test_with_materialized_columns( - event_properties=["is_internal_user"], - person_properties=["email"], - verify_no_jsonextract=False, - ) - @freeze_time("2021-01-21T20:00:00.000Z") - @snapshot_clickhouse_queries - def test_event_filter_with_test_accounts_excluded(self): - self.team.test_account_filters = [ - { - "key": "email", - "value": "@posthog.com", - "operator": "not_icontains", - "type": "person", - }, - { - "key": "is_internal_user", - "value": ["false"], - "operator": "exact", - "type": "event", - }, - {"key": "properties.$browser == 'Chrome'", "type": "hogql"}, - ] - self.team.save() - - Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) - - produce_replay_summary( - distinct_id="user", - session_id="1", - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - self.create_event( - "user", - self.an_hour_ago, - properties={ - "$session_id": "1", - "$window_id": "1", - "is_internal_user": "true", - }, - ) - produce_replay_summary( - distinct_id="user", - session_id="1", - first_timestamp=self.an_hour_ago + relativedelta(seconds=30), - team_id=self.team.id, - ) - - (session_recordings, _) = self._filter_recordings_by( - { - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - } - ], - "filter_test_accounts": True, - } - ) - self.assertEqual(len(session_recordings), 0) - - (session_recordings, _) = self._filter_recordings_by( - { - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - } - ], - "filter_test_accounts": False, - } - ) - self.assertEqual(len(session_recordings), 1) - - @also_test_with_materialized_columns( - event_properties=["$browser"], - person_properties=["email"], - verify_no_jsonextract=False, - ) - @freeze_time("2021-01-21T20:00:00.000Z") - @snapshot_clickhouse_queries - def test_event_filter_with_hogql_event_properties_test_accounts_excluded(self): - self.team.test_account_filters = [ - {"key": "properties.$browser == 'Chrome'", "type": "hogql"}, - ] - self.team.save() - - Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) - Person.objects.create( - team=self.team, - distinct_ids=["user2"], - properties={"email": "not-the-other-one"}, - ) - - produce_replay_summary( - distinct_id="user", - session_id="1", - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - self.create_event( - "user", - self.an_hour_ago, - properties={"$session_id": "1", "$window_id": "1", "$browser": "Chrome"}, - ) - produce_replay_summary( - distinct_id="user", - session_id="1", - first_timestamp=self.an_hour_ago + relativedelta(seconds=30), - team_id=self.team.id, - ) - - produce_replay_summary( - distinct_id="user2", - session_id="2", - 
first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - self.create_event( - "user2", - self.an_hour_ago, - properties={"$session_id": "2", "$window_id": "1", "$browser": "Firefox"}, - ) - - # there are 2 pageviews - (session_recordings, _) = self._filter_recordings_by( - { - # pageview that matches the hogql test_accounts filter - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - } - ], - "filter_test_accounts": False, - } - ) - self.assertEqual(len(session_recordings), 2) - - self.team.test_account_filters = [ - {"key": "person.properties.email == 'bla'", "type": "hogql"}, - ] - self.team.save() - - (session_recordings, _) = self._filter_recordings_by( - { - # only 1 pageview that matches the hogql test_accounts filter - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - } - ], - "filter_test_accounts": True, - } - ) - self.assertEqual(len(session_recordings), 1) - - self.team.test_account_filters = [ - {"key": "properties.$browser == 'Chrome'", "type": "hogql"}, - {"key": "person.properties.email == 'bla'", "type": "hogql"}, - ] - self.team.save() - - # one user sessions matches the person + event test_account filter - (session_recordings, _) = self._filter_recordings_by( - { - "filter_test_accounts": True, - } - ) - self.assertEqual(len(session_recordings), 1) - - # TRICKY: we had to disable use of materialized columns for part of the query generation - # due to RAM usage issues on the EU cluster - @also_test_with_materialized_columns(event_properties=["is_internal_user"], verify_no_jsonextract=False) - @freeze_time("2021-01-21T20:00:00.000Z") - @snapshot_clickhouse_queries - def test_top_level_event_property_test_account_filter(self): - """ - This is a regression test. A user with an $ip test account filter - reported the filtering wasn't working. 
- - The filter wasn't triggering the "should join events check", and so we didn't apply the filter at all - """ - self.team.test_account_filters = [ - { - "key": "is_internal_user", - "value": ["false"], - "operator": "exact", - "type": "event", - }, - ] - self.team.save() - - Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) - Person.objects.create( - team=self.team, - distinct_ids=["user2"], - properties={"email": "not-the-other-one"}, - ) - - produce_replay_summary( - distinct_id="user", - session_id="1", - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - self.create_event( - "user", - self.an_hour_ago, - properties={ - "$session_id": "1", - "$window_id": "1", - "is_internal_user": False, - }, - ) - produce_replay_summary( - distinct_id="user", - session_id="1", - first_timestamp=self.an_hour_ago + relativedelta(seconds=30), - team_id=self.team.id, - ) - - produce_replay_summary( - distinct_id="user2", - session_id="2", - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - self.create_event( - "user2", - self.an_hour_ago, - properties={ - "$session_id": "2", - "$window_id": "1", - "is_internal_user": True, - }, - ) - - # there are 2 pageviews - (session_recordings, _) = self._filter_recordings_by( - { - # pageview that matches the hogql test_accounts filter - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - } - ], - "filter_test_accounts": False, - } - ) - self.assertEqual(len(session_recordings), 2) - - (session_recordings, _) = self._filter_recordings_by( - { - # only 1 pageview that matches the test_accounts filter - "filter_test_accounts": True, - } - ) - self.assertEqual(len(session_recordings), 1) - - # TRICKY: we had to disable use of materialized columns for part of the query generation - # due to RAM usage issues on the EU cluster - @also_test_with_materialized_columns(event_properties=["is_internal_user"], verify_no_jsonextract=True) - @freeze_time("2021-01-21T20:00:00.000Z") - @snapshot_clickhouse_queries - def test_top_level_event_property_test_account_filter_allowing_denormalized_props(self): - """ - This is a duplicate of the test test_top_level_event_property_test_account_filter - but with denormalized props allowed - """ - - with self.settings(ALLOW_DENORMALIZED_PROPS_IN_LISTING=True): - self.team.test_account_filters = [ - { - "key": "is_internal_user", - "value": ["false"], - "operator": "exact", - "type": "event", - }, - ] - self.team.save() - - Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) - Person.objects.create( - team=self.team, - distinct_ids=["user2"], - properties={"email": "not-the-other-one"}, - ) - - produce_replay_summary( - distinct_id="user", - session_id="1", - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - self.create_event( - "user", - self.an_hour_ago, - properties={ - "$session_id": "1", - "$window_id": "1", - "is_internal_user": False, - }, - ) - produce_replay_summary( - distinct_id="user", - session_id="1", - first_timestamp=self.an_hour_ago + relativedelta(seconds=30), - team_id=self.team.id, - ) - - produce_replay_summary( - distinct_id="user2", - session_id="2", - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - self.create_event( - "user2", - self.an_hour_ago, - properties={ - "$session_id": "2", - "$window_id": "1", - "is_internal_user": True, - }, - ) - - # there are 2 pageviews - (session_recordings, _) = self._filter_recordings_by( - { - # pageview that 
matches the hogql test_accounts filter - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - } - ], - "filter_test_accounts": False, - } - ) - self.assertEqual(len(session_recordings), 2) - - (session_recordings, _) = self._filter_recordings_by( - { - # only 1 pageview that matches the test_accounts filter - "filter_test_accounts": True, - } - ) - self.assertEqual(len(session_recordings), 1) - - @also_test_with_materialized_columns(event_properties=["is_internal_user"]) - @freeze_time("2021-01-21T20:00:00.000Z") - @snapshot_clickhouse_queries - def test_top_level_hogql_event_property_test_account_filter(self): - """ - This is a regression test. A user with an $ip test account filter - reported the filtering wasn't working. - - The filter wasn't triggering the "should join events" check, and so we didn't apply the filter at all - """ - self.team.test_account_filters = [ - {"key": "properties.is_internal_user == 'true'", "type": "hogql"}, - ] - self.team.save() - - Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) - Person.objects.create( - team=self.team, - distinct_ids=["user2"], - properties={"email": "not-the-other-one"}, - ) - - produce_replay_summary( - distinct_id="user", - session_id="1", - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - self.create_event( - "user", - self.an_hour_ago, - properties={ - "$session_id": "1", - "$window_id": "1", - "is_internal_user": False, - }, - ) - produce_replay_summary( - distinct_id="user", - session_id="1", - first_timestamp=self.an_hour_ago + relativedelta(seconds=30), - team_id=self.team.id, - ) - - produce_replay_summary( - distinct_id="user2", - session_id="2", - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - self.create_event( - "user2", - self.an_hour_ago, - properties={ - "$session_id": "2", - "$window_id": "1", - "is_internal_user": True, - }, - ) - - # there are 2 pageviews - (session_recordings, _) = self._filter_recordings_by( - { - # pageview that matches the hogql test_accounts filter - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - } - ], - "filter_test_accounts": False, - } - ) - self.assertEqual(len(session_recordings), 2) - - (session_recordings, _) = self._filter_recordings_by( - { - # only 1 pageview that matches the test_accounts filter - "filter_test_accounts": True, - } - ) - self.assertEqual(len(session_recordings), 1) - - @also_test_with_materialized_columns(person_properties=["email"], verify_no_jsonextract=False) - @freeze_time("2021-01-21T20:00:00.000Z") - @snapshot_clickhouse_queries - def test_top_level_hogql_person_property_test_account_filter(self): - """ - This is a regression test. A user with an $ip test account filter - reported the filtering wasn't working. 
- - The filter wasn't triggering the "should join events" check, and so we didn't apply the filter at all - """ - self.team.test_account_filters = [ - {"key": "person.properties.email == 'bla'", "type": "hogql"}, - ] - self.team.save() - - Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) - Person.objects.create( - team=self.team, - distinct_ids=["user2"], - properties={"email": "not-the-other-one"}, - ) - - produce_replay_summary( - distinct_id="user", - session_id="1", - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - self.create_event( - "user", - self.an_hour_ago, - properties={ - "$session_id": "1", - "$window_id": "1", - "is_internal_user": False, - }, - ) - produce_replay_summary( - distinct_id="user", - session_id="1", - first_timestamp=self.an_hour_ago + relativedelta(seconds=30), - team_id=self.team.id, - ) - - produce_replay_summary( - distinct_id="user2", - session_id="2", - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - self.create_event( - "user2", - self.an_hour_ago, - properties={ - "$session_id": "2", - "$window_id": "1", - "is_internal_user": True, - }, - ) - - # there are 2 pageviews - (session_recordings, _) = self._filter_recordings_by( - { - # pageview that matches the hogql test_accounts filter - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - } - ], - "filter_test_accounts": False, - } - ) - self.assertEqual(len(session_recordings), 2) - - (session_recordings, _) = self._filter_recordings_by( - { - # only 1 pageview that matches the test_accounts filter - "filter_test_accounts": True, - } - ) - self.assertEqual(len(session_recordings), 1) - - @also_test_with_materialized_columns(person_properties=["email"], verify_no_jsonextract=False) - @freeze_time("2021-01-21T20:00:00.000Z") - @snapshot_clickhouse_queries - def test_top_level_person_property_test_account_filter(self): - """ - This is a regression test. A user with an $ip test account filter - reported the filtering wasn't working. 
- - The filter wasn't triggering the "should join events" check, and so we didn't apply the filter at all - """ - self.team.test_account_filters = [{"key": "email", "value": ["bla"], "operator": "exact", "type": "person"}] - self.team.save() - - Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) - Person.objects.create( - team=self.team, - distinct_ids=["user2"], - properties={"email": "not-the-other-one"}, - ) - - produce_replay_summary( - distinct_id="user", - session_id="1", - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - self.create_event( - "user", - self.an_hour_ago, - properties={ - "$session_id": "1", - "$window_id": "1", - "is_internal_user": False, - }, - ) - produce_replay_summary( - distinct_id="user", - session_id="1", - first_timestamp=self.an_hour_ago + relativedelta(seconds=30), - team_id=self.team.id, - ) - - produce_replay_summary( - distinct_id="user2", - session_id="2", - first_timestamp=self.an_hour_ago, - team_id=self.team.id, - ) - self.create_event( - "user2", - self.an_hour_ago, - properties={ - "$session_id": "2", - "$window_id": "1", - "is_internal_user": True, - }, - ) - - # there are 2 pageviews - (session_recordings, _) = self._filter_recordings_by( - { - # pageview that matches the hogql test_accounts filter - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - } - ], - "filter_test_accounts": False, - } - ) - self.assertEqual(len(session_recordings), 2) - - (session_recordings, _) = self._filter_recordings_by( - { - # only 1 pageview that matches the test_accounts filter - "filter_test_accounts": True, - } - ) - self.assertEqual(len(session_recordings), 1) - - @freeze_time("2021-01-21T20:00:00.000Z") - @snapshot_clickhouse_queries - def test_event_filter_with_two_events_and_multiple_teams(self): - another_team = Team.objects.create(organization=self.organization) - - # two teams, user with the same properties - Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) - Person.objects.create(team=another_team, distinct_ids=["user"], properties={"email": "bla"}) - - # a recording session with a pageview and a pageleave - self._a_session_with_two_events(self.team, "1") - self._a_session_with_two_events(another_team, "2") - - (session_recordings, _) = self._filter_recordings_by( - { - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - }, - { - "id": "$pageleave", - "type": "events", - "order": 0, - "name": "$pageleave", - }, - ], - } - ) - - self.assertEqual([sr["session_id"] for sr in session_recordings], ["1"]) - - def _a_session_with_two_events(self, team: Team, session_id: str) -> None: - produce_replay_summary( - distinct_id="user", - session_id=session_id, - first_timestamp=self.an_hour_ago, - team_id=team.pk, - ) - self.create_event( - "user", - self.an_hour_ago, - team=team, - event_name="$pageview", - properties={"$session_id": session_id, "$window_id": "1"}, - ) - self.create_event( - "user", - self.an_hour_ago, - team=team, - event_name="$pageleave", - properties={"$session_id": session_id, "$window_id": "1"}, - ) - - @freeze_time("2021-01-21T20:00:00.000Z") - @snapshot_clickhouse_queries - def test_event_filter_with_group_filter(self): - Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) - session_id = f"test_event_filter_with_group_filter-ONE-{uuid4()}" - different_group_session = f"test_event_filter_with_group_filter-TWO-{uuid4()}" - 
- produce_replay_summary( - distinct_id="user", - session_id=session_id, - first_timestamp=self.an_hour_ago, - team_id=self.team.pk, - ) - produce_replay_summary( - distinct_id="user", - session_id=different_group_session, - first_timestamp=self.an_hour_ago, - team_id=self.team.pk, - ) - - GroupTypeMapping.objects.create(team=self.team, group_type="project", group_type_index=0) - create_group( - team_id=self.team.pk, - group_type_index=0, - group_key="project:1", - properties={"name": "project one"}, - ) - - GroupTypeMapping.objects.create(team=self.team, group_type="organization", group_type_index=1) - create_group( - team_id=self.team.pk, - group_type_index=1, - group_key="org:1", - properties={"name": "org one"}, - ) - - self.create_event( - "user", - self.an_hour_ago, - team=self.team, - event_name="$pageview", - properties={ - "$session_id": session_id, - "$window_id": "1", - "$group_1": "org:1", - }, - ) - self.create_event( - "user", - self.an_hour_ago, - team=self.team, - event_name="$pageview", - properties={ - "$session_id": different_group_session, - "$window_id": "1", - "$group_0": "project:1", - }, - ) - - (session_recordings, _) = self._filter_recordings_by( - { - "events": [ - { - "id": "$pageview", - "type": "events", - "order": 0, - "name": "$pageview", - "properties": [ - { - "key": "name", - "value": ["org one"], - "operator": "exact", - "type": "group", - "group_type_index": 1, - } - ], - } - ], - } - ) - - self.assertEqual([sr["session_id"] for sr in session_recordings], [session_id]) diff --git a/posthog/session_recordings/session_recording_api.py b/posthog/session_recordings/session_recording_api.py index e4ecc1ccfe37a..0b43b05c4e571 100644 --- a/posthog/session_recordings/session_recording_api.py +++ b/posthog/session_recordings/session_recording_api.py @@ -37,12 +37,9 @@ SessionRecordingViewed, ) -from posthog.session_recordings.queries.session_recording_list_from_replay_summary import ( - SessionRecordingListFromReplaySummary, - SessionIdEventsQuery, -) from posthog.session_recordings.queries.session_recording_list_from_filters import ( SessionRecordingListFromFilters, + ReplayFiltersEventsSubQuery, ) from posthog.session_recordings.queries.session_recording_properties import ( SessionRecordingProperties, @@ -302,8 +299,21 @@ def matching_events(self, request: request.Request, *args: Any, **kwargs: Any) - "Must specify at least one event or action filter", ) - matching_events: list[str] = SessionIdEventsQuery(filter=filter, team=self.team).matching_events() - return JsonResponse(data={"results": matching_events}) + distinct_id = str(cast(User, request.user).distinct_id) + modifiers = safely_read_modifiers_overrides(distinct_id, self.team) + matching_events_query_response = ReplayFiltersEventsSubQuery( + filter=filter, team=self.team, hogql_query_modifiers=modifiers + ).get_event_ids_for_session() + + response = JsonResponse(data={"results": matching_events_query_response.results}) + + response.headers["Server-Timing"] = ", ".join( + f"{key};dur={round(duration, ndigits=2)}" + for key, duration in _generate_timings( + matching_events_query_response.timings, ServerTimingsGathered() + ).items() + ) + return response # Returns metadata about the recording def retrieve(self, request: request.Request, *args: Any, **kwargs: Any) -> Response: @@ -756,23 +766,13 @@ def list_recordings( filter = filter.shallow_clone({SESSION_RECORDINGS_FILTER_IDS: remaining_session_ids}) if (all_session_ids and filter.session_ids) or not all_session_ids: - has_hog_ql_filtering = 
request.GET.get("hog_ql_filtering", "false") == "true" - - if has_hog_ql_filtering: - distinct_id = str(cast(User, request.user).distinct_id) - modifiers = safely_read_modifiers_overrides(distinct_id, team) + distinct_id = str(cast(User, request.user).distinct_id) + modifiers = safely_read_modifiers_overrides(distinct_id, team) - with timer("load_recordings_from_hogql"): - (ch_session_recordings, more_recordings_available, hogql_timings) = SessionRecordingListFromFilters( - filter=filter, team=team, hogql_query_modifiers=modifiers - ).run() - else: - # Only go to clickhouse if we still have remaining specified IDs, or we are not specifying IDs - with timer("load_recordings_from_clickhouse"): - ( - ch_session_recordings, - more_recordings_available, - ) = SessionRecordingListFromReplaySummary(filter=filter, team=team).run() + with timer("load_recordings_from_hogql"): + (ch_session_recordings, more_recordings_available, hogql_timings) = SessionRecordingListFromFilters( + filter=filter, team=team, hogql_query_modifiers=modifiers + ).run() with timer("build_recordings"): recordings_from_clickhouse = SessionRecording.get_or_build_from_clickhouse(team, ch_session_recordings) diff --git a/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr b/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr index 6369fc4d2bec8..1241e89b6aa2a 100644 --- a/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr +++ b/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr @@ -429,62 +429,76 @@ # --- # name: TestSessionRecordings.test_get_session_recordings.16 ''' - SELECT "posthog_sessionrecording"."id", - "posthog_sessionrecording"."session_id", - "posthog_sessionrecording"."team_id", - "posthog_sessionrecording"."created_at", - "posthog_sessionrecording"."deleted", - "posthog_sessionrecording"."object_storage_path", - "posthog_sessionrecording"."distinct_id", - "posthog_sessionrecording"."duration", - "posthog_sessionrecording"."active_seconds", - "posthog_sessionrecording"."inactive_seconds", - "posthog_sessionrecording"."start_time", - "posthog_sessionrecording"."end_time", - "posthog_sessionrecording"."click_count", - "posthog_sessionrecording"."keypress_count", - "posthog_sessionrecording"."mouse_activity_count", - "posthog_sessionrecording"."console_log_count", - "posthog_sessionrecording"."console_warn_count", - "posthog_sessionrecording"."console_error_count", - "posthog_sessionrecording"."start_url", - "posthog_sessionrecording"."storage_version" - FROM "posthog_sessionrecording" - WHERE ("posthog_sessionrecording"."session_id" IN ('test_get_session_recordings-1', - 'test_get_session_recordings-2') - AND "posthog_sessionrecording"."team_id" = 2) + SELECT "posthog_organization"."id", + "posthog_organization"."name", + "posthog_organization"."slug", + "posthog_organization"."created_at", + "posthog_organization"."updated_at", + "posthog_organization"."plugins_access_level", + "posthog_organization"."for_internal_metrics", + "posthog_organization"."is_member_join_email_enabled", + "posthog_organization"."enforce_2fa", + "posthog_organization"."is_hipaa", + "posthog_organization"."customer_id", + "posthog_organization"."available_product_features", + "posthog_organization"."usage", + "posthog_organization"."never_drop_data", + "posthog_organization"."customer_trust_scores", + "posthog_organization"."setup_section_2_completed", + "posthog_organization"."personalization", + "posthog_organization"."domain_whitelist" + FROM 
"posthog_organization" + WHERE "posthog_organization"."id" = '00000000-0000-0000-0000-000000000000'::uuid + LIMIT 21 ''' # --- # name: TestSessionRecordings.test_get_session_recordings.17 ''' - SELECT "posthog_sessionrecordingviewed"."session_id" - FROM "posthog_sessionrecordingviewed" - WHERE ("posthog_sessionrecordingviewed"."team_id" = 2 - AND "posthog_sessionrecordingviewed"."user_id" = 2) + SELECT "posthog_grouptypemapping"."id", + "posthog_grouptypemapping"."team_id", + "posthog_grouptypemapping"."group_type", + "posthog_grouptypemapping"."group_type_index", + "posthog_grouptypemapping"."name_singular", + "posthog_grouptypemapping"."name_plural" + FROM "posthog_grouptypemapping" + WHERE "posthog_grouptypemapping"."team_id" = 2 ''' # --- # name: TestSessionRecordings.test_get_session_recordings.18 ''' - SELECT "posthog_persondistinctid"."id", - "posthog_persondistinctid"."team_id", - "posthog_persondistinctid"."person_id", - "posthog_persondistinctid"."distinct_id", - "posthog_persondistinctid"."version", - "posthog_person"."id", - "posthog_person"."created_at", - "posthog_person"."properties_last_updated_at", - "posthog_person"."properties_last_operation", - "posthog_person"."team_id", - "posthog_person"."properties", - "posthog_person"."is_user_id", - "posthog_person"."is_identified", - "posthog_person"."uuid", - "posthog_person"."version" - FROM "posthog_persondistinctid" - INNER JOIN "posthog_person" ON ("posthog_persondistinctid"."person_id" = "posthog_person"."id") - WHERE ("posthog_persondistinctid"."distinct_id" IN ('user2', - 'user_one_0') - AND "posthog_persondistinctid"."team_id" = 2) + SELECT "posthog_datawarehousetable"."created_by_id", + "posthog_datawarehousetable"."created_at", + "posthog_datawarehousetable"."deleted", + "posthog_datawarehousetable"."id", + "posthog_datawarehousetable"."name", + "posthog_datawarehousetable"."format", + "posthog_datawarehousetable"."team_id", + "posthog_datawarehousetable"."url_pattern", + "posthog_datawarehousetable"."credential_id", + "posthog_datawarehousetable"."external_data_source_id", + "posthog_datawarehousetable"."columns", + "posthog_datawarehousetable"."row_count" + FROM "posthog_datawarehousetable" + WHERE ("posthog_datawarehousetable"."team_id" = 2 + AND NOT ("posthog_datawarehousetable"."deleted" + AND "posthog_datawarehousetable"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_get_session_recordings.19 + ''' + SELECT "posthog_datawarehousesavedquery"."created_by_id", + "posthog_datawarehousesavedquery"."created_at", + "posthog_datawarehousesavedquery"."deleted", + "posthog_datawarehousesavedquery"."id", + "posthog_datawarehousesavedquery"."name", + "posthog_datawarehousesavedquery"."team_id", + "posthog_datawarehousesavedquery"."columns", + "posthog_datawarehousesavedquery"."external_tables", + "posthog_datawarehousesavedquery"."query" + FROM "posthog_datawarehousesavedquery" + WHERE ("posthog_datawarehousesavedquery"."team_id" = 2 + AND NOT ("posthog_datawarehousesavedquery"."deleted" + AND "posthog_datawarehousesavedquery"."deleted" IS NOT NULL)) ''' # --- # name: TestSessionRecordings.test_get_session_recordings.2 @@ -549,6 +563,151 @@ LIMIT 21 ''' # --- +# name: TestSessionRecordings.test_get_session_recordings.20 + ''' + SELECT "posthog_datawarehousejoin"."created_by_id", + "posthog_datawarehousejoin"."created_at", + "posthog_datawarehousejoin"."deleted", + "posthog_datawarehousejoin"."id", + "posthog_datawarehousejoin"."team_id", + "posthog_datawarehousejoin"."source_table_name", + 
"posthog_datawarehousejoin"."source_table_key", + "posthog_datawarehousejoin"."joining_table_name", + "posthog_datawarehousejoin"."joining_table_key", + "posthog_datawarehousejoin"."field_name" + FROM "posthog_datawarehousejoin" + WHERE ("posthog_datawarehousejoin"."team_id" = 2 + AND NOT ("posthog_datawarehousejoin"."deleted" + AND "posthog_datawarehousejoin"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_get_session_recordings.21 + ''' + SELECT "posthog_grouptypemapping"."id", + "posthog_grouptypemapping"."team_id", + "posthog_grouptypemapping"."group_type", + "posthog_grouptypemapping"."group_type_index", + "posthog_grouptypemapping"."name_singular", + "posthog_grouptypemapping"."name_plural" + FROM "posthog_grouptypemapping" + WHERE "posthog_grouptypemapping"."team_id" = 2 + ''' +# --- +# name: TestSessionRecordings.test_get_session_recordings.22 + ''' + SELECT "posthog_datawarehousetable"."created_by_id", + "posthog_datawarehousetable"."created_at", + "posthog_datawarehousetable"."deleted", + "posthog_datawarehousetable"."id", + "posthog_datawarehousetable"."name", + "posthog_datawarehousetable"."format", + "posthog_datawarehousetable"."team_id", + "posthog_datawarehousetable"."url_pattern", + "posthog_datawarehousetable"."credential_id", + "posthog_datawarehousetable"."external_data_source_id", + "posthog_datawarehousetable"."columns", + "posthog_datawarehousetable"."row_count" + FROM "posthog_datawarehousetable" + WHERE ("posthog_datawarehousetable"."team_id" = 2 + AND NOT ("posthog_datawarehousetable"."deleted" + AND "posthog_datawarehousetable"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_get_session_recordings.23 + ''' + SELECT "posthog_datawarehousesavedquery"."created_by_id", + "posthog_datawarehousesavedquery"."created_at", + "posthog_datawarehousesavedquery"."deleted", + "posthog_datawarehousesavedquery"."id", + "posthog_datawarehousesavedquery"."name", + "posthog_datawarehousesavedquery"."team_id", + "posthog_datawarehousesavedquery"."columns", + "posthog_datawarehousesavedquery"."external_tables", + "posthog_datawarehousesavedquery"."query" + FROM "posthog_datawarehousesavedquery" + WHERE ("posthog_datawarehousesavedquery"."team_id" = 2 + AND NOT ("posthog_datawarehousesavedquery"."deleted" + AND "posthog_datawarehousesavedquery"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_get_session_recordings.24 + ''' + SELECT "posthog_datawarehousejoin"."created_by_id", + "posthog_datawarehousejoin"."created_at", + "posthog_datawarehousejoin"."deleted", + "posthog_datawarehousejoin"."id", + "posthog_datawarehousejoin"."team_id", + "posthog_datawarehousejoin"."source_table_name", + "posthog_datawarehousejoin"."source_table_key", + "posthog_datawarehousejoin"."joining_table_name", + "posthog_datawarehousejoin"."joining_table_key", + "posthog_datawarehousejoin"."field_name" + FROM "posthog_datawarehousejoin" + WHERE ("posthog_datawarehousejoin"."team_id" = 2 + AND NOT ("posthog_datawarehousejoin"."deleted" + AND "posthog_datawarehousejoin"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_get_session_recordings.25 + ''' + SELECT "posthog_sessionrecording"."id", + "posthog_sessionrecording"."session_id", + "posthog_sessionrecording"."team_id", + "posthog_sessionrecording"."created_at", + "posthog_sessionrecording"."deleted", + "posthog_sessionrecording"."object_storage_path", + "posthog_sessionrecording"."distinct_id", + "posthog_sessionrecording"."duration", + 
"posthog_sessionrecording"."active_seconds", + "posthog_sessionrecording"."inactive_seconds", + "posthog_sessionrecording"."start_time", + "posthog_sessionrecording"."end_time", + "posthog_sessionrecording"."click_count", + "posthog_sessionrecording"."keypress_count", + "posthog_sessionrecording"."mouse_activity_count", + "posthog_sessionrecording"."console_log_count", + "posthog_sessionrecording"."console_warn_count", + "posthog_sessionrecording"."console_error_count", + "posthog_sessionrecording"."start_url", + "posthog_sessionrecording"."storage_version" + FROM "posthog_sessionrecording" + WHERE ("posthog_sessionrecording"."session_id" IN ('test_get_session_recordings-1', + 'test_get_session_recordings-2') + AND "posthog_sessionrecording"."team_id" = 2) + ''' +# --- +# name: TestSessionRecordings.test_get_session_recordings.26 + ''' + SELECT "posthog_sessionrecordingviewed"."session_id" + FROM "posthog_sessionrecordingviewed" + WHERE ("posthog_sessionrecordingviewed"."team_id" = 2 + AND "posthog_sessionrecordingviewed"."user_id" = 2) + ''' +# --- +# name: TestSessionRecordings.test_get_session_recordings.27 + ''' + SELECT "posthog_persondistinctid"."id", + "posthog_persondistinctid"."team_id", + "posthog_persondistinctid"."person_id", + "posthog_persondistinctid"."distinct_id", + "posthog_persondistinctid"."version", + "posthog_person"."id", + "posthog_person"."created_at", + "posthog_person"."properties_last_updated_at", + "posthog_person"."properties_last_operation", + "posthog_person"."team_id", + "posthog_person"."properties", + "posthog_person"."is_user_id", + "posthog_person"."is_identified", + "posthog_person"."uuid", + "posthog_person"."version" + FROM "posthog_persondistinctid" + INNER JOIN "posthog_person" ON ("posthog_persondistinctid"."person_id" = "posthog_person"."id") + WHERE ("posthog_persondistinctid"."distinct_id" IN ('user2', + 'user_one_0') + AND "posthog_persondistinctid"."team_id" = 2) + ''' +# --- # name: TestSessionRecordings.test_get_session_recordings.3 ''' SELECT "posthog_team"."id", @@ -1071,6 +1230,165 @@ ''' # --- # name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.10 + ''' + SELECT "posthog_datawarehousesavedquery"."created_by_id", + "posthog_datawarehousesavedquery"."created_at", + "posthog_datawarehousesavedquery"."deleted", + "posthog_datawarehousesavedquery"."id", + "posthog_datawarehousesavedquery"."name", + "posthog_datawarehousesavedquery"."team_id", + "posthog_datawarehousesavedquery"."columns", + "posthog_datawarehousesavedquery"."external_tables", + "posthog_datawarehousesavedquery"."query" + FROM "posthog_datawarehousesavedquery" + WHERE ("posthog_datawarehousesavedquery"."team_id" = 2 + AND NOT ("posthog_datawarehousesavedquery"."deleted" + AND "posthog_datawarehousesavedquery"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.100 + ''' + SELECT "posthog_datawarehousesavedquery"."created_by_id", + "posthog_datawarehousesavedquery"."created_at", + "posthog_datawarehousesavedquery"."deleted", + "posthog_datawarehousesavedquery"."id", + "posthog_datawarehousesavedquery"."name", + "posthog_datawarehousesavedquery"."team_id", + "posthog_datawarehousesavedquery"."columns", + "posthog_datawarehousesavedquery"."external_tables", + "posthog_datawarehousesavedquery"."query" + FROM "posthog_datawarehousesavedquery" + WHERE ("posthog_datawarehousesavedquery"."team_id" = 2 + AND NOT ("posthog_datawarehousesavedquery"."deleted" + AND 
"posthog_datawarehousesavedquery"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.101 + ''' + SELECT "posthog_datawarehousejoin"."created_by_id", + "posthog_datawarehousejoin"."created_at", + "posthog_datawarehousejoin"."deleted", + "posthog_datawarehousejoin"."id", + "posthog_datawarehousejoin"."team_id", + "posthog_datawarehousejoin"."source_table_name", + "posthog_datawarehousejoin"."source_table_key", + "posthog_datawarehousejoin"."joining_table_name", + "posthog_datawarehousejoin"."joining_table_key", + "posthog_datawarehousejoin"."field_name" + FROM "posthog_datawarehousejoin" + WHERE ("posthog_datawarehousejoin"."team_id" = 2 + AND NOT ("posthog_datawarehousejoin"."deleted" + AND "posthog_datawarehousejoin"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.102 + ''' + SELECT "posthog_grouptypemapping"."id", + "posthog_grouptypemapping"."team_id", + "posthog_grouptypemapping"."group_type", + "posthog_grouptypemapping"."group_type_index", + "posthog_grouptypemapping"."name_singular", + "posthog_grouptypemapping"."name_plural" + FROM "posthog_grouptypemapping" + WHERE "posthog_grouptypemapping"."team_id" = 2 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.103 + ''' + SELECT "posthog_datawarehousetable"."created_by_id", + "posthog_datawarehousetable"."created_at", + "posthog_datawarehousetable"."deleted", + "posthog_datawarehousetable"."id", + "posthog_datawarehousetable"."name", + "posthog_datawarehousetable"."format", + "posthog_datawarehousetable"."team_id", + "posthog_datawarehousetable"."url_pattern", + "posthog_datawarehousetable"."credential_id", + "posthog_datawarehousetable"."external_data_source_id", + "posthog_datawarehousetable"."columns", + "posthog_datawarehousetable"."row_count" + FROM "posthog_datawarehousetable" + WHERE ("posthog_datawarehousetable"."team_id" = 2 + AND NOT ("posthog_datawarehousetable"."deleted" + AND "posthog_datawarehousetable"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.104 + ''' + SELECT "posthog_datawarehousesavedquery"."created_by_id", + "posthog_datawarehousesavedquery"."created_at", + "posthog_datawarehousesavedquery"."deleted", + "posthog_datawarehousesavedquery"."id", + "posthog_datawarehousesavedquery"."name", + "posthog_datawarehousesavedquery"."team_id", + "posthog_datawarehousesavedquery"."columns", + "posthog_datawarehousesavedquery"."external_tables", + "posthog_datawarehousesavedquery"."query" + FROM "posthog_datawarehousesavedquery" + WHERE ("posthog_datawarehousesavedquery"."team_id" = 2 + AND NOT ("posthog_datawarehousesavedquery"."deleted" + AND "posthog_datawarehousesavedquery"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.105 + ''' + SELECT "posthog_datawarehousejoin"."created_by_id", + "posthog_datawarehousejoin"."created_at", + "posthog_datawarehousejoin"."deleted", + "posthog_datawarehousejoin"."id", + "posthog_datawarehousejoin"."team_id", + "posthog_datawarehousejoin"."source_table_name", + "posthog_datawarehousejoin"."source_table_key", + "posthog_datawarehousejoin"."joining_table_name", + "posthog_datawarehousejoin"."joining_table_key", + "posthog_datawarehousejoin"."field_name" + FROM "posthog_datawarehousejoin" + WHERE ("posthog_datawarehousejoin"."team_id" = 2 + AND NOT 
("posthog_datawarehousejoin"."deleted" + AND "posthog_datawarehousejoin"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.106 + ''' + SELECT "posthog_sessionrecording"."id", + "posthog_sessionrecording"."session_id", + "posthog_sessionrecording"."team_id", + "posthog_sessionrecording"."created_at", + "posthog_sessionrecording"."deleted", + "posthog_sessionrecording"."object_storage_path", + "posthog_sessionrecording"."distinct_id", + "posthog_sessionrecording"."duration", + "posthog_sessionrecording"."active_seconds", + "posthog_sessionrecording"."inactive_seconds", + "posthog_sessionrecording"."start_time", + "posthog_sessionrecording"."end_time", + "posthog_sessionrecording"."click_count", + "posthog_sessionrecording"."keypress_count", + "posthog_sessionrecording"."mouse_activity_count", + "posthog_sessionrecording"."console_log_count", + "posthog_sessionrecording"."console_warn_count", + "posthog_sessionrecording"."console_error_count", + "posthog_sessionrecording"."start_url", + "posthog_sessionrecording"."storage_version" + FROM "posthog_sessionrecording" + WHERE ("posthog_sessionrecording"."session_id" IN ('1', + '2', + '3', + '4', + '5', + '6') + AND "posthog_sessionrecording"."team_id" = 2) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.107 + ''' + SELECT "posthog_sessionrecordingviewed"."session_id" + FROM "posthog_sessionrecordingviewed" + WHERE ("posthog_sessionrecordingviewed"."team_id" = 2 + AND "posthog_sessionrecordingviewed"."user_id" = 2) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.108 ''' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -1089,11 +1407,16 @@ "posthog_person"."version" FROM "posthog_persondistinctid" INNER JOIN "posthog_person" ON ("posthog_persondistinctid"."person_id" = "posthog_person"."id") - WHERE ("posthog_persondistinctid"."distinct_id" IN ('user1') + WHERE ("posthog_persondistinctid"."distinct_id" IN ('user1', + 'user2', + 'user3', + 'user4', + 'user5', + 'user6') AND "posthog_persondistinctid"."team_id" = 2) ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.11 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.109 ''' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -1155,7 +1478,25 @@ LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.12 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.11 + ''' + SELECT "posthog_datawarehousejoin"."created_by_id", + "posthog_datawarehousejoin"."created_at", + "posthog_datawarehousejoin"."deleted", + "posthog_datawarehousejoin"."id", + "posthog_datawarehousejoin"."team_id", + "posthog_datawarehousejoin"."source_table_name", + "posthog_datawarehousejoin"."source_table_key", + "posthog_datawarehousejoin"."joining_table_name", + "posthog_datawarehousejoin"."joining_table_key", + "posthog_datawarehousejoin"."field_name" + FROM "posthog_datawarehousejoin" + WHERE ("posthog_datawarehousejoin"."team_id" = 2 + AND NOT ("posthog_datawarehousejoin"."deleted" + AND "posthog_datawarehousejoin"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.110 ''' SELECT "posthog_user"."id", "posthog_user"."password", @@ -1187,7 +1528,7 @@ LIMIT 21 ''' # --- -# name: 
TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.13 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.111 ''' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -1242,7 +1583,7 @@ LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.14 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.112 ''' SELECT "posthog_organizationmembership"."id", "posthog_organizationmembership"."organization_id", @@ -1273,7 +1614,174 @@ WHERE "posthog_organizationmembership"."user_id" = 2 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.15 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.113 + ''' + SELECT "posthog_organization"."id", + "posthog_organization"."name", + "posthog_organization"."slug", + "posthog_organization"."created_at", + "posthog_organization"."updated_at", + "posthog_organization"."plugins_access_level", + "posthog_organization"."for_internal_metrics", + "posthog_organization"."is_member_join_email_enabled", + "posthog_organization"."enforce_2fa", + "posthog_organization"."is_hipaa", + "posthog_organization"."customer_id", + "posthog_organization"."available_product_features", + "posthog_organization"."usage", + "posthog_organization"."never_drop_data", + "posthog_organization"."customer_trust_scores", + "posthog_organization"."setup_section_2_completed", + "posthog_organization"."personalization", + "posthog_organization"."domain_whitelist" + FROM "posthog_organization" + WHERE "posthog_organization"."id" = '00000000-0000-0000-0000-000000000000'::uuid + LIMIT 21 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.114 + ''' + SELECT "posthog_grouptypemapping"."id", + "posthog_grouptypemapping"."team_id", + "posthog_grouptypemapping"."group_type", + "posthog_grouptypemapping"."group_type_index", + "posthog_grouptypemapping"."name_singular", + "posthog_grouptypemapping"."name_plural" + FROM "posthog_grouptypemapping" + WHERE "posthog_grouptypemapping"."team_id" = 2 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.115 + ''' + SELECT "posthog_datawarehousetable"."created_by_id", + "posthog_datawarehousetable"."created_at", + "posthog_datawarehousetable"."deleted", + "posthog_datawarehousetable"."id", + "posthog_datawarehousetable"."name", + "posthog_datawarehousetable"."format", + "posthog_datawarehousetable"."team_id", + "posthog_datawarehousetable"."url_pattern", + "posthog_datawarehousetable"."credential_id", + "posthog_datawarehousetable"."external_data_source_id", + "posthog_datawarehousetable"."columns", + "posthog_datawarehousetable"."row_count" + FROM "posthog_datawarehousetable" + WHERE ("posthog_datawarehousetable"."team_id" = 2 + AND NOT ("posthog_datawarehousetable"."deleted" + AND "posthog_datawarehousetable"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.116 + ''' + SELECT "posthog_datawarehousesavedquery"."created_by_id", + "posthog_datawarehousesavedquery"."created_at", + "posthog_datawarehousesavedquery"."deleted", + "posthog_datawarehousesavedquery"."id", + "posthog_datawarehousesavedquery"."name", + "posthog_datawarehousesavedquery"."team_id", + "posthog_datawarehousesavedquery"."columns", + "posthog_datawarehousesavedquery"."external_tables", + "posthog_datawarehousesavedquery"."query" + FROM 
"posthog_datawarehousesavedquery" + WHERE ("posthog_datawarehousesavedquery"."team_id" = 2 + AND NOT ("posthog_datawarehousesavedquery"."deleted" + AND "posthog_datawarehousesavedquery"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.117 + ''' + SELECT "posthog_datawarehousejoin"."created_by_id", + "posthog_datawarehousejoin"."created_at", + "posthog_datawarehousejoin"."deleted", + "posthog_datawarehousejoin"."id", + "posthog_datawarehousejoin"."team_id", + "posthog_datawarehousejoin"."source_table_name", + "posthog_datawarehousejoin"."source_table_key", + "posthog_datawarehousejoin"."joining_table_name", + "posthog_datawarehousejoin"."joining_table_key", + "posthog_datawarehousejoin"."field_name" + FROM "posthog_datawarehousejoin" + WHERE ("posthog_datawarehousejoin"."team_id" = 2 + AND NOT ("posthog_datawarehousejoin"."deleted" + AND "posthog_datawarehousejoin"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.118 + ''' + SELECT "posthog_grouptypemapping"."id", + "posthog_grouptypemapping"."team_id", + "posthog_grouptypemapping"."group_type", + "posthog_grouptypemapping"."group_type_index", + "posthog_grouptypemapping"."name_singular", + "posthog_grouptypemapping"."name_plural" + FROM "posthog_grouptypemapping" + WHERE "posthog_grouptypemapping"."team_id" = 2 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.119 + ''' + SELECT "posthog_datawarehousetable"."created_by_id", + "posthog_datawarehousetable"."created_at", + "posthog_datawarehousetable"."deleted", + "posthog_datawarehousetable"."id", + "posthog_datawarehousetable"."name", + "posthog_datawarehousetable"."format", + "posthog_datawarehousetable"."team_id", + "posthog_datawarehousetable"."url_pattern", + "posthog_datawarehousetable"."credential_id", + "posthog_datawarehousetable"."external_data_source_id", + "posthog_datawarehousetable"."columns", + "posthog_datawarehousetable"."row_count" + FROM "posthog_datawarehousetable" + WHERE ("posthog_datawarehousetable"."team_id" = 2 + AND NOT ("posthog_datawarehousetable"."deleted" + AND "posthog_datawarehousetable"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.12 + ''' + SELECT "posthog_sessionrecordingviewed"."session_id" + FROM "posthog_sessionrecordingviewed" + WHERE ("posthog_sessionrecordingviewed"."team_id" = 2 + AND "posthog_sessionrecordingviewed"."user_id" = 2) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.120 + ''' + SELECT "posthog_datawarehousesavedquery"."created_by_id", + "posthog_datawarehousesavedquery"."created_at", + "posthog_datawarehousesavedquery"."deleted", + "posthog_datawarehousesavedquery"."id", + "posthog_datawarehousesavedquery"."name", + "posthog_datawarehousesavedquery"."team_id", + "posthog_datawarehousesavedquery"."columns", + "posthog_datawarehousesavedquery"."external_tables", + "posthog_datawarehousesavedquery"."query" + FROM "posthog_datawarehousesavedquery" + WHERE ("posthog_datawarehousesavedquery"."team_id" = 2 + AND NOT ("posthog_datawarehousesavedquery"."deleted" + AND "posthog_datawarehousesavedquery"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.121 + ''' + SELECT "posthog_datawarehousejoin"."created_by_id", + "posthog_datawarehousejoin"."created_at", + 
"posthog_datawarehousejoin"."deleted", + "posthog_datawarehousejoin"."id", + "posthog_datawarehousejoin"."team_id", + "posthog_datawarehousejoin"."source_table_name", + "posthog_datawarehousejoin"."source_table_key", + "posthog_datawarehousejoin"."joining_table_name", + "posthog_datawarehousejoin"."joining_table_key", + "posthog_datawarehousejoin"."field_name" + FROM "posthog_datawarehousejoin" + WHERE ("posthog_datawarehousejoin"."team_id" = 2 + AND NOT ("posthog_datawarehousejoin"."deleted" + AND "posthog_datawarehousejoin"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.122 ''' SELECT "posthog_sessionrecording"."id", "posthog_sessionrecording"."session_id", @@ -1297,11 +1805,16 @@ "posthog_sessionrecording"."storage_version" FROM "posthog_sessionrecording" WHERE ("posthog_sessionrecording"."session_id" IN ('1', - '2') - AND "posthog_sessionrecording"."team_id" = 2) - ''' + '2', + '3', + '4', + '5', + '6', + '7') + AND "posthog_sessionrecording"."team_id" = 2) + ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.16 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.123 ''' SELECT "posthog_sessionrecordingviewed"."session_id" FROM "posthog_sessionrecordingviewed" @@ -1309,7 +1822,7 @@ AND "posthog_sessionrecordingviewed"."user_id" = 2) ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.17 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.124 ''' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -1329,11 +1842,16 @@ FROM "posthog_persondistinctid" INNER JOIN "posthog_person" ON ("posthog_persondistinctid"."person_id" = "posthog_person"."id") WHERE ("posthog_persondistinctid"."distinct_id" IN ('user1', - 'user2') + 'user2', + 'user3', + 'user4', + 'user5', + 'user6', + 'user7') AND "posthog_persondistinctid"."team_id" = 2) ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.18 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.125 ''' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -1395,7 +1913,7 @@ LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.19 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.126 ''' SELECT "posthog_user"."id", "posthog_user"."password", @@ -1427,7 +1945,62 @@ LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.2 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.127 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", 
+ "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_replay_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 2 + LIMIT 21 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.128 ''' SELECT "posthog_organizationmembership"."id", "posthog_organizationmembership"."organization_id", @@ -1458,7 +2031,32 @@ WHERE "posthog_organizationmembership"."user_id" = 2 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.20 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.129 + ''' + SELECT "posthog_organization"."id", + "posthog_organization"."name", + "posthog_organization"."slug", + "posthog_organization"."created_at", + "posthog_organization"."updated_at", + "posthog_organization"."plugins_access_level", + "posthog_organization"."for_internal_metrics", + "posthog_organization"."is_member_join_email_enabled", + "posthog_organization"."enforce_2fa", + "posthog_organization"."is_hipaa", + "posthog_organization"."customer_id", + "posthog_organization"."available_product_features", + "posthog_organization"."usage", + "posthog_organization"."never_drop_data", + "posthog_organization"."customer_trust_scores", + "posthog_organization"."setup_section_2_completed", + "posthog_organization"."personalization", + "posthog_organization"."domain_whitelist" + FROM "posthog_organization" + WHERE "posthog_organization"."id" = '00000000-0000-0000-0000-000000000000'::uuid + LIMIT 21 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.13 ''' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -1506,6 +2104,13 @@ "posthog_team"."modifiers", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", "posthog_team"."external_data_workspace_id", "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" @@ -1513,38 +2118,141 @@ LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.21 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.130 ''' - SELECT "posthog_organizationmembership"."id", - 
"posthog_organizationmembership"."organization_id", - "posthog_organizationmembership"."user_id", - "posthog_organizationmembership"."level", - "posthog_organizationmembership"."joined_at", - "posthog_organizationmembership"."updated_at", - "posthog_organization"."id", - "posthog_organization"."name", - "posthog_organization"."slug", - "posthog_organization"."created_at", - "posthog_organization"."updated_at", - "posthog_organization"."plugins_access_level", - "posthog_organization"."for_internal_metrics", - "posthog_organization"."is_member_join_email_enabled", - "posthog_organization"."enforce_2fa", - "posthog_organization"."is_hipaa", - "posthog_organization"."customer_id", - "posthog_organization"."available_product_features", - "posthog_organization"."usage", - "posthog_organization"."never_drop_data", - "posthog_organization"."customer_trust_scores", - "posthog_organization"."setup_section_2_completed", - "posthog_organization"."personalization", - "posthog_organization"."domain_whitelist" - FROM "posthog_organizationmembership" - INNER JOIN "posthog_organization" ON ("posthog_organizationmembership"."organization_id" = "posthog_organization"."id") - WHERE "posthog_organizationmembership"."user_id" = 2 + SELECT "posthog_grouptypemapping"."id", + "posthog_grouptypemapping"."team_id", + "posthog_grouptypemapping"."group_type", + "posthog_grouptypemapping"."group_type_index", + "posthog_grouptypemapping"."name_singular", + "posthog_grouptypemapping"."name_plural" + FROM "posthog_grouptypemapping" + WHERE "posthog_grouptypemapping"."team_id" = 2 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.22 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.131 + ''' + SELECT "posthog_datawarehousetable"."created_by_id", + "posthog_datawarehousetable"."created_at", + "posthog_datawarehousetable"."deleted", + "posthog_datawarehousetable"."id", + "posthog_datawarehousetable"."name", + "posthog_datawarehousetable"."format", + "posthog_datawarehousetable"."team_id", + "posthog_datawarehousetable"."url_pattern", + "posthog_datawarehousetable"."credential_id", + "posthog_datawarehousetable"."external_data_source_id", + "posthog_datawarehousetable"."columns", + "posthog_datawarehousetable"."row_count" + FROM "posthog_datawarehousetable" + WHERE ("posthog_datawarehousetable"."team_id" = 2 + AND NOT ("posthog_datawarehousetable"."deleted" + AND "posthog_datawarehousetable"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.132 + ''' + SELECT "posthog_datawarehousesavedquery"."created_by_id", + "posthog_datawarehousesavedquery"."created_at", + "posthog_datawarehousesavedquery"."deleted", + "posthog_datawarehousesavedquery"."id", + "posthog_datawarehousesavedquery"."name", + "posthog_datawarehousesavedquery"."team_id", + "posthog_datawarehousesavedquery"."columns", + "posthog_datawarehousesavedquery"."external_tables", + "posthog_datawarehousesavedquery"."query" + FROM "posthog_datawarehousesavedquery" + WHERE ("posthog_datawarehousesavedquery"."team_id" = 2 + AND NOT ("posthog_datawarehousesavedquery"."deleted" + AND "posthog_datawarehousesavedquery"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.133 + ''' + SELECT "posthog_datawarehousejoin"."created_by_id", + "posthog_datawarehousejoin"."created_at", + "posthog_datawarehousejoin"."deleted", + "posthog_datawarehousejoin"."id", + 
"posthog_datawarehousejoin"."team_id", + "posthog_datawarehousejoin"."source_table_name", + "posthog_datawarehousejoin"."source_table_key", + "posthog_datawarehousejoin"."joining_table_name", + "posthog_datawarehousejoin"."joining_table_key", + "posthog_datawarehousejoin"."field_name" + FROM "posthog_datawarehousejoin" + WHERE ("posthog_datawarehousejoin"."team_id" = 2 + AND NOT ("posthog_datawarehousejoin"."deleted" + AND "posthog_datawarehousejoin"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.134 + ''' + SELECT "posthog_grouptypemapping"."id", + "posthog_grouptypemapping"."team_id", + "posthog_grouptypemapping"."group_type", + "posthog_grouptypemapping"."group_type_index", + "posthog_grouptypemapping"."name_singular", + "posthog_grouptypemapping"."name_plural" + FROM "posthog_grouptypemapping" + WHERE "posthog_grouptypemapping"."team_id" = 2 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.135 + ''' + SELECT "posthog_datawarehousetable"."created_by_id", + "posthog_datawarehousetable"."created_at", + "posthog_datawarehousetable"."deleted", + "posthog_datawarehousetable"."id", + "posthog_datawarehousetable"."name", + "posthog_datawarehousetable"."format", + "posthog_datawarehousetable"."team_id", + "posthog_datawarehousetable"."url_pattern", + "posthog_datawarehousetable"."credential_id", + "posthog_datawarehousetable"."external_data_source_id", + "posthog_datawarehousetable"."columns", + "posthog_datawarehousetable"."row_count" + FROM "posthog_datawarehousetable" + WHERE ("posthog_datawarehousetable"."team_id" = 2 + AND NOT ("posthog_datawarehousetable"."deleted" + AND "posthog_datawarehousetable"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.136 + ''' + SELECT "posthog_datawarehousesavedquery"."created_by_id", + "posthog_datawarehousesavedquery"."created_at", + "posthog_datawarehousesavedquery"."deleted", + "posthog_datawarehousesavedquery"."id", + "posthog_datawarehousesavedquery"."name", + "posthog_datawarehousesavedquery"."team_id", + "posthog_datawarehousesavedquery"."columns", + "posthog_datawarehousesavedquery"."external_tables", + "posthog_datawarehousesavedquery"."query" + FROM "posthog_datawarehousesavedquery" + WHERE ("posthog_datawarehousesavedquery"."team_id" = 2 + AND NOT ("posthog_datawarehousesavedquery"."deleted" + AND "posthog_datawarehousesavedquery"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.137 + ''' + SELECT "posthog_datawarehousejoin"."created_by_id", + "posthog_datawarehousejoin"."created_at", + "posthog_datawarehousejoin"."deleted", + "posthog_datawarehousejoin"."id", + "posthog_datawarehousejoin"."team_id", + "posthog_datawarehousejoin"."source_table_name", + "posthog_datawarehousejoin"."source_table_key", + "posthog_datawarehousejoin"."joining_table_name", + "posthog_datawarehousejoin"."joining_table_key", + "posthog_datawarehousejoin"."field_name" + FROM "posthog_datawarehousejoin" + WHERE ("posthog_datawarehousejoin"."team_id" = 2 + AND NOT ("posthog_datawarehousejoin"."deleted" + AND "posthog_datawarehousejoin"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.138 ''' SELECT "posthog_sessionrecording"."id", "posthog_sessionrecording"."session_id", @@ -1569,11 +2277,16 @@ FROM "posthog_sessionrecording" WHERE 
("posthog_sessionrecording"."session_id" IN ('1', '2', - '3') + '3', + '4', + '5', + '6', + '7', + '8') AND "posthog_sessionrecording"."team_id" = 2) ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.23 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.139 ''' SELECT "posthog_sessionrecordingviewed"."session_id" FROM "posthog_sessionrecordingviewed" @@ -1581,7 +2294,39 @@ AND "posthog_sessionrecordingviewed"."user_id" = 2) ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.24 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.14 + ''' + SELECT "posthog_user"."id", + "posthog_user"."password", + "posthog_user"."last_login", + "posthog_user"."first_name", + "posthog_user"."last_name", + "posthog_user"."is_staff", + "posthog_user"."is_active", + "posthog_user"."date_joined", + "posthog_user"."uuid", + "posthog_user"."current_organization_id", + "posthog_user"."current_team_id", + "posthog_user"."email", + "posthog_user"."pending_email", + "posthog_user"."temporary_token", + "posthog_user"."distinct_id", + "posthog_user"."is_email_verified", + "posthog_user"."has_seen_product_intro_for", + "posthog_user"."strapi_id", + "posthog_user"."theme_mode", + "posthog_user"."partial_notification_settings", + "posthog_user"."anonymize_data", + "posthog_user"."toolbar_mode", + "posthog_user"."hedgehog_config", + "posthog_user"."events_column_config", + "posthog_user"."email_opt_in" + FROM "posthog_user" + WHERE "posthog_user"."id" = 2 + LIMIT 21 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.140 ''' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -1602,11 +2347,16 @@ INNER JOIN "posthog_person" ON ("posthog_persondistinctid"."person_id" = "posthog_person"."id") WHERE ("posthog_persondistinctid"."distinct_id" IN ('user1', 'user2', - 'user3') + 'user3', + 'user4', + 'user5', + 'user6', + 'user7', + 'user8') AND "posthog_persondistinctid"."team_id" = 2) ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.25 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.141 ''' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -1668,7 +2418,7 @@ LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.26 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.142 ''' SELECT "posthog_user"."id", "posthog_user"."password", @@ -1700,7 +2450,7 @@ LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.27 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.143 ''' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -1755,7 +2505,7 @@ LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.28 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.144 ''' SELECT "posthog_organizationmembership"."id", "posthog_organizationmembership"."organization_id", @@ -1786,53 +2536,264 @@ WHERE "posthog_organizationmembership"."user_id" = 2 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.29 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.145 ''' - SELECT "posthog_sessionrecording"."id", - "posthog_sessionrecording"."session_id", - "posthog_sessionrecording"."team_id", 
- "posthog_sessionrecording"."created_at", - "posthog_sessionrecording"."deleted", - "posthog_sessionrecording"."object_storage_path", - "posthog_sessionrecording"."distinct_id", - "posthog_sessionrecording"."duration", - "posthog_sessionrecording"."active_seconds", - "posthog_sessionrecording"."inactive_seconds", - "posthog_sessionrecording"."start_time", - "posthog_sessionrecording"."end_time", - "posthog_sessionrecording"."click_count", - "posthog_sessionrecording"."keypress_count", - "posthog_sessionrecording"."mouse_activity_count", - "posthog_sessionrecording"."console_log_count", - "posthog_sessionrecording"."console_warn_count", - "posthog_sessionrecording"."console_error_count", - "posthog_sessionrecording"."start_url", - "posthog_sessionrecording"."storage_version" - FROM "posthog_sessionrecording" - WHERE ("posthog_sessionrecording"."session_id" IN ('1', - '2', - '3', - '4') - AND "posthog_sessionrecording"."team_id" = 2) + SELECT "posthog_organization"."id", + "posthog_organization"."name", + "posthog_organization"."slug", + "posthog_organization"."created_at", + "posthog_organization"."updated_at", + "posthog_organization"."plugins_access_level", + "posthog_organization"."for_internal_metrics", + "posthog_organization"."is_member_join_email_enabled", + "posthog_organization"."enforce_2fa", + "posthog_organization"."is_hipaa", + "posthog_organization"."customer_id", + "posthog_organization"."available_product_features", + "posthog_organization"."usage", + "posthog_organization"."never_drop_data", + "posthog_organization"."customer_trust_scores", + "posthog_organization"."setup_section_2_completed", + "posthog_organization"."personalization", + "posthog_organization"."domain_whitelist" + FROM "posthog_organization" + WHERE "posthog_organization"."id" = '00000000-0000-0000-0000-000000000000'::uuid + LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.3 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.146 ''' - SELECT "posthog_sessionrecordingviewed"."session_id" - FROM "posthog_sessionrecordingviewed" - WHERE ("posthog_sessionrecordingviewed"."team_id" = 2 - AND "posthog_sessionrecordingviewed"."user_id" = 2) + SELECT "posthog_grouptypemapping"."id", + "posthog_grouptypemapping"."team_id", + "posthog_grouptypemapping"."group_type", + "posthog_grouptypemapping"."group_type_index", + "posthog_grouptypemapping"."name_singular", + "posthog_grouptypemapping"."name_plural" + FROM "posthog_grouptypemapping" + WHERE "posthog_grouptypemapping"."team_id" = 2 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.30 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.147 + ''' + SELECT "posthog_datawarehousetable"."created_by_id", + "posthog_datawarehousetable"."created_at", + "posthog_datawarehousetable"."deleted", + "posthog_datawarehousetable"."id", + "posthog_datawarehousetable"."name", + "posthog_datawarehousetable"."format", + "posthog_datawarehousetable"."team_id", + "posthog_datawarehousetable"."url_pattern", + "posthog_datawarehousetable"."credential_id", + "posthog_datawarehousetable"."external_data_source_id", + "posthog_datawarehousetable"."columns", + "posthog_datawarehousetable"."row_count" + FROM "posthog_datawarehousetable" + WHERE ("posthog_datawarehousetable"."team_id" = 2 + AND NOT ("posthog_datawarehousetable"."deleted" + AND "posthog_datawarehousetable"."deleted" IS NOT NULL)) ''' - SELECT 
"posthog_sessionrecordingviewed"."session_id" - FROM "posthog_sessionrecordingviewed" - WHERE ("posthog_sessionrecordingviewed"."team_id" = 2 - AND "posthog_sessionrecordingviewed"."user_id" = 2) +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.148 + ''' + SELECT "posthog_datawarehousesavedquery"."created_by_id", + "posthog_datawarehousesavedquery"."created_at", + "posthog_datawarehousesavedquery"."deleted", + "posthog_datawarehousesavedquery"."id", + "posthog_datawarehousesavedquery"."name", + "posthog_datawarehousesavedquery"."team_id", + "posthog_datawarehousesavedquery"."columns", + "posthog_datawarehousesavedquery"."external_tables", + "posthog_datawarehousesavedquery"."query" + FROM "posthog_datawarehousesavedquery" + WHERE ("posthog_datawarehousesavedquery"."team_id" = 2 + AND NOT ("posthog_datawarehousesavedquery"."deleted" + AND "posthog_datawarehousesavedquery"."deleted" IS NOT NULL)) ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.31 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.149 + ''' + SELECT "posthog_datawarehousejoin"."created_by_id", + "posthog_datawarehousejoin"."created_at", + "posthog_datawarehousejoin"."deleted", + "posthog_datawarehousejoin"."id", + "posthog_datawarehousejoin"."team_id", + "posthog_datawarehousejoin"."source_table_name", + "posthog_datawarehousejoin"."source_table_key", + "posthog_datawarehousejoin"."joining_table_name", + "posthog_datawarehousejoin"."joining_table_key", + "posthog_datawarehousejoin"."field_name" + FROM "posthog_datawarehousejoin" + WHERE ("posthog_datawarehousejoin"."team_id" = 2 + AND NOT ("posthog_datawarehousejoin"."deleted" + AND "posthog_datawarehousejoin"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.15 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_replay_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", 
+ "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 2 + LIMIT 21 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.150 + ''' + SELECT "posthog_grouptypemapping"."id", + "posthog_grouptypemapping"."team_id", + "posthog_grouptypemapping"."group_type", + "posthog_grouptypemapping"."group_type_index", + "posthog_grouptypemapping"."name_singular", + "posthog_grouptypemapping"."name_plural" + FROM "posthog_grouptypemapping" + WHERE "posthog_grouptypemapping"."team_id" = 2 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.151 + ''' + SELECT "posthog_datawarehousetable"."created_by_id", + "posthog_datawarehousetable"."created_at", + "posthog_datawarehousetable"."deleted", + "posthog_datawarehousetable"."id", + "posthog_datawarehousetable"."name", + "posthog_datawarehousetable"."format", + "posthog_datawarehousetable"."team_id", + "posthog_datawarehousetable"."url_pattern", + "posthog_datawarehousetable"."credential_id", + "posthog_datawarehousetable"."external_data_source_id", + "posthog_datawarehousetable"."columns", + "posthog_datawarehousetable"."row_count" + FROM "posthog_datawarehousetable" + WHERE ("posthog_datawarehousetable"."team_id" = 2 + AND NOT ("posthog_datawarehousetable"."deleted" + AND "posthog_datawarehousetable"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.152 + ''' + SELECT "posthog_datawarehousesavedquery"."created_by_id", + "posthog_datawarehousesavedquery"."created_at", + "posthog_datawarehousesavedquery"."deleted", + "posthog_datawarehousesavedquery"."id", + "posthog_datawarehousesavedquery"."name", + "posthog_datawarehousesavedquery"."team_id", + "posthog_datawarehousesavedquery"."columns", + "posthog_datawarehousesavedquery"."external_tables", + "posthog_datawarehousesavedquery"."query" + FROM "posthog_datawarehousesavedquery" + WHERE ("posthog_datawarehousesavedquery"."team_id" = 2 + AND NOT ("posthog_datawarehousesavedquery"."deleted" + AND "posthog_datawarehousesavedquery"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.153 + ''' + SELECT "posthog_datawarehousejoin"."created_by_id", + "posthog_datawarehousejoin"."created_at", + "posthog_datawarehousejoin"."deleted", + "posthog_datawarehousejoin"."id", + "posthog_datawarehousejoin"."team_id", + "posthog_datawarehousejoin"."source_table_name", + "posthog_datawarehousejoin"."source_table_key", + "posthog_datawarehousejoin"."joining_table_name", + "posthog_datawarehousejoin"."joining_table_key", + "posthog_datawarehousejoin"."field_name" + FROM "posthog_datawarehousejoin" + WHERE ("posthog_datawarehousejoin"."team_id" = 2 + AND NOT ("posthog_datawarehousejoin"."deleted" + AND "posthog_datawarehousejoin"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.154 + ''' + SELECT "posthog_sessionrecording"."id", + "posthog_sessionrecording"."session_id", + "posthog_sessionrecording"."team_id", + "posthog_sessionrecording"."created_at", + "posthog_sessionrecording"."deleted", + "posthog_sessionrecording"."object_storage_path", + 
"posthog_sessionrecording"."distinct_id", + "posthog_sessionrecording"."duration", + "posthog_sessionrecording"."active_seconds", + "posthog_sessionrecording"."inactive_seconds", + "posthog_sessionrecording"."start_time", + "posthog_sessionrecording"."end_time", + "posthog_sessionrecording"."click_count", + "posthog_sessionrecording"."keypress_count", + "posthog_sessionrecording"."mouse_activity_count", + "posthog_sessionrecording"."console_log_count", + "posthog_sessionrecording"."console_warn_count", + "posthog_sessionrecording"."console_error_count", + "posthog_sessionrecording"."start_url", + "posthog_sessionrecording"."storage_version" + FROM "posthog_sessionrecording" + WHERE ("posthog_sessionrecording"."session_id" IN ('1', + '2', + '3', + '4', + '5', + '6', + '7', + '8', + '9') + AND "posthog_sessionrecording"."team_id" = 2) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.155 + ''' + SELECT "posthog_sessionrecordingviewed"."session_id" + FROM "posthog_sessionrecordingviewed" + WHERE ("posthog_sessionrecordingviewed"."team_id" = 2 + AND "posthog_sessionrecordingviewed"."user_id" = 2) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.156 ''' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -1854,11 +2815,16 @@ WHERE ("posthog_persondistinctid"."distinct_id" IN ('user1', 'user2', 'user3', - 'user4') + 'user4', + 'user5', + 'user6', + 'user7', + 'user8', + 'user9') AND "posthog_persondistinctid"."team_id" = 2) ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.32 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.157 ''' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -1920,7 +2886,7 @@ LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.33 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.158 ''' SELECT "posthog_user"."id", "posthog_user"."password", @@ -1952,7 +2918,7 @@ LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.34 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.159 ''' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -2007,7 +2973,7 @@ LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.35 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.16 ''' SELECT "posthog_organizationmembership"."id", "posthog_organizationmembership"."organization_id", @@ -2038,7 +3004,222 @@ WHERE "posthog_organizationmembership"."user_id" = 2 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.36 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.160 + ''' + SELECT "posthog_organizationmembership"."id", + "posthog_organizationmembership"."organization_id", + "posthog_organizationmembership"."user_id", + "posthog_organizationmembership"."level", + "posthog_organizationmembership"."joined_at", + "posthog_organizationmembership"."updated_at", + "posthog_organization"."id", + "posthog_organization"."name", + "posthog_organization"."slug", + "posthog_organization"."created_at", + "posthog_organization"."updated_at", + "posthog_organization"."plugins_access_level", + "posthog_organization"."for_internal_metrics", + "posthog_organization"."is_member_join_email_enabled", + 
"posthog_organization"."enforce_2fa", + "posthog_organization"."is_hipaa", + "posthog_organization"."customer_id", + "posthog_organization"."available_product_features", + "posthog_organization"."usage", + "posthog_organization"."never_drop_data", + "posthog_organization"."customer_trust_scores", + "posthog_organization"."setup_section_2_completed", + "posthog_organization"."personalization", + "posthog_organization"."domain_whitelist" + FROM "posthog_organizationmembership" + INNER JOIN "posthog_organization" ON ("posthog_organizationmembership"."organization_id" = "posthog_organization"."id") + WHERE "posthog_organizationmembership"."user_id" = 2 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.161 + ''' + SELECT "posthog_organization"."id", + "posthog_organization"."name", + "posthog_organization"."slug", + "posthog_organization"."created_at", + "posthog_organization"."updated_at", + "posthog_organization"."plugins_access_level", + "posthog_organization"."for_internal_metrics", + "posthog_organization"."is_member_join_email_enabled", + "posthog_organization"."enforce_2fa", + "posthog_organization"."is_hipaa", + "posthog_organization"."customer_id", + "posthog_organization"."available_product_features", + "posthog_organization"."usage", + "posthog_organization"."never_drop_data", + "posthog_organization"."customer_trust_scores", + "posthog_organization"."setup_section_2_completed", + "posthog_organization"."personalization", + "posthog_organization"."domain_whitelist" + FROM "posthog_organization" + WHERE "posthog_organization"."id" = '00000000-0000-0000-0000-000000000000'::uuid + LIMIT 21 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.162 + ''' + SELECT "posthog_grouptypemapping"."id", + "posthog_grouptypemapping"."team_id", + "posthog_grouptypemapping"."group_type", + "posthog_grouptypemapping"."group_type_index", + "posthog_grouptypemapping"."name_singular", + "posthog_grouptypemapping"."name_plural" + FROM "posthog_grouptypemapping" + WHERE "posthog_grouptypemapping"."team_id" = 2 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.163 + ''' + SELECT "posthog_datawarehousetable"."created_by_id", + "posthog_datawarehousetable"."created_at", + "posthog_datawarehousetable"."deleted", + "posthog_datawarehousetable"."id", + "posthog_datawarehousetable"."name", + "posthog_datawarehousetable"."format", + "posthog_datawarehousetable"."team_id", + "posthog_datawarehousetable"."url_pattern", + "posthog_datawarehousetable"."credential_id", + "posthog_datawarehousetable"."external_data_source_id", + "posthog_datawarehousetable"."columns", + "posthog_datawarehousetable"."row_count" + FROM "posthog_datawarehousetable" + WHERE ("posthog_datawarehousetable"."team_id" = 2 + AND NOT ("posthog_datawarehousetable"."deleted" + AND "posthog_datawarehousetable"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.164 + ''' + SELECT "posthog_datawarehousesavedquery"."created_by_id", + "posthog_datawarehousesavedquery"."created_at", + "posthog_datawarehousesavedquery"."deleted", + "posthog_datawarehousesavedquery"."id", + "posthog_datawarehousesavedquery"."name", + "posthog_datawarehousesavedquery"."team_id", + "posthog_datawarehousesavedquery"."columns", + "posthog_datawarehousesavedquery"."external_tables", + "posthog_datawarehousesavedquery"."query" + FROM "posthog_datawarehousesavedquery" + WHERE 
("posthog_datawarehousesavedquery"."team_id" = 2 + AND NOT ("posthog_datawarehousesavedquery"."deleted" + AND "posthog_datawarehousesavedquery"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.165 + ''' + SELECT "posthog_datawarehousejoin"."created_by_id", + "posthog_datawarehousejoin"."created_at", + "posthog_datawarehousejoin"."deleted", + "posthog_datawarehousejoin"."id", + "posthog_datawarehousejoin"."team_id", + "posthog_datawarehousejoin"."source_table_name", + "posthog_datawarehousejoin"."source_table_key", + "posthog_datawarehousejoin"."joining_table_name", + "posthog_datawarehousejoin"."joining_table_key", + "posthog_datawarehousejoin"."field_name" + FROM "posthog_datawarehousejoin" + WHERE ("posthog_datawarehousejoin"."team_id" = 2 + AND NOT ("posthog_datawarehousejoin"."deleted" + AND "posthog_datawarehousejoin"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.166 + ''' + SELECT "posthog_grouptypemapping"."id", + "posthog_grouptypemapping"."team_id", + "posthog_grouptypemapping"."group_type", + "posthog_grouptypemapping"."group_type_index", + "posthog_grouptypemapping"."name_singular", + "posthog_grouptypemapping"."name_plural" + FROM "posthog_grouptypemapping" + WHERE "posthog_grouptypemapping"."team_id" = 2 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.167 + ''' + SELECT "posthog_datawarehousetable"."created_by_id", + "posthog_datawarehousetable"."created_at", + "posthog_datawarehousetable"."deleted", + "posthog_datawarehousetable"."id", + "posthog_datawarehousetable"."name", + "posthog_datawarehousetable"."format", + "posthog_datawarehousetable"."team_id", + "posthog_datawarehousetable"."url_pattern", + "posthog_datawarehousetable"."credential_id", + "posthog_datawarehousetable"."external_data_source_id", + "posthog_datawarehousetable"."columns", + "posthog_datawarehousetable"."row_count" + FROM "posthog_datawarehousetable" + WHERE ("posthog_datawarehousetable"."team_id" = 2 + AND NOT ("posthog_datawarehousetable"."deleted" + AND "posthog_datawarehousetable"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.168 + ''' + SELECT "posthog_datawarehousesavedquery"."created_by_id", + "posthog_datawarehousesavedquery"."created_at", + "posthog_datawarehousesavedquery"."deleted", + "posthog_datawarehousesavedquery"."id", + "posthog_datawarehousesavedquery"."name", + "posthog_datawarehousesavedquery"."team_id", + "posthog_datawarehousesavedquery"."columns", + "posthog_datawarehousesavedquery"."external_tables", + "posthog_datawarehousesavedquery"."query" + FROM "posthog_datawarehousesavedquery" + WHERE ("posthog_datawarehousesavedquery"."team_id" = 2 + AND NOT ("posthog_datawarehousesavedquery"."deleted" + AND "posthog_datawarehousesavedquery"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.169 + ''' + SELECT "posthog_datawarehousejoin"."created_by_id", + "posthog_datawarehousejoin"."created_at", + "posthog_datawarehousejoin"."deleted", + "posthog_datawarehousejoin"."id", + "posthog_datawarehousejoin"."team_id", + "posthog_datawarehousejoin"."source_table_name", + "posthog_datawarehousejoin"."source_table_key", + "posthog_datawarehousejoin"."joining_table_name", + "posthog_datawarehousejoin"."joining_table_key", + "posthog_datawarehousejoin"."field_name" + FROM 
"posthog_datawarehousejoin" + WHERE ("posthog_datawarehousejoin"."team_id" = 2 + AND NOT ("posthog_datawarehousejoin"."deleted" + AND "posthog_datawarehousejoin"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.17 + ''' + SELECT "posthog_organization"."id", + "posthog_organization"."name", + "posthog_organization"."slug", + "posthog_organization"."created_at", + "posthog_organization"."updated_at", + "posthog_organization"."plugins_access_level", + "posthog_organization"."for_internal_metrics", + "posthog_organization"."is_member_join_email_enabled", + "posthog_organization"."enforce_2fa", + "posthog_organization"."is_hipaa", + "posthog_organization"."customer_id", + "posthog_organization"."available_product_features", + "posthog_organization"."usage", + "posthog_organization"."never_drop_data", + "posthog_organization"."customer_trust_scores", + "posthog_organization"."setup_section_2_completed", + "posthog_organization"."personalization", + "posthog_organization"."domain_whitelist" + FROM "posthog_organization" + WHERE "posthog_organization"."id" = '00000000-0000-0000-0000-000000000000'::uuid + LIMIT 21 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.170 ''' SELECT "posthog_sessionrecording"."id", "posthog_sessionrecording"."session_id", @@ -2062,14 +3243,19 @@ "posthog_sessionrecording"."storage_version" FROM "posthog_sessionrecording" WHERE ("posthog_sessionrecording"."session_id" IN ('1', + '10', '2', '3', '4', - '5') + '5', + '6', + '7', + '8', + '9') AND "posthog_sessionrecording"."team_id" = 2) ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.37 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.171 ''' SELECT "posthog_sessionrecordingviewed"."session_id" FROM "posthog_sessionrecordingviewed" @@ -2077,7 +3263,7 @@ AND "posthog_sessionrecordingviewed"."user_id" = 2) ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.38 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.172 ''' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -2097,14 +3283,242 @@ FROM "posthog_persondistinctid" INNER JOIN "posthog_person" ON ("posthog_persondistinctid"."person_id" = "posthog_person"."id") WHERE ("posthog_persondistinctid"."distinct_id" IN ('user1', + 'user10', 'user2', 'user3', 'user4', - 'user5') + 'user5', + 'user6', + 'user7', + 'user8', + 'user9') AND "posthog_persondistinctid"."team_id" = 2) ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.39 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.18 + ''' + SELECT "posthog_grouptypemapping"."id", + "posthog_grouptypemapping"."team_id", + "posthog_grouptypemapping"."group_type", + "posthog_grouptypemapping"."group_type_index", + "posthog_grouptypemapping"."name_singular", + "posthog_grouptypemapping"."name_plural" + FROM "posthog_grouptypemapping" + WHERE "posthog_grouptypemapping"."team_id" = 2 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.19 + ''' + SELECT "posthog_datawarehousetable"."created_by_id", + "posthog_datawarehousetable"."created_at", + "posthog_datawarehousetable"."deleted", + "posthog_datawarehousetable"."id", + "posthog_datawarehousetable"."name", + "posthog_datawarehousetable"."format", + "posthog_datawarehousetable"."team_id", + 
"posthog_datawarehousetable"."url_pattern", + "posthog_datawarehousetable"."credential_id", + "posthog_datawarehousetable"."external_data_source_id", + "posthog_datawarehousetable"."columns", + "posthog_datawarehousetable"."row_count" + FROM "posthog_datawarehousetable" + WHERE ("posthog_datawarehousetable"."team_id" = 2 + AND NOT ("posthog_datawarehousetable"."deleted" + AND "posthog_datawarehousetable"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.2 + ''' + SELECT "posthog_organizationmembership"."id", + "posthog_organizationmembership"."organization_id", + "posthog_organizationmembership"."user_id", + "posthog_organizationmembership"."level", + "posthog_organizationmembership"."joined_at", + "posthog_organizationmembership"."updated_at", + "posthog_organization"."id", + "posthog_organization"."name", + "posthog_organization"."slug", + "posthog_organization"."created_at", + "posthog_organization"."updated_at", + "posthog_organization"."plugins_access_level", + "posthog_organization"."for_internal_metrics", + "posthog_organization"."is_member_join_email_enabled", + "posthog_organization"."enforce_2fa", + "posthog_organization"."is_hipaa", + "posthog_organization"."customer_id", + "posthog_organization"."available_product_features", + "posthog_organization"."usage", + "posthog_organization"."never_drop_data", + "posthog_organization"."customer_trust_scores", + "posthog_organization"."setup_section_2_completed", + "posthog_organization"."personalization", + "posthog_organization"."domain_whitelist" + FROM "posthog_organizationmembership" + INNER JOIN "posthog_organization" ON ("posthog_organizationmembership"."organization_id" = "posthog_organization"."id") + WHERE "posthog_organizationmembership"."user_id" = 2 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.20 + ''' + SELECT "posthog_datawarehousesavedquery"."created_by_id", + "posthog_datawarehousesavedquery"."created_at", + "posthog_datawarehousesavedquery"."deleted", + "posthog_datawarehousesavedquery"."id", + "posthog_datawarehousesavedquery"."name", + "posthog_datawarehousesavedquery"."team_id", + "posthog_datawarehousesavedquery"."columns", + "posthog_datawarehousesavedquery"."external_tables", + "posthog_datawarehousesavedquery"."query" + FROM "posthog_datawarehousesavedquery" + WHERE ("posthog_datawarehousesavedquery"."team_id" = 2 + AND NOT ("posthog_datawarehousesavedquery"."deleted" + AND "posthog_datawarehousesavedquery"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.21 + ''' + SELECT "posthog_datawarehousejoin"."created_by_id", + "posthog_datawarehousejoin"."created_at", + "posthog_datawarehousejoin"."deleted", + "posthog_datawarehousejoin"."id", + "posthog_datawarehousejoin"."team_id", + "posthog_datawarehousejoin"."source_table_name", + "posthog_datawarehousejoin"."source_table_key", + "posthog_datawarehousejoin"."joining_table_name", + "posthog_datawarehousejoin"."joining_table_key", + "posthog_datawarehousejoin"."field_name" + FROM "posthog_datawarehousejoin" + WHERE ("posthog_datawarehousejoin"."team_id" = 2 + AND NOT ("posthog_datawarehousejoin"."deleted" + AND "posthog_datawarehousejoin"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.22 + ''' + SELECT "posthog_grouptypemapping"."id", + "posthog_grouptypemapping"."team_id", + "posthog_grouptypemapping"."group_type", 
+ "posthog_grouptypemapping"."group_type_index", + "posthog_grouptypemapping"."name_singular", + "posthog_grouptypemapping"."name_plural" + FROM "posthog_grouptypemapping" + WHERE "posthog_grouptypemapping"."team_id" = 2 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.23 + ''' + SELECT "posthog_datawarehousetable"."created_by_id", + "posthog_datawarehousetable"."created_at", + "posthog_datawarehousetable"."deleted", + "posthog_datawarehousetable"."id", + "posthog_datawarehousetable"."name", + "posthog_datawarehousetable"."format", + "posthog_datawarehousetable"."team_id", + "posthog_datawarehousetable"."url_pattern", + "posthog_datawarehousetable"."credential_id", + "posthog_datawarehousetable"."external_data_source_id", + "posthog_datawarehousetable"."columns", + "posthog_datawarehousetable"."row_count" + FROM "posthog_datawarehousetable" + WHERE ("posthog_datawarehousetable"."team_id" = 2 + AND NOT ("posthog_datawarehousetable"."deleted" + AND "posthog_datawarehousetable"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.24 + ''' + SELECT "posthog_datawarehousesavedquery"."created_by_id", + "posthog_datawarehousesavedquery"."created_at", + "posthog_datawarehousesavedquery"."deleted", + "posthog_datawarehousesavedquery"."id", + "posthog_datawarehousesavedquery"."name", + "posthog_datawarehousesavedquery"."team_id", + "posthog_datawarehousesavedquery"."columns", + "posthog_datawarehousesavedquery"."external_tables", + "posthog_datawarehousesavedquery"."query" + FROM "posthog_datawarehousesavedquery" + WHERE ("posthog_datawarehousesavedquery"."team_id" = 2 + AND NOT ("posthog_datawarehousesavedquery"."deleted" + AND "posthog_datawarehousesavedquery"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.25 + ''' + SELECT "posthog_datawarehousejoin"."created_by_id", + "posthog_datawarehousejoin"."created_at", + "posthog_datawarehousejoin"."deleted", + "posthog_datawarehousejoin"."id", + "posthog_datawarehousejoin"."team_id", + "posthog_datawarehousejoin"."source_table_name", + "posthog_datawarehousejoin"."source_table_key", + "posthog_datawarehousejoin"."joining_table_name", + "posthog_datawarehousejoin"."joining_table_key", + "posthog_datawarehousejoin"."field_name" + FROM "posthog_datawarehousejoin" + WHERE ("posthog_datawarehousejoin"."team_id" = 2 + AND NOT ("posthog_datawarehousejoin"."deleted" + AND "posthog_datawarehousejoin"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.26 + ''' + SELECT "posthog_sessionrecording"."id", + "posthog_sessionrecording"."session_id", + "posthog_sessionrecording"."team_id", + "posthog_sessionrecording"."created_at", + "posthog_sessionrecording"."deleted", + "posthog_sessionrecording"."object_storage_path", + "posthog_sessionrecording"."distinct_id", + "posthog_sessionrecording"."duration", + "posthog_sessionrecording"."active_seconds", + "posthog_sessionrecording"."inactive_seconds", + "posthog_sessionrecording"."start_time", + "posthog_sessionrecording"."end_time", + "posthog_sessionrecording"."click_count", + "posthog_sessionrecording"."keypress_count", + "posthog_sessionrecording"."mouse_activity_count", + "posthog_sessionrecording"."console_log_count", + "posthog_sessionrecording"."console_warn_count", + "posthog_sessionrecording"."console_error_count", + "posthog_sessionrecording"."start_url", + 
"posthog_sessionrecording"."storage_version" + FROM "posthog_sessionrecording" + WHERE ("posthog_sessionrecording"."session_id" IN ('1') + AND "posthog_sessionrecording"."team_id" = 2) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.27 + ''' + SELECT "posthog_sessionrecordingviewed"."session_id" + FROM "posthog_sessionrecordingviewed" + WHERE ("posthog_sessionrecordingviewed"."team_id" = 2 + AND "posthog_sessionrecordingviewed"."user_id" = 2) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.28 + ''' + SELECT "posthog_persondistinctid"."id", + "posthog_persondistinctid"."team_id", + "posthog_persondistinctid"."person_id", + "posthog_persondistinctid"."distinct_id", + "posthog_persondistinctid"."version", + "posthog_person"."id", + "posthog_person"."created_at", + "posthog_person"."properties_last_updated_at", + "posthog_person"."properties_last_operation", + "posthog_person"."team_id", + "posthog_person"."properties", + "posthog_person"."is_user_id", + "posthog_person"."is_identified", + "posthog_person"."uuid", + "posthog_person"."version" + FROM "posthog_persondistinctid" + INNER JOIN "posthog_person" ON ("posthog_persondistinctid"."person_id" = "posthog_person"."id") + WHERE ("posthog_persondistinctid"."distinct_id" IN ('user1') + AND "posthog_persondistinctid"."team_id" = 2) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.29 ''' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -2166,69 +3580,32 @@ LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.4 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.3 ''' - SELECT "posthog_team"."id", - "posthog_team"."uuid", - "posthog_team"."organization_id", - "posthog_team"."project_id", - "posthog_team"."api_token", - "posthog_team"."app_urls", - "posthog_team"."name", - "posthog_team"."slack_incoming_webhook", - "posthog_team"."created_at", - "posthog_team"."updated_at", - "posthog_team"."anonymize_ips", - "posthog_team"."completed_snippet_onboarding", - "posthog_team"."has_completed_onboarding_for", - "posthog_team"."ingested_event", - "posthog_team"."autocapture_opt_out", - "posthog_team"."autocapture_exceptions_opt_in", - "posthog_team"."autocapture_exceptions_errors_to_ignore", - "posthog_team"."session_recording_opt_in", - "posthog_team"."session_recording_sample_rate", - "posthog_team"."session_recording_minimum_duration_milliseconds", - "posthog_team"."session_recording_linked_flag", - "posthog_team"."session_recording_network_payload_capture_config", - "posthog_team"."session_replay_config", - "posthog_team"."capture_console_log_opt_in", - "posthog_team"."capture_performance_opt_in", - "posthog_team"."surveys_opt_in", - "posthog_team"."heatmaps_opt_in", - "posthog_team"."session_recording_version", - "posthog_team"."signup_token", - "posthog_team"."is_demo", - "posthog_team"."access_control", - "posthog_team"."week_start_day", - "posthog_team"."inject_web_apps", - "posthog_team"."test_account_filters", - "posthog_team"."test_account_filters_default_checked", - "posthog_team"."path_cleaning_filters", - "posthog_team"."timezone", - "posthog_team"."data_attributes", - "posthog_team"."person_display_name_properties", - "posthog_team"."live_events_columns", - "posthog_team"."recording_domains", - "posthog_team"."primary_dashboard_id", - "posthog_team"."extra_settings", - "posthog_team"."modifiers", - 
"posthog_team"."correlation_config", - "posthog_team"."session_recording_retention_period_days", - "posthog_team"."plugins_opt_in", - "posthog_team"."opt_out_capture", - "posthog_team"."event_names", - "posthog_team"."event_names_with_usage", - "posthog_team"."event_properties", - "posthog_team"."event_properties_with_usage", - "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id", - "posthog_team"."external_data_workspace_last_synced_at" - FROM "posthog_team" - WHERE "posthog_team"."id" = 2 + SELECT "posthog_organization"."id", + "posthog_organization"."name", + "posthog_organization"."slug", + "posthog_organization"."created_at", + "posthog_organization"."updated_at", + "posthog_organization"."plugins_access_level", + "posthog_organization"."for_internal_metrics", + "posthog_organization"."is_member_join_email_enabled", + "posthog_organization"."enforce_2fa", + "posthog_organization"."is_hipaa", + "posthog_organization"."customer_id", + "posthog_organization"."available_product_features", + "posthog_organization"."usage", + "posthog_organization"."never_drop_data", + "posthog_organization"."customer_trust_scores", + "posthog_organization"."setup_section_2_completed", + "posthog_organization"."personalization", + "posthog_organization"."domain_whitelist" + FROM "posthog_organization" + WHERE "posthog_organization"."id" = '00000000-0000-0000-0000-000000000000'::uuid LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.40 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.30 ''' SELECT "posthog_user"."id", "posthog_user"."password", @@ -2260,7 +3637,7 @@ LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.41 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.31 ''' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -2315,15 +3692,40 @@ LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.42 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.32 + ''' + SELECT "posthog_organizationmembership"."id", + "posthog_organizationmembership"."organization_id", + "posthog_organizationmembership"."user_id", + "posthog_organizationmembership"."level", + "posthog_organizationmembership"."joined_at", + "posthog_organizationmembership"."updated_at", + "posthog_organization"."id", + "posthog_organization"."name", + "posthog_organization"."slug", + "posthog_organization"."created_at", + "posthog_organization"."updated_at", + "posthog_organization"."plugins_access_level", + "posthog_organization"."for_internal_metrics", + "posthog_organization"."is_member_join_email_enabled", + "posthog_organization"."enforce_2fa", + "posthog_organization"."is_hipaa", + "posthog_organization"."customer_id", + "posthog_organization"."available_product_features", + "posthog_organization"."usage", + "posthog_organization"."never_drop_data", + "posthog_organization"."customer_trust_scores", + "posthog_organization"."setup_section_2_completed", + "posthog_organization"."personalization", + "posthog_organization"."domain_whitelist" + FROM "posthog_organizationmembership" + INNER JOIN "posthog_organization" ON ("posthog_organizationmembership"."organization_id" = "posthog_organization"."id") + WHERE "posthog_organizationmembership"."user_id" = 2 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.33 ''' - SELECT 
"posthog_organizationmembership"."id", - "posthog_organizationmembership"."organization_id", - "posthog_organizationmembership"."user_id", - "posthog_organizationmembership"."level", - "posthog_organizationmembership"."joined_at", - "posthog_organizationmembership"."updated_at", - "posthog_organization"."id", + SELECT "posthog_organization"."id", "posthog_organization"."name", "posthog_organization"."slug", "posthog_organization"."created_at", @@ -2341,12 +3743,158 @@ "posthog_organization"."setup_section_2_completed", "posthog_organization"."personalization", "posthog_organization"."domain_whitelist" - FROM "posthog_organizationmembership" - INNER JOIN "posthog_organization" ON ("posthog_organizationmembership"."organization_id" = "posthog_organization"."id") - WHERE "posthog_organizationmembership"."user_id" = 2 + FROM "posthog_organization" + WHERE "posthog_organization"."id" = '00000000-0000-0000-0000-000000000000'::uuid + LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.43 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.34 + ''' + SELECT "posthog_grouptypemapping"."id", + "posthog_grouptypemapping"."team_id", + "posthog_grouptypemapping"."group_type", + "posthog_grouptypemapping"."group_type_index", + "posthog_grouptypemapping"."name_singular", + "posthog_grouptypemapping"."name_plural" + FROM "posthog_grouptypemapping" + WHERE "posthog_grouptypemapping"."team_id" = 2 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.35 + ''' + SELECT "posthog_datawarehousetable"."created_by_id", + "posthog_datawarehousetable"."created_at", + "posthog_datawarehousetable"."deleted", + "posthog_datawarehousetable"."id", + "posthog_datawarehousetable"."name", + "posthog_datawarehousetable"."format", + "posthog_datawarehousetable"."team_id", + "posthog_datawarehousetable"."url_pattern", + "posthog_datawarehousetable"."credential_id", + "posthog_datawarehousetable"."external_data_source_id", + "posthog_datawarehousetable"."columns", + "posthog_datawarehousetable"."row_count" + FROM "posthog_datawarehousetable" + WHERE ("posthog_datawarehousetable"."team_id" = 2 + AND NOT ("posthog_datawarehousetable"."deleted" + AND "posthog_datawarehousetable"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.36 + ''' + SELECT "posthog_datawarehousesavedquery"."created_by_id", + "posthog_datawarehousesavedquery"."created_at", + "posthog_datawarehousesavedquery"."deleted", + "posthog_datawarehousesavedquery"."id", + "posthog_datawarehousesavedquery"."name", + "posthog_datawarehousesavedquery"."team_id", + "posthog_datawarehousesavedquery"."columns", + "posthog_datawarehousesavedquery"."external_tables", + "posthog_datawarehousesavedquery"."query" + FROM "posthog_datawarehousesavedquery" + WHERE ("posthog_datawarehousesavedquery"."team_id" = 2 + AND NOT ("posthog_datawarehousesavedquery"."deleted" + AND "posthog_datawarehousesavedquery"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.37 + ''' + SELECT "posthog_datawarehousejoin"."created_by_id", + "posthog_datawarehousejoin"."created_at", + "posthog_datawarehousejoin"."deleted", + "posthog_datawarehousejoin"."id", + "posthog_datawarehousejoin"."team_id", + "posthog_datawarehousejoin"."source_table_name", + "posthog_datawarehousejoin"."source_table_key", + "posthog_datawarehousejoin"."joining_table_name", + 
"posthog_datawarehousejoin"."joining_table_key", + "posthog_datawarehousejoin"."field_name" + FROM "posthog_datawarehousejoin" + WHERE ("posthog_datawarehousejoin"."team_id" = 2 + AND NOT ("posthog_datawarehousejoin"."deleted" + AND "posthog_datawarehousejoin"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.38 + ''' + SELECT "posthog_grouptypemapping"."id", + "posthog_grouptypemapping"."team_id", + "posthog_grouptypemapping"."group_type", + "posthog_grouptypemapping"."group_type_index", + "posthog_grouptypemapping"."name_singular", + "posthog_grouptypemapping"."name_plural" + FROM "posthog_grouptypemapping" + WHERE "posthog_grouptypemapping"."team_id" = 2 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.39 + ''' + SELECT "posthog_datawarehousetable"."created_by_id", + "posthog_datawarehousetable"."created_at", + "posthog_datawarehousetable"."deleted", + "posthog_datawarehousetable"."id", + "posthog_datawarehousetable"."name", + "posthog_datawarehousetable"."format", + "posthog_datawarehousetable"."team_id", + "posthog_datawarehousetable"."url_pattern", + "posthog_datawarehousetable"."credential_id", + "posthog_datawarehousetable"."external_data_source_id", + "posthog_datawarehousetable"."columns", + "posthog_datawarehousetable"."row_count" + FROM "posthog_datawarehousetable" + WHERE ("posthog_datawarehousetable"."team_id" = 2 + AND NOT ("posthog_datawarehousetable"."deleted" + AND "posthog_datawarehousetable"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.4 + ''' + SELECT "posthog_grouptypemapping"."id", + "posthog_grouptypemapping"."team_id", + "posthog_grouptypemapping"."group_type", + "posthog_grouptypemapping"."group_type_index", + "posthog_grouptypemapping"."name_singular", + "posthog_grouptypemapping"."name_plural" + FROM "posthog_grouptypemapping" + WHERE "posthog_grouptypemapping"."team_id" = 2 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.40 + ''' + SELECT "posthog_datawarehousesavedquery"."created_by_id", + "posthog_datawarehousesavedquery"."created_at", + "posthog_datawarehousesavedquery"."deleted", + "posthog_datawarehousesavedquery"."id", + "posthog_datawarehousesavedquery"."name", + "posthog_datawarehousesavedquery"."team_id", + "posthog_datawarehousesavedquery"."columns", + "posthog_datawarehousesavedquery"."external_tables", + "posthog_datawarehousesavedquery"."query" + FROM "posthog_datawarehousesavedquery" + WHERE ("posthog_datawarehousesavedquery"."team_id" = 2 + AND NOT ("posthog_datawarehousesavedquery"."deleted" + AND "posthog_datawarehousesavedquery"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.41 + ''' + SELECT "posthog_datawarehousejoin"."created_by_id", + "posthog_datawarehousejoin"."created_at", + "posthog_datawarehousejoin"."deleted", + "posthog_datawarehousejoin"."id", + "posthog_datawarehousejoin"."team_id", + "posthog_datawarehousejoin"."source_table_name", + "posthog_datawarehousejoin"."source_table_key", + "posthog_datawarehousejoin"."joining_table_name", + "posthog_datawarehousejoin"."joining_table_key", + "posthog_datawarehousejoin"."field_name" + FROM "posthog_datawarehousejoin" + WHERE ("posthog_datawarehousejoin"."team_id" = 2 + AND NOT ("posthog_datawarehousejoin"."deleted" + AND "posthog_datawarehousejoin"."deleted" IS NOT NULL)) + ''' +# --- +# 
name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.42 ''' SELECT "posthog_sessionrecording"."id", "posthog_sessionrecording"."session_id", @@ -2370,15 +3918,11 @@ "posthog_sessionrecording"."storage_version" FROM "posthog_sessionrecording" WHERE ("posthog_sessionrecording"."session_id" IN ('1', - '2', - '3', - '4', - '5', - '6') + '2') AND "posthog_sessionrecording"."team_id" = 2) ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.44 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.43 ''' SELECT "posthog_sessionrecordingviewed"."session_id" FROM "posthog_sessionrecordingviewed" @@ -2386,7 +3930,7 @@ AND "posthog_sessionrecordingviewed"."user_id" = 2) ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.45 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.44 ''' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -2406,15 +3950,11 @@ FROM "posthog_persondistinctid" INNER JOIN "posthog_person" ON ("posthog_persondistinctid"."person_id" = "posthog_person"."id") WHERE ("posthog_persondistinctid"."distinct_id" IN ('user1', - 'user2', - 'user3', - 'user4', - 'user5', - 'user6') + 'user2') AND "posthog_persondistinctid"."team_id" = 2) ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.46 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.45 ''' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -2476,7 +4016,7 @@ LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.47 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.46 ''' SELECT "posthog_user"."id", "posthog_user"."password", @@ -2508,7 +4048,7 @@ LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.48 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.47 ''' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -2563,7 +4103,7 @@ LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.49 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.48 ''' SELECT "posthog_organizationmembership"."id", "posthog_organizationmembership"."organization_id", @@ -2594,39 +4134,186 @@ WHERE "posthog_organizationmembership"."user_id" = 2 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.5 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.49 ''' - SELECT "posthog_user"."id", - "posthog_user"."password", - "posthog_user"."last_login", - "posthog_user"."first_name", - "posthog_user"."last_name", - "posthog_user"."is_staff", - "posthog_user"."is_active", - "posthog_user"."date_joined", - "posthog_user"."uuid", - "posthog_user"."current_organization_id", - "posthog_user"."current_team_id", - "posthog_user"."email", - "posthog_user"."pending_email", - "posthog_user"."temporary_token", - "posthog_user"."distinct_id", - "posthog_user"."is_email_verified", - "posthog_user"."has_seen_product_intro_for", - "posthog_user"."strapi_id", - "posthog_user"."theme_mode", - "posthog_user"."partial_notification_settings", - "posthog_user"."anonymize_data", - "posthog_user"."toolbar_mode", - "posthog_user"."hedgehog_config", - "posthog_user"."events_column_config", - "posthog_user"."email_opt_in" - FROM "posthog_user" - 
WHERE "posthog_user"."id" = 2 + SELECT "posthog_organization"."id", + "posthog_organization"."name", + "posthog_organization"."slug", + "posthog_organization"."created_at", + "posthog_organization"."updated_at", + "posthog_organization"."plugins_access_level", + "posthog_organization"."for_internal_metrics", + "posthog_organization"."is_member_join_email_enabled", + "posthog_organization"."enforce_2fa", + "posthog_organization"."is_hipaa", + "posthog_organization"."customer_id", + "posthog_organization"."available_product_features", + "posthog_organization"."usage", + "posthog_organization"."never_drop_data", + "posthog_organization"."customer_trust_scores", + "posthog_organization"."setup_section_2_completed", + "posthog_organization"."personalization", + "posthog_organization"."domain_whitelist" + FROM "posthog_organization" + WHERE "posthog_organization"."id" = '00000000-0000-0000-0000-000000000000'::uuid LIMIT 21 ''' # --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.5 + ''' + SELECT "posthog_datawarehousetable"."created_by_id", + "posthog_datawarehousetable"."created_at", + "posthog_datawarehousetable"."deleted", + "posthog_datawarehousetable"."id", + "posthog_datawarehousetable"."name", + "posthog_datawarehousetable"."format", + "posthog_datawarehousetable"."team_id", + "posthog_datawarehousetable"."url_pattern", + "posthog_datawarehousetable"."credential_id", + "posthog_datawarehousetable"."external_data_source_id", + "posthog_datawarehousetable"."columns", + "posthog_datawarehousetable"."row_count" + FROM "posthog_datawarehousetable" + WHERE ("posthog_datawarehousetable"."team_id" = 2 + AND NOT ("posthog_datawarehousetable"."deleted" + AND "posthog_datawarehousetable"."deleted" IS NOT NULL)) + ''' +# --- # name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.50 + ''' + SELECT "posthog_grouptypemapping"."id", + "posthog_grouptypemapping"."team_id", + "posthog_grouptypemapping"."group_type", + "posthog_grouptypemapping"."group_type_index", + "posthog_grouptypemapping"."name_singular", + "posthog_grouptypemapping"."name_plural" + FROM "posthog_grouptypemapping" + WHERE "posthog_grouptypemapping"."team_id" = 2 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.51 + ''' + SELECT "posthog_datawarehousetable"."created_by_id", + "posthog_datawarehousetable"."created_at", + "posthog_datawarehousetable"."deleted", + "posthog_datawarehousetable"."id", + "posthog_datawarehousetable"."name", + "posthog_datawarehousetable"."format", + "posthog_datawarehousetable"."team_id", + "posthog_datawarehousetable"."url_pattern", + "posthog_datawarehousetable"."credential_id", + "posthog_datawarehousetable"."external_data_source_id", + "posthog_datawarehousetable"."columns", + "posthog_datawarehousetable"."row_count" + FROM "posthog_datawarehousetable" + WHERE ("posthog_datawarehousetable"."team_id" = 2 + AND NOT ("posthog_datawarehousetable"."deleted" + AND "posthog_datawarehousetable"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.52 + ''' + SELECT "posthog_datawarehousesavedquery"."created_by_id", + "posthog_datawarehousesavedquery"."created_at", + "posthog_datawarehousesavedquery"."deleted", + "posthog_datawarehousesavedquery"."id", + "posthog_datawarehousesavedquery"."name", + "posthog_datawarehousesavedquery"."team_id", + "posthog_datawarehousesavedquery"."columns", + "posthog_datawarehousesavedquery"."external_tables", + 
"posthog_datawarehousesavedquery"."query" + FROM "posthog_datawarehousesavedquery" + WHERE ("posthog_datawarehousesavedquery"."team_id" = 2 + AND NOT ("posthog_datawarehousesavedquery"."deleted" + AND "posthog_datawarehousesavedquery"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.53 + ''' + SELECT "posthog_datawarehousejoin"."created_by_id", + "posthog_datawarehousejoin"."created_at", + "posthog_datawarehousejoin"."deleted", + "posthog_datawarehousejoin"."id", + "posthog_datawarehousejoin"."team_id", + "posthog_datawarehousejoin"."source_table_name", + "posthog_datawarehousejoin"."source_table_key", + "posthog_datawarehousejoin"."joining_table_name", + "posthog_datawarehousejoin"."joining_table_key", + "posthog_datawarehousejoin"."field_name" + FROM "posthog_datawarehousejoin" + WHERE ("posthog_datawarehousejoin"."team_id" = 2 + AND NOT ("posthog_datawarehousejoin"."deleted" + AND "posthog_datawarehousejoin"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.54 + ''' + SELECT "posthog_grouptypemapping"."id", + "posthog_grouptypemapping"."team_id", + "posthog_grouptypemapping"."group_type", + "posthog_grouptypemapping"."group_type_index", + "posthog_grouptypemapping"."name_singular", + "posthog_grouptypemapping"."name_plural" + FROM "posthog_grouptypemapping" + WHERE "posthog_grouptypemapping"."team_id" = 2 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.55 + ''' + SELECT "posthog_datawarehousetable"."created_by_id", + "posthog_datawarehousetable"."created_at", + "posthog_datawarehousetable"."deleted", + "posthog_datawarehousetable"."id", + "posthog_datawarehousetable"."name", + "posthog_datawarehousetable"."format", + "posthog_datawarehousetable"."team_id", + "posthog_datawarehousetable"."url_pattern", + "posthog_datawarehousetable"."credential_id", + "posthog_datawarehousetable"."external_data_source_id", + "posthog_datawarehousetable"."columns", + "posthog_datawarehousetable"."row_count" + FROM "posthog_datawarehousetable" + WHERE ("posthog_datawarehousetable"."team_id" = 2 + AND NOT ("posthog_datawarehousetable"."deleted" + AND "posthog_datawarehousetable"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.56 + ''' + SELECT "posthog_datawarehousesavedquery"."created_by_id", + "posthog_datawarehousesavedquery"."created_at", + "posthog_datawarehousesavedquery"."deleted", + "posthog_datawarehousesavedquery"."id", + "posthog_datawarehousesavedquery"."name", + "posthog_datawarehousesavedquery"."team_id", + "posthog_datawarehousesavedquery"."columns", + "posthog_datawarehousesavedquery"."external_tables", + "posthog_datawarehousesavedquery"."query" + FROM "posthog_datawarehousesavedquery" + WHERE ("posthog_datawarehousesavedquery"."team_id" = 2 + AND NOT ("posthog_datawarehousesavedquery"."deleted" + AND "posthog_datawarehousesavedquery"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.57 + ''' + SELECT "posthog_datawarehousejoin"."created_by_id", + "posthog_datawarehousejoin"."created_at", + "posthog_datawarehousejoin"."deleted", + "posthog_datawarehousejoin"."id", + "posthog_datawarehousejoin"."team_id", + "posthog_datawarehousejoin"."source_table_name", + "posthog_datawarehousejoin"."source_table_key", + "posthog_datawarehousejoin"."joining_table_name", + 
"posthog_datawarehousejoin"."joining_table_key", + "posthog_datawarehousejoin"."field_name" + FROM "posthog_datawarehousejoin" + WHERE ("posthog_datawarehousejoin"."team_id" = 2 + AND NOT ("posthog_datawarehousejoin"."deleted" + AND "posthog_datawarehousejoin"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.58 ''' SELECT "posthog_sessionrecording"."id", "posthog_sessionrecording"."session_id", @@ -2651,15 +4338,11 @@ FROM "posthog_sessionrecording" WHERE ("posthog_sessionrecording"."session_id" IN ('1', '2', - '3', - '4', - '5', - '6', - '7') + '3') AND "posthog_sessionrecording"."team_id" = 2) ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.51 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.59 ''' SELECT "posthog_sessionrecordingviewed"."session_id" FROM "posthog_sessionrecordingviewed" @@ -2667,7 +4350,24 @@ AND "posthog_sessionrecordingviewed"."user_id" = 2) ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.52 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.6 + ''' + SELECT "posthog_datawarehousesavedquery"."created_by_id", + "posthog_datawarehousesavedquery"."created_at", + "posthog_datawarehousesavedquery"."deleted", + "posthog_datawarehousesavedquery"."id", + "posthog_datawarehousesavedquery"."name", + "posthog_datawarehousesavedquery"."team_id", + "posthog_datawarehousesavedquery"."columns", + "posthog_datawarehousesavedquery"."external_tables", + "posthog_datawarehousesavedquery"."query" + FROM "posthog_datawarehousesavedquery" + WHERE ("posthog_datawarehousesavedquery"."team_id" = 2 + AND NOT ("posthog_datawarehousesavedquery"."deleted" + AND "posthog_datawarehousesavedquery"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.60 ''' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -2688,15 +4388,11 @@ INNER JOIN "posthog_person" ON ("posthog_persondistinctid"."person_id" = "posthog_person"."id") WHERE ("posthog_persondistinctid"."distinct_id" IN ('user1', 'user2', - 'user3', - 'user4', - 'user5', - 'user6', - 'user7') + 'user3') AND "posthog_persondistinctid"."team_id" = 2) ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.53 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.61 ''' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -2758,7 +4454,7 @@ LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.54 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.62 ''' SELECT "posthog_user"."id", "posthog_user"."password", @@ -2790,7 +4486,7 @@ LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.55 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.63 ''' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -2845,7 +4541,7 @@ LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.56 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.64 ''' SELECT "posthog_organizationmembership"."id", "posthog_organizationmembership"."organization_id", @@ -2876,7 +4572,184 @@ WHERE "posthog_organizationmembership"."user_id" = 2 ''' # --- -# name: 
TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.57 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.65 + ''' + SELECT "posthog_organization"."id", + "posthog_organization"."name", + "posthog_organization"."slug", + "posthog_organization"."created_at", + "posthog_organization"."updated_at", + "posthog_organization"."plugins_access_level", + "posthog_organization"."for_internal_metrics", + "posthog_organization"."is_member_join_email_enabled", + "posthog_organization"."enforce_2fa", + "posthog_organization"."is_hipaa", + "posthog_organization"."customer_id", + "posthog_organization"."available_product_features", + "posthog_organization"."usage", + "posthog_organization"."never_drop_data", + "posthog_organization"."customer_trust_scores", + "posthog_organization"."setup_section_2_completed", + "posthog_organization"."personalization", + "posthog_organization"."domain_whitelist" + FROM "posthog_organization" + WHERE "posthog_organization"."id" = '00000000-0000-0000-0000-000000000000'::uuid + LIMIT 21 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.66 + ''' + SELECT "posthog_grouptypemapping"."id", + "posthog_grouptypemapping"."team_id", + "posthog_grouptypemapping"."group_type", + "posthog_grouptypemapping"."group_type_index", + "posthog_grouptypemapping"."name_singular", + "posthog_grouptypemapping"."name_plural" + FROM "posthog_grouptypemapping" + WHERE "posthog_grouptypemapping"."team_id" = 2 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.67 + ''' + SELECT "posthog_datawarehousetable"."created_by_id", + "posthog_datawarehousetable"."created_at", + "posthog_datawarehousetable"."deleted", + "posthog_datawarehousetable"."id", + "posthog_datawarehousetable"."name", + "posthog_datawarehousetable"."format", + "posthog_datawarehousetable"."team_id", + "posthog_datawarehousetable"."url_pattern", + "posthog_datawarehousetable"."credential_id", + "posthog_datawarehousetable"."external_data_source_id", + "posthog_datawarehousetable"."columns", + "posthog_datawarehousetable"."row_count" + FROM "posthog_datawarehousetable" + WHERE ("posthog_datawarehousetable"."team_id" = 2 + AND NOT ("posthog_datawarehousetable"."deleted" + AND "posthog_datawarehousetable"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.68 + ''' + SELECT "posthog_datawarehousesavedquery"."created_by_id", + "posthog_datawarehousesavedquery"."created_at", + "posthog_datawarehousesavedquery"."deleted", + "posthog_datawarehousesavedquery"."id", + "posthog_datawarehousesavedquery"."name", + "posthog_datawarehousesavedquery"."team_id", + "posthog_datawarehousesavedquery"."columns", + "posthog_datawarehousesavedquery"."external_tables", + "posthog_datawarehousesavedquery"."query" + FROM "posthog_datawarehousesavedquery" + WHERE ("posthog_datawarehousesavedquery"."team_id" = 2 + AND NOT ("posthog_datawarehousesavedquery"."deleted" + AND "posthog_datawarehousesavedquery"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.69 + ''' + SELECT "posthog_datawarehousejoin"."created_by_id", + "posthog_datawarehousejoin"."created_at", + "posthog_datawarehousejoin"."deleted", + "posthog_datawarehousejoin"."id", + "posthog_datawarehousejoin"."team_id", + "posthog_datawarehousejoin"."source_table_name", + "posthog_datawarehousejoin"."source_table_key", + 
"posthog_datawarehousejoin"."joining_table_name", + "posthog_datawarehousejoin"."joining_table_key", + "posthog_datawarehousejoin"."field_name" + FROM "posthog_datawarehousejoin" + WHERE ("posthog_datawarehousejoin"."team_id" = 2 + AND NOT ("posthog_datawarehousejoin"."deleted" + AND "posthog_datawarehousejoin"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.7 + ''' + SELECT "posthog_datawarehousejoin"."created_by_id", + "posthog_datawarehousejoin"."created_at", + "posthog_datawarehousejoin"."deleted", + "posthog_datawarehousejoin"."id", + "posthog_datawarehousejoin"."team_id", + "posthog_datawarehousejoin"."source_table_name", + "posthog_datawarehousejoin"."source_table_key", + "posthog_datawarehousejoin"."joining_table_name", + "posthog_datawarehousejoin"."joining_table_key", + "posthog_datawarehousejoin"."field_name" + FROM "posthog_datawarehousejoin" + WHERE ("posthog_datawarehousejoin"."team_id" = 2 + AND NOT ("posthog_datawarehousejoin"."deleted" + AND "posthog_datawarehousejoin"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.70 + ''' + SELECT "posthog_grouptypemapping"."id", + "posthog_grouptypemapping"."team_id", + "posthog_grouptypemapping"."group_type", + "posthog_grouptypemapping"."group_type_index", + "posthog_grouptypemapping"."name_singular", + "posthog_grouptypemapping"."name_plural" + FROM "posthog_grouptypemapping" + WHERE "posthog_grouptypemapping"."team_id" = 2 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.71 + ''' + SELECT "posthog_datawarehousetable"."created_by_id", + "posthog_datawarehousetable"."created_at", + "posthog_datawarehousetable"."deleted", + "posthog_datawarehousetable"."id", + "posthog_datawarehousetable"."name", + "posthog_datawarehousetable"."format", + "posthog_datawarehousetable"."team_id", + "posthog_datawarehousetable"."url_pattern", + "posthog_datawarehousetable"."credential_id", + "posthog_datawarehousetable"."external_data_source_id", + "posthog_datawarehousetable"."columns", + "posthog_datawarehousetable"."row_count" + FROM "posthog_datawarehousetable" + WHERE ("posthog_datawarehousetable"."team_id" = 2 + AND NOT ("posthog_datawarehousetable"."deleted" + AND "posthog_datawarehousetable"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.72 + ''' + SELECT "posthog_datawarehousesavedquery"."created_by_id", + "posthog_datawarehousesavedquery"."created_at", + "posthog_datawarehousesavedquery"."deleted", + "posthog_datawarehousesavedquery"."id", + "posthog_datawarehousesavedquery"."name", + "posthog_datawarehousesavedquery"."team_id", + "posthog_datawarehousesavedquery"."columns", + "posthog_datawarehousesavedquery"."external_tables", + "posthog_datawarehousesavedquery"."query" + FROM "posthog_datawarehousesavedquery" + WHERE ("posthog_datawarehousesavedquery"."team_id" = 2 + AND NOT ("posthog_datawarehousesavedquery"."deleted" + AND "posthog_datawarehousesavedquery"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.73 + ''' + SELECT "posthog_datawarehousejoin"."created_by_id", + "posthog_datawarehousejoin"."created_at", + "posthog_datawarehousejoin"."deleted", + "posthog_datawarehousejoin"."id", + "posthog_datawarehousejoin"."team_id", + "posthog_datawarehousejoin"."source_table_name", + 
"posthog_datawarehousejoin"."source_table_key", + "posthog_datawarehousejoin"."joining_table_name", + "posthog_datawarehousejoin"."joining_table_key", + "posthog_datawarehousejoin"."field_name" + FROM "posthog_datawarehousejoin" + WHERE ("posthog_datawarehousejoin"."team_id" = 2 + AND NOT ("posthog_datawarehousejoin"."deleted" + AND "posthog_datawarehousejoin"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.74 ''' SELECT "posthog_sessionrecording"."id", "posthog_sessionrecording"."session_id", @@ -2898,19 +4771,15 @@ "posthog_sessionrecording"."console_error_count", "posthog_sessionrecording"."start_url", "posthog_sessionrecording"."storage_version" - FROM "posthog_sessionrecording" - WHERE ("posthog_sessionrecording"."session_id" IN ('1', - '2', - '3', - '4', - '5', - '6', - '7', - '8') + FROM "posthog_sessionrecording" + WHERE ("posthog_sessionrecording"."session_id" IN ('1', + '2', + '3', + '4') AND "posthog_sessionrecording"."team_id" = 2) ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.58 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.75 ''' SELECT "posthog_sessionrecordingviewed"."session_id" FROM "posthog_sessionrecordingviewed" @@ -2918,7 +4787,7 @@ AND "posthog_sessionrecordingviewed"."user_id" = 2) ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.59 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.76 ''' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -2940,70 +4809,11 @@ WHERE ("posthog_persondistinctid"."distinct_id" IN ('user1', 'user2', 'user3', - 'user4', - 'user5', - 'user6', - 'user7', - 'user8') + 'user4') AND "posthog_persondistinctid"."team_id" = 2) ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.6 - ''' - SELECT "posthog_team"."id", - "posthog_team"."uuid", - "posthog_team"."organization_id", - "posthog_team"."project_id", - "posthog_team"."api_token", - "posthog_team"."app_urls", - "posthog_team"."name", - "posthog_team"."slack_incoming_webhook", - "posthog_team"."created_at", - "posthog_team"."updated_at", - "posthog_team"."anonymize_ips", - "posthog_team"."completed_snippet_onboarding", - "posthog_team"."has_completed_onboarding_for", - "posthog_team"."ingested_event", - "posthog_team"."autocapture_opt_out", - "posthog_team"."autocapture_exceptions_opt_in", - "posthog_team"."autocapture_exceptions_errors_to_ignore", - "posthog_team"."session_recording_opt_in", - "posthog_team"."session_recording_sample_rate", - "posthog_team"."session_recording_minimum_duration_milliseconds", - "posthog_team"."session_recording_linked_flag", - "posthog_team"."session_recording_network_payload_capture_config", - "posthog_team"."session_replay_config", - "posthog_team"."capture_console_log_opt_in", - "posthog_team"."capture_performance_opt_in", - "posthog_team"."surveys_opt_in", - "posthog_team"."heatmaps_opt_in", - "posthog_team"."session_recording_version", - "posthog_team"."signup_token", - "posthog_team"."is_demo", - "posthog_team"."access_control", - "posthog_team"."week_start_day", - "posthog_team"."inject_web_apps", - "posthog_team"."test_account_filters", - "posthog_team"."test_account_filters_default_checked", - "posthog_team"."path_cleaning_filters", - "posthog_team"."timezone", - "posthog_team"."data_attributes", - "posthog_team"."person_display_name_properties", - 
"posthog_team"."live_events_columns", - "posthog_team"."recording_domains", - "posthog_team"."primary_dashboard_id", - "posthog_team"."extra_settings", - "posthog_team"."modifiers", - "posthog_team"."correlation_config", - "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id", - "posthog_team"."external_data_workspace_last_synced_at" - FROM "posthog_team" - WHERE "posthog_team"."id" = 2 - LIMIT 21 - ''' -# --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.60 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.77 ''' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -3065,7 +4875,7 @@ LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.61 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.78 ''' SELECT "posthog_user"."id", "posthog_user"."password", @@ -3097,7 +4907,7 @@ LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.62 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.79 ''' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -3152,7 +4962,19 @@ LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.63 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.8 + ''' + SELECT "posthog_grouptypemapping"."id", + "posthog_grouptypemapping"."team_id", + "posthog_grouptypemapping"."group_type", + "posthog_grouptypemapping"."group_type_index", + "posthog_grouptypemapping"."name_singular", + "posthog_grouptypemapping"."name_plural" + FROM "posthog_grouptypemapping" + WHERE "posthog_grouptypemapping"."team_id" = 2 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.80 ''' SELECT "posthog_organizationmembership"."id", "posthog_organizationmembership"."organization_id", @@ -3183,7 +5005,186 @@ WHERE "posthog_organizationmembership"."user_id" = 2 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.64 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.81 + ''' + SELECT "posthog_organization"."id", + "posthog_organization"."name", + "posthog_organization"."slug", + "posthog_organization"."created_at", + "posthog_organization"."updated_at", + "posthog_organization"."plugins_access_level", + "posthog_organization"."for_internal_metrics", + "posthog_organization"."is_member_join_email_enabled", + "posthog_organization"."enforce_2fa", + "posthog_organization"."is_hipaa", + "posthog_organization"."customer_id", + "posthog_organization"."available_product_features", + "posthog_organization"."usage", + "posthog_organization"."never_drop_data", + "posthog_organization"."customer_trust_scores", + "posthog_organization"."setup_section_2_completed", + "posthog_organization"."personalization", + "posthog_organization"."domain_whitelist" + FROM "posthog_organization" + WHERE "posthog_organization"."id" = '00000000-0000-0000-0000-000000000000'::uuid + LIMIT 21 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.82 + ''' + SELECT "posthog_grouptypemapping"."id", + "posthog_grouptypemapping"."team_id", + "posthog_grouptypemapping"."group_type", + "posthog_grouptypemapping"."group_type_index", + "posthog_grouptypemapping"."name_singular", + "posthog_grouptypemapping"."name_plural" + FROM "posthog_grouptypemapping" + WHERE 
"posthog_grouptypemapping"."team_id" = 2 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.83 + ''' + SELECT "posthog_datawarehousetable"."created_by_id", + "posthog_datawarehousetable"."created_at", + "posthog_datawarehousetable"."deleted", + "posthog_datawarehousetable"."id", + "posthog_datawarehousetable"."name", + "posthog_datawarehousetable"."format", + "posthog_datawarehousetable"."team_id", + "posthog_datawarehousetable"."url_pattern", + "posthog_datawarehousetable"."credential_id", + "posthog_datawarehousetable"."external_data_source_id", + "posthog_datawarehousetable"."columns", + "posthog_datawarehousetable"."row_count" + FROM "posthog_datawarehousetable" + WHERE ("posthog_datawarehousetable"."team_id" = 2 + AND NOT ("posthog_datawarehousetable"."deleted" + AND "posthog_datawarehousetable"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.84 + ''' + SELECT "posthog_datawarehousesavedquery"."created_by_id", + "posthog_datawarehousesavedquery"."created_at", + "posthog_datawarehousesavedquery"."deleted", + "posthog_datawarehousesavedquery"."id", + "posthog_datawarehousesavedquery"."name", + "posthog_datawarehousesavedquery"."team_id", + "posthog_datawarehousesavedquery"."columns", + "posthog_datawarehousesavedquery"."external_tables", + "posthog_datawarehousesavedquery"."query" + FROM "posthog_datawarehousesavedquery" + WHERE ("posthog_datawarehousesavedquery"."team_id" = 2 + AND NOT ("posthog_datawarehousesavedquery"."deleted" + AND "posthog_datawarehousesavedquery"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.85 + ''' + SELECT "posthog_datawarehousejoin"."created_by_id", + "posthog_datawarehousejoin"."created_at", + "posthog_datawarehousejoin"."deleted", + "posthog_datawarehousejoin"."id", + "posthog_datawarehousejoin"."team_id", + "posthog_datawarehousejoin"."source_table_name", + "posthog_datawarehousejoin"."source_table_key", + "posthog_datawarehousejoin"."joining_table_name", + "posthog_datawarehousejoin"."joining_table_key", + "posthog_datawarehousejoin"."field_name" + FROM "posthog_datawarehousejoin" + WHERE ("posthog_datawarehousejoin"."team_id" = 2 + AND NOT ("posthog_datawarehousejoin"."deleted" + AND "posthog_datawarehousejoin"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.86 + ''' + SELECT "posthog_grouptypemapping"."id", + "posthog_grouptypemapping"."team_id", + "posthog_grouptypemapping"."group_type", + "posthog_grouptypemapping"."group_type_index", + "posthog_grouptypemapping"."name_singular", + "posthog_grouptypemapping"."name_plural" + FROM "posthog_grouptypemapping" + WHERE "posthog_grouptypemapping"."team_id" = 2 + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.87 + ''' + SELECT "posthog_datawarehousetable"."created_by_id", + "posthog_datawarehousetable"."created_at", + "posthog_datawarehousetable"."deleted", + "posthog_datawarehousetable"."id", + "posthog_datawarehousetable"."name", + "posthog_datawarehousetable"."format", + "posthog_datawarehousetable"."team_id", + "posthog_datawarehousetable"."url_pattern", + "posthog_datawarehousetable"."credential_id", + "posthog_datawarehousetable"."external_data_source_id", + "posthog_datawarehousetable"."columns", + "posthog_datawarehousetable"."row_count" + FROM "posthog_datawarehousetable" + WHERE 
("posthog_datawarehousetable"."team_id" = 2 + AND NOT ("posthog_datawarehousetable"."deleted" + AND "posthog_datawarehousetable"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.88 + ''' + SELECT "posthog_datawarehousesavedquery"."created_by_id", + "posthog_datawarehousesavedquery"."created_at", + "posthog_datawarehousesavedquery"."deleted", + "posthog_datawarehousesavedquery"."id", + "posthog_datawarehousesavedquery"."name", + "posthog_datawarehousesavedquery"."team_id", + "posthog_datawarehousesavedquery"."columns", + "posthog_datawarehousesavedquery"."external_tables", + "posthog_datawarehousesavedquery"."query" + FROM "posthog_datawarehousesavedquery" + WHERE ("posthog_datawarehousesavedquery"."team_id" = 2 + AND NOT ("posthog_datawarehousesavedquery"."deleted" + AND "posthog_datawarehousesavedquery"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.89 + ''' + SELECT "posthog_datawarehousejoin"."created_by_id", + "posthog_datawarehousejoin"."created_at", + "posthog_datawarehousejoin"."deleted", + "posthog_datawarehousejoin"."id", + "posthog_datawarehousejoin"."team_id", + "posthog_datawarehousejoin"."source_table_name", + "posthog_datawarehousejoin"."source_table_key", + "posthog_datawarehousejoin"."joining_table_name", + "posthog_datawarehousejoin"."joining_table_key", + "posthog_datawarehousejoin"."field_name" + FROM "posthog_datawarehousejoin" + WHERE ("posthog_datawarehousejoin"."team_id" = 2 + AND NOT ("posthog_datawarehousejoin"."deleted" + AND "posthog_datawarehousejoin"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.9 + ''' + SELECT "posthog_datawarehousetable"."created_by_id", + "posthog_datawarehousetable"."created_at", + "posthog_datawarehousetable"."deleted", + "posthog_datawarehousetable"."id", + "posthog_datawarehousetable"."name", + "posthog_datawarehousetable"."format", + "posthog_datawarehousetable"."team_id", + "posthog_datawarehousetable"."url_pattern", + "posthog_datawarehousetable"."credential_id", + "posthog_datawarehousetable"."external_data_source_id", + "posthog_datawarehousetable"."columns", + "posthog_datawarehousetable"."row_count" + FROM "posthog_datawarehousetable" + WHERE ("posthog_datawarehousetable"."team_id" = 2 + AND NOT ("posthog_datawarehousetable"."deleted" + AND "posthog_datawarehousetable"."deleted" IS NOT NULL)) + ''' +# --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.90 ''' SELECT "posthog_sessionrecording"."id", "posthog_sessionrecording"."session_id", @@ -3210,15 +5211,11 @@ '2', '3', '4', - '5', - '6', - '7', - '8', - '9') + '5') AND "posthog_sessionrecording"."team_id" = 2) ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.65 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.91 ''' SELECT "posthog_sessionrecordingviewed"."session_id" FROM "posthog_sessionrecordingviewed" @@ -3226,7 +5223,7 @@ AND "posthog_sessionrecordingviewed"."user_id" = 2) ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.66 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.92 ''' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -3249,15 +5246,11 @@ 'user2', 'user3', 'user4', - 'user5', - 'user6', - 'user7', - 'user8', - 'user9') + 'user5') AND 
"posthog_persondistinctid"."team_id" = 2) ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.67 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.93 ''' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -3319,7 +5312,7 @@ LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.68 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.94 ''' SELECT "posthog_user"."id", "posthog_user"."password", @@ -3351,7 +5344,7 @@ LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.69 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.95 ''' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -3406,7 +5399,7 @@ LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.7 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.96 ''' SELECT "posthog_organizationmembership"."id", "posthog_organizationmembership"."organization_id", @@ -3437,15 +5430,9 @@ WHERE "posthog_organizationmembership"."user_id" = 2 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.70 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.97 ''' - SELECT "posthog_organizationmembership"."id", - "posthog_organizationmembership"."organization_id", - "posthog_organizationmembership"."user_id", - "posthog_organizationmembership"."level", - "posthog_organizationmembership"."joined_at", - "posthog_organizationmembership"."updated_at", - "posthog_organization"."id", + SELECT "posthog_organization"."id", "posthog_organization"."name", "posthog_organization"."slug", "posthog_organization"."created_at", @@ -3463,119 +5450,40 @@ "posthog_organization"."setup_section_2_completed", "posthog_organization"."personalization", "posthog_organization"."domain_whitelist" - FROM "posthog_organizationmembership" - INNER JOIN "posthog_organization" ON ("posthog_organizationmembership"."organization_id" = "posthog_organization"."id") - WHERE "posthog_organizationmembership"."user_id" = 2 - ''' -# --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.71 - ''' - SELECT "posthog_sessionrecording"."id", - "posthog_sessionrecording"."session_id", - "posthog_sessionrecording"."team_id", - "posthog_sessionrecording"."created_at", - "posthog_sessionrecording"."deleted", - "posthog_sessionrecording"."object_storage_path", - "posthog_sessionrecording"."distinct_id", - "posthog_sessionrecording"."duration", - "posthog_sessionrecording"."active_seconds", - "posthog_sessionrecording"."inactive_seconds", - "posthog_sessionrecording"."start_time", - "posthog_sessionrecording"."end_time", - "posthog_sessionrecording"."click_count", - "posthog_sessionrecording"."keypress_count", - "posthog_sessionrecording"."mouse_activity_count", - "posthog_sessionrecording"."console_log_count", - "posthog_sessionrecording"."console_warn_count", - "posthog_sessionrecording"."console_error_count", - "posthog_sessionrecording"."start_url", - "posthog_sessionrecording"."storage_version" - FROM "posthog_sessionrecording" - WHERE ("posthog_sessionrecording"."session_id" IN ('1', - '10', - '2', - '3', - '4', - '5', - '6', - '7', - '8', - '9') - AND "posthog_sessionrecording"."team_id" = 2) - ''' -# --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.72 - ''' - SELECT 
"posthog_sessionrecordingviewed"."session_id" - FROM "posthog_sessionrecordingviewed" - WHERE ("posthog_sessionrecordingviewed"."team_id" = 2 - AND "posthog_sessionrecordingviewed"."user_id" = 2) - ''' -# --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.73 - ''' - SELECT "posthog_persondistinctid"."id", - "posthog_persondistinctid"."team_id", - "posthog_persondistinctid"."person_id", - "posthog_persondistinctid"."distinct_id", - "posthog_persondistinctid"."version", - "posthog_person"."id", - "posthog_person"."created_at", - "posthog_person"."properties_last_updated_at", - "posthog_person"."properties_last_operation", - "posthog_person"."team_id", - "posthog_person"."properties", - "posthog_person"."is_user_id", - "posthog_person"."is_identified", - "posthog_person"."uuid", - "posthog_person"."version" - FROM "posthog_persondistinctid" - INNER JOIN "posthog_person" ON ("posthog_persondistinctid"."person_id" = "posthog_person"."id") - WHERE ("posthog_persondistinctid"."distinct_id" IN ('user1', - 'user10', - 'user2', - 'user3', - 'user4', - 'user5', - 'user6', - 'user7', - 'user8', - 'user9') - AND "posthog_persondistinctid"."team_id" = 2) + FROM "posthog_organization" + WHERE "posthog_organization"."id" = '00000000-0000-0000-0000-000000000000'::uuid + LIMIT 21 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.8 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.98 ''' - SELECT "posthog_sessionrecording"."id", - "posthog_sessionrecording"."session_id", - "posthog_sessionrecording"."team_id", - "posthog_sessionrecording"."created_at", - "posthog_sessionrecording"."deleted", - "posthog_sessionrecording"."object_storage_path", - "posthog_sessionrecording"."distinct_id", - "posthog_sessionrecording"."duration", - "posthog_sessionrecording"."active_seconds", - "posthog_sessionrecording"."inactive_seconds", - "posthog_sessionrecording"."start_time", - "posthog_sessionrecording"."end_time", - "posthog_sessionrecording"."click_count", - "posthog_sessionrecording"."keypress_count", - "posthog_sessionrecording"."mouse_activity_count", - "posthog_sessionrecording"."console_log_count", - "posthog_sessionrecording"."console_warn_count", - "posthog_sessionrecording"."console_error_count", - "posthog_sessionrecording"."start_url", - "posthog_sessionrecording"."storage_version" - FROM "posthog_sessionrecording" - WHERE ("posthog_sessionrecording"."session_id" IN ('1') - AND "posthog_sessionrecording"."team_id" = 2) + SELECT "posthog_grouptypemapping"."id", + "posthog_grouptypemapping"."team_id", + "posthog_grouptypemapping"."group_type", + "posthog_grouptypemapping"."group_type_index", + "posthog_grouptypemapping"."name_singular", + "posthog_grouptypemapping"."name_plural" + FROM "posthog_grouptypemapping" + WHERE "posthog_grouptypemapping"."team_id" = 2 ''' # --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.9 - ''' - SELECT "posthog_sessionrecordingviewed"."session_id" - FROM "posthog_sessionrecordingviewed" - WHERE ("posthog_sessionrecordingviewed"."team_id" = 2 - AND "posthog_sessionrecordingviewed"."user_id" = 2) +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.99 + ''' + SELECT "posthog_datawarehousetable"."created_by_id", + "posthog_datawarehousetable"."created_at", + "posthog_datawarehousetable"."deleted", + "posthog_datawarehousetable"."id", + "posthog_datawarehousetable"."name", + "posthog_datawarehousetable"."format", + 
"posthog_datawarehousetable"."team_id", + "posthog_datawarehousetable"."url_pattern", + "posthog_datawarehousetable"."credential_id", + "posthog_datawarehousetable"."external_data_source_id", + "posthog_datawarehousetable"."columns", + "posthog_datawarehousetable"."row_count" + FROM "posthog_datawarehousetable" + WHERE ("posthog_datawarehousetable"."team_id" = 2 + AND NOT ("posthog_datawarehousetable"."deleted" + AND "posthog_datawarehousetable"."deleted" IS NOT NULL)) ''' # --- diff --git a/posthog/session_recordings/test/test_session_recordings.py b/posthog/session_recordings/test/test_session_recordings.py index bed84f6be517a..5efc1b1e58f4f 100644 --- a/posthog/session_recordings/test/test_session_recordings.py +++ b/posthog/session_recordings/test/test_session_recordings.py @@ -167,7 +167,7 @@ def test_can_list_recordings_even_when_the_person_has_multiple_distinct_ids(self assert results_[0]["distinct_id"] == "user2" assert results_[1]["distinct_id"] in twelve_distinct_ids - @patch("posthog.session_recordings.session_recording_api.SessionRecordingListFromReplaySummary") + @patch("posthog.session_recordings.session_recording_api.SessionRecordingListFromFilters") def test_console_log_filters_are_correctly_passed_to_listing(self, mock_summary_lister): mock_summary_lister.return_value.run.return_value = ([], False) diff --git a/posthog/settings/session_replay.py b/posthog/settings/session_replay.py index 429f3207dccf7..0141efc0c0925 100644 --- a/posthog/settings/session_replay.py +++ b/posthog/settings/session_replay.py @@ -26,3 +26,8 @@ REPLAY_EMBEDDINGS_CLUSTERING_DBSCAN_MIN_SAMPLES = get_from_env( "REPLAY_EMBEDDINGS_CLUSTERING_DBSCAN_MIN_SAMPLES", 10, type_cast=int ) + +REPLAY_MESSAGE_TOO_LARGE_SAMPLE_RATE = get_from_env("REPLAY_MESSAGE_TOO_LARGE_SAMPLE_RATE", 0, type_cast=float) +REPLAY_MESSAGE_TOO_LARGE_SAMPLE_BUCKET = get_from_env( + "REPLAY_MESSAGE_TOO_LARGE_SAMPLE_BUCKET", "posthog-cloud-prod-us-east-1-k8s-replay-samples" +) diff --git a/posthog/storage/object_storage.py b/posthog/storage/object_storage.py index 52d7486dbcfe1..e073d6bf65dae 100644 --- a/posthog/storage/object_storage.py +++ b/posthog/storage/object_storage.py @@ -218,9 +218,9 @@ def object_storage_client() -> ObjectStorageClient: return _client -def write(file_name: str, content: Union[str, bytes], extras: dict | None = None) -> None: +def write(file_name: str, content: Union[str, bytes], extras: dict | None = None, bucket: str | None = None) -> None: return object_storage_client().write( - bucket=settings.OBJECT_STORAGE_BUCKET, + bucket=bucket or settings.OBJECT_STORAGE_BUCKET, key=file_name, content=content, extras=extras, @@ -231,8 +231,8 @@ def tag(file_name: str, tags: dict[str, str]) -> None: return object_storage_client().tag(bucket=settings.OBJECT_STORAGE_BUCKET, key=file_name, tags=tags) -def read(file_name: str) -> Optional[str]: - return object_storage_client().read(bucket=settings.OBJECT_STORAGE_BUCKET, key=file_name) +def read(file_name: str, bucket: str | None = None) -> Optional[str]: + return object_storage_client().read(bucket=bucket or settings.OBJECT_STORAGE_BUCKET, key=file_name) def read_bytes(file_name: str) -> Optional[bytes]: diff --git a/posthog/test/__snapshots__/test_feature_flag.ambr b/posthog/test/__snapshots__/test_feature_flag.ambr index a1fc1f4654fab..8699844b2b647 100644 --- a/posthog/test/__snapshots__/test_feature_flag.ambr +++ b/posthog/test/__snapshots__/test_feature_flag.ambr @@ -551,39 +551,6 @@ AND "posthog_person"."team_id" = 2) ''' # --- -# name: 
TestFeatureFlagMatcher.test_numeric_operator_with_cohorts_and_nested_cohorts.2 - ''' - SELECT (((("posthog_person"."properties" -> 'number') > '"100"' - AND JSONB_TYPEOF(("posthog_person"."properties" -> 'number')) = ('string')) - OR (("posthog_person"."properties" -> 'number') > '100.0' - AND JSONB_TYPEOF(("posthog_person"."properties" -> 'number')) = ('number'))) - AND "posthog_person"."properties" ? 'number' - AND NOT (("posthog_person"."properties" -> 'number') = 'null')) AS "flag_X_condition_0", - (((("posthog_person"."properties" -> 'version') > '"1.05"' - AND JSONB_TYPEOF(("posthog_person"."properties" -> 'version')) = ('string')) - OR (("posthog_person"."properties" -> 'version') > '1.05' - AND JSONB_TYPEOF(("posthog_person"."properties" -> 'version')) = ('number'))) - AND "posthog_person"."properties" ? 'version' - AND NOT (("posthog_person"."properties" -> 'version') = 'null')) AS "flag_X_condition_0", - (((("posthog_person"."properties" -> 'number') < '"31"' - AND JSONB_TYPEOF(("posthog_person"."properties" -> 'number')) = ('string')) - OR (("posthog_person"."properties" -> 'number') < '31.0' - AND JSONB_TYPEOF(("posthog_person"."properties" -> 'number')) = ('number'))) - AND "posthog_person"."properties" ? 'number' - AND NOT (("posthog_person"."properties" -> 'number') = 'null') - AND ((("posthog_person"."properties" -> 'nested_prop') > '"20"' - AND JSONB_TYPEOF(("posthog_person"."properties" -> 'nested_prop')) = ('string')) - OR (("posthog_person"."properties" -> 'nested_prop') > '20.0' - AND JSONB_TYPEOF(("posthog_person"."properties" -> 'nested_prop')) = ('number'))) - AND "posthog_person"."properties" ? 'nested_prop' - AND NOT (("posthog_person"."properties" -> 'nested_prop') = 'null')) AS "flag_X_condition_0" - FROM "posthog_person" - INNER JOIN "posthog_persondistinctid" ON ("posthog_person"."id" = "posthog_persondistinctid"."person_id") - WHERE ("posthog_persondistinctid"."distinct_id" = '307' - AND "posthog_persondistinctid"."team_id" = 2 - AND "posthog_person"."team_id" = 2) - ''' -# --- # name: TestFeatureFlagMatcher.test_numeric_operator_with_groups_and_person_flags ''' SELECT "posthog_grouptypemapping"."id", @@ -656,26 +623,6 @@ AND "posthog_person"."team_id" = 2) ''' # --- -# name: TestFeatureFlagMatcher.test_super_condition_matches_string.1 - ''' - SELECT ((("posthog_person"."properties" -> 'is_enabled') = 'true' - OR ("posthog_person"."properties" -> 'is_enabled') = '"true"') - AND "posthog_person"."properties" ? 'is_enabled' - AND NOT (("posthog_person"."properties" -> 'is_enabled') = 'null')) AS "flag_X_super_condition", ("posthog_person"."properties" -> 'is_enabled') IS NOT NULL AS "flag_X_super_condition_is_set", - (("posthog_person"."properties" -> 'email') = '"fake@posthog.com"' - AND "posthog_person"."properties" ? 'email' - AND NOT (("posthog_person"."properties" -> 'email') = 'null')) AS "flag_X_condition_0", - (("posthog_person"."properties" -> 'email') = '"test@posthog.com"' - AND "posthog_person"."properties" ? 
'email' - AND NOT (("posthog_person"."properties" -> 'email') = 'null')) AS "flag_X_condition_1", - (true) AS "flag_X_condition_2" - FROM "posthog_person" - INNER JOIN "posthog_persondistinctid" ON ("posthog_person"."id" = "posthog_persondistinctid"."person_id") - WHERE ("posthog_persondistinctid"."distinct_id" = 'test_id' - AND "posthog_persondistinctid"."team_id" = 2 - AND "posthog_person"."team_id" = 2) - ''' -# --- # name: TestFeatureFlagMatcher.test_with_sql_injection_properties_and_other_aliases ''' SELECT "posthog_team"."id", @@ -822,43 +769,6 @@ AND "posthog_person"."team_id" = 2) ''' # --- -# name: TestFeatureFlagMatcher.test_with_sql_injection_properties_and_other_aliases.4 - ''' - SELECT (((("posthog_person"."properties" -> 'number space') > '"100"' - AND JSONB_TYPEOF(("posthog_person"."properties" -> 'number space')) = ('string')) - OR (("posthog_person"."properties" -> 'number space') > '100.0' - AND JSONB_TYPEOF(("posthog_person"."properties" -> 'number space')) = ('number'))) - AND "posthog_person"."properties" ? 'number space' - AND NOT (("posthog_person"."properties" -> 'number space') = 'null') - AND ((JSONB_TYPEOF(("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;')) = ('string') - AND ("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;') > '"100"') - OR (JSONB_TYPEOF(("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;')) = ('number') - AND ("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;') > '100.0')) - AND "posthog_person"."properties" ? ';''" SELECT 1; DROP TABLE posthog_featureflag;' - AND NOT (("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;') = 'null')) AS "flag_X_condition_0", - (((JSONB_TYPEOF(("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;')) = ('string') - AND ("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;') > '"100"') - OR (JSONB_TYPEOF(("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;')) = ('number') - AND ("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;') > '100.0')) - AND "posthog_person"."properties" ? ';''" SELECT 1; DROP TABLE posthog_featureflag;' - AND NOT (("posthog_person"."properties" -> ';''" SELECT 1; DROP TABLE posthog_featureflag;') = 'null')) AS "flag_X_condition_1", - (((("posthog_person"."properties" -> 'version!!!') > '"1.05"' - AND JSONB_TYPEOF(("posthog_person"."properties" -> 'version!!!')) = ('string')) - OR (("posthog_person"."properties" -> 'version!!!') > '1.05' - AND JSONB_TYPEOF(("posthog_person"."properties" -> 'version!!!')) = ('number'))) - AND "posthog_person"."properties" ? 'version!!!' - AND NOT (("posthog_person"."properties" -> 'version!!!') = 'null')) AS "flag_X_condition_2", - ((("posthog_person"."properties" -> 'nested_prop --random #comment //test') = '"21"' - OR ("posthog_person"."properties" -> 'nested_prop --random #comment //test') = '21') - AND "posthog_person"."properties" ? 
'nested_prop --random #comment //test'
-         AND NOT (("posthog_person"."properties" -> 'nested_prop --random #comment //test') = 'null')) AS "flag_X_condition_3"
-  FROM "posthog_person"
-  INNER JOIN "posthog_persondistinctid" ON ("posthog_person"."id" = "posthog_persondistinctid"."person_id")
-  WHERE ("posthog_persondistinctid"."distinct_id" = '307'
-         AND "posthog_persondistinctid"."team_id" = 2
-         AND "posthog_person"."team_id" = 2)
-  '''
-# ---
 # name: TestHashKeyOverridesRaceConditions.test_hash_key_overrides_with_simulated_error_race_conditions_on_person_merging
   'BEGIN'
 # ---
diff --git a/posthog/test/test_feature_flag.py b/posthog/test/test_feature_flag.py
index 098726bbd270d..5c6388f3dec8e 100644
--- a/posthog/test/test_feature_flag.py
+++ b/posthog/test/test_feature_flag.py
@@ -798,6 +798,63 @@ def test_invalid_regex_match_flag(self):
             FeatureFlagMatch(False, None, FeatureFlagMatchReason.NO_CONDITION_MATCH, 0),
         )
 
+    def test_feature_flag_with_greater_than_filter(self):
+        Person.objects.create(
+            team=self.team,
+            distinct_ids=["example_id"],
+            properties={"$some_prop": 5},
+        )
+        feature_flag = self.create_feature_flag(
+            key="flag-with-gt-filter",
+            filters={
+                "groups": [{"properties": [{"key": "$some_prop", "value": 4, "type": "person", "operator": "gt"}]}]
+            },
+        )
+
+        with self.assertNumQueries(4):
+            self.assertEqual(
+                self.match_flag(feature_flag, "example_id"),
+                FeatureFlagMatch(True, None, FeatureFlagMatchReason.CONDITION_MATCH, 0),
+            )
+
+    def test_feature_flag_with_greater_than_filter_no_match(self):
+        Person.objects.create(
+            team=self.team,
+            distinct_ids=["example_id"],
+            properties={"$some_prop": 3},
+        )
+        feature_flag = self.create_feature_flag(
+            key="flag-with-gt-filter",
+            filters={
+                "groups": [{"properties": [{"key": "$some_prop", "value": 4, "type": "person", "operator": "gt"}]}]
+            },
+        )
+
+        with self.assertNumQueries(4):
+            self.assertEqual(
+                self.match_flag(feature_flag, "example_id"),
+                FeatureFlagMatch(False, None, FeatureFlagMatchReason.NO_CONDITION_MATCH, 0),
+            )
+
+    def test_feature_flag_with_greater_than_filter_invalid_value(self):
+        Person.objects.create(
+            team=self.team,
+            distinct_ids=["example_id"],
+            properties={"$some_prop": 3},
+        )
+        feature_flag = self.create_feature_flag(
+            key="flag-with-gt-filter",
+            filters={
+                "groups": [{"properties": [{"key": "$some_prop", "value": ["4"], "type": "person", "operator": "gt"}]}]
+            },
+        )
+
+        with self.assertNumQueries(3):
+            self.assertEqual(
+                self.match_flag(feature_flag, "example_id"),
+                FeatureFlagMatch(False, None, FeatureFlagMatchReason.NO_CONDITION_MATCH, 0),
+            )
+
     def test_coercion_of_strings_and_numbers(self):
         Person.objects.create(
             team=self.team,
diff --git a/rust/capture/src/config.rs b/rust/capture/src/config.rs
index d91e7b7241337..cfc38877fec33 100644
--- a/rust/capture/src/config.rs
+++ b/rust/capture/src/config.rs
@@ -33,8 +33,10 @@ pub struct Config {
     #[envconfig(default = "capture")]
     pub otel_service_name: String,
 
+    // Used for integration tests
     #[envconfig(default = "true")]
     pub export_prometheus: bool,
+    pub redis_key_prefix: Option<String>,
 }
 
 #[derive(Envconfig, Clone)]
diff --git a/rust/capture/src/limiters/billing.rs b/rust/capture/src/limiters/billing.rs
index b908519dda265..6f9fe8d2b450e 100644
--- a/rust/capture/src/limiters/billing.rs
+++ b/rust/capture/src/limiters/billing.rs
@@ -1,3 +1,4 @@
+use metrics::gauge;
 use std::{collections::HashSet, ops::Sub, sync::Arc};
 
 use crate::redis::Client;
@@ -34,7 +35,7 @@ pub enum QuotaResource {
 }
 
 impl QuotaResource {
-    fn as_str(&self) -> &'static str {
+    pub fn as_str(&self) -> &'static str {
         match self {
             Self::Events => "events",
             Self::Recordings => "recordings",
@@ -52,6 +53,7 @@ pub enum LimiterError {
 pub struct BillingLimiter {
     limited: Arc<RwLock<HashSet<String>>>,
     redis: Arc<dyn Client + Send + Sync>,
+    redis_key_prefix: String,
     interval: Duration,
     updated: Arc<RwLock<OffsetDateTime>>,
 }
@@ -68,6 +70,7 @@ impl BillingLimiter {
     pub fn new(
         interval: Duration,
         redis: Arc<dyn Client + Send + Sync>,
+        redis_key_prefix: Option<String>,
     ) -> anyhow::Result<BillingLimiter> {
         let limited = Arc::new(RwLock::new(HashSet::new()));
 
@@ -80,22 +83,20 @@ impl BillingLimiter {
             limited,
             updated,
             redis,
+            redis_key_prefix: redis_key_prefix.unwrap_or_default(),
         })
     }
 
     #[instrument(skip_all)]
     async fn fetch_limited(
         client: &Arc<dyn Client + Send + Sync>,
-        resource: QuotaResource,
+        key_prefix: &str,
+        resource: &QuotaResource,
     ) -> anyhow::Result<Vec<String>> {
-        let now = time::OffsetDateTime::now_utc().unix_timestamp();
-
+        let now = OffsetDateTime::now_utc().unix_timestamp();
+        let key = format!("{key_prefix}{QUOTA_LIMITER_CACHE_KEY}{}", resource.as_str());
         client
-            .zrangebyscore(
-                format!("{QUOTA_LIMITER_CACHE_KEY}{}", resource.as_str()),
-                now.to_string(),
-                String::from("+Inf"),
-            )
+            .zrangebyscore(key, now.to_string(), String::from("+Inf"))
             .await
     }
 
@@ -130,12 +131,17 @@ impl BillingLimiter {
         // On prod atm we call this around 15 times per second at peak times, and it usually
         // completes in <1ms.
 
-        let set = Self::fetch_limited(&self.redis, resource).await;
+        let set = Self::fetch_limited(&self.redis, &self.redis_key_prefix, &resource).await;
 
         tracing::debug!("fetched set from redis, caching");
 
         if let Ok(set) = set {
             let set = HashSet::from_iter(set.iter().cloned());
+            gauge!(
+                "capture_billing_limits_loaded_tokens",
+                "resource" => resource.as_str(),
+            )
+            .set(set.len() as f64);
 
             let mut limited = self.limited.write().await;
             *limited = set;
@@ -172,25 +178,51 @@ mod tests {
 
     #[tokio::test]
     async fn test_dynamic_limited() {
-        let client = MockRedisClient::new().zrangebyscore_ret(vec![String::from("banana")]);
+        let client = MockRedisClient::new()
+            .zrangebyscore_ret("@posthog/quota-limits/events", vec![String::from("banana")]);
         let client = Arc::new(client);
 
-        let limiter = BillingLimiter::new(Duration::microseconds(1), client)
+        let limiter = BillingLimiter::new(Duration::microseconds(1), client, None)
             .expect("Failed to create billing limiter");
 
-        assert_eq!(
-            limiter
-                .is_limited("idk it doesn't matter", QuotaResource::Events)
+        assert!(
+            !limiter
+                .is_limited("not_limited", QuotaResource::Events)
                 .await,
-            false
         );
+        assert!(limiter.is_limited("banana", QuotaResource::Events).await);
+    }
+
+    #[tokio::test]
+    async fn test_custom_key_prefix() {
+        let client = MockRedisClient::new().zrangebyscore_ret(
+            "prefix//@posthog/quota-limits/events",
+            vec![String::from("banana")],
+        );
+        let client = Arc::new(client);
 
-        assert_eq!(
-            limiter
-                .is_limited("some_org_hit_limits", QuotaResource::Events)
+        // Default lookup without prefix fails
+        let limiter = BillingLimiter::new(Duration::microseconds(1), client.clone(), None)
+            .expect("Failed to create billing limiter");
+        assert!(!limiter.is_limited("banana", QuotaResource::Events).await);
+
+        // Limiter using the correct prefix
+        let prefixed_limiter = BillingLimiter::new(
+            Duration::microseconds(1),
+            client,
+            Some("prefix//".to_string()),
+        )
+        .expect("Failed to create billing limiter");
+
+        assert!(
+            !prefixed_limiter
+                .is_limited("not_limited", QuotaResource::Events)
                 .await,
-            false
         );
-        assert!(limiter.is_limited("banana", QuotaResource::Events).await);
+        assert!(
+            prefixed_limiter
+                .is_limited("banana", QuotaResource::Events)
+                .await
+        );
     }
 }
diff --git a/rust/capture/src/redis.rs b/rust/capture/src/redis.rs
index c83c0ad89a8ac..7ab812e3bdae5 100644
--- a/rust/capture/src/redis.rs
+++ b/rust/capture/src/redis.rs
@@ -1,6 +1,7 @@
+use std::collections::HashMap;
 use std::time::Duration;
 
-use anyhow::Result;
+use anyhow::{anyhow, Result};
 use async_trait::async_trait;
 use redis::AsyncCommands;
 use tokio::time::timeout;
@@ -48,19 +49,18 @@ impl Client for RedisClient {
 // mockall got really annoying with async and results so I'm just gonna do my own
 #[derive(Clone)]
 pub struct MockRedisClient {
-    zrangebyscore_ret: Vec<String>,
+    zrangebyscore_ret: HashMap<String, Vec<String>>,
 }
 
 impl MockRedisClient {
     pub fn new() -> MockRedisClient {
         MockRedisClient {
-            zrangebyscore_ret: Vec::new(),
+            zrangebyscore_ret: HashMap::new(),
         }
     }
 
-    pub fn zrangebyscore_ret(&mut self, ret: Vec<String>) -> Self {
-        self.zrangebyscore_ret = ret;
-
+    pub fn zrangebyscore_ret(&mut self, key: &str, ret: Vec<String>) -> Self {
+        self.zrangebyscore_ret.insert(key.to_owned(), ret);
         self.clone()
     }
 }
@@ -74,7 +74,10 @@ impl Default for MockRedisClient {
 #[async_trait]
 impl Client for MockRedisClient {
     // A very simplified wrapper, but works for our usage
-    async fn zrangebyscore(&self, _k: String, _min: String, _max: String) -> Result<Vec<String>> {
-        Ok(self.zrangebyscore_ret.clone())
+    async fn zrangebyscore(&self, key: String, _min: String, _max: String) -> Result<Vec<String>> {
+        match self.zrangebyscore_ret.get(&key) {
+            Some(val) => Ok(val.clone()),
+            None => Err(anyhow!("unknown key")),
+        }
     }
 }
diff --git a/rust/capture/src/server.rs b/rust/capture/src/server.rs
index 85850363e762c..5b8cf06c86842 100644
--- a/rust/capture/src/server.rs
+++ b/rust/capture/src/server.rs
@@ -24,8 +24,12 @@ where
     let redis_client =
         Arc::new(RedisClient::new(config.redis_url).expect("failed to create redis client"));
 
-    let billing = BillingLimiter::new(Duration::seconds(5), redis_client.clone())
-        .expect("failed to create billing limiter");
+    let billing = BillingLimiter::new(
+        Duration::seconds(5),
+        redis_client.clone(),
+        config.redis_key_prefix,
+    )
+    .expect("failed to create billing limiter");
 
     let app = if config.print_sink {
         // Print sink is only used for local debug, don't allow a container with it to run on prod
diff --git a/rust/capture/tests/common.rs b/rust/capture/tests/common.rs
index 868b27c120a7f..5dd4c639aa5f0 100644
--- a/rust/capture/tests/common.rs
+++ b/rust/capture/tests/common.rs
@@ -3,6 +3,7 @@
 use std::default::Default;
 use std::net::SocketAddr;
 use std::num::NonZeroU32;
+use std::ops::Add;
 use std::str::FromStr;
 use std::string::ToString;
 use std::sync::{Arc, Once};
@@ -17,12 +18,15 @@
 use rdkafka::config::{ClientConfig, FromClientConfig};
 use rdkafka::consumer::{BaseConsumer, Consumer};
 use rdkafka::util::Timeout;
 use rdkafka::{Message, TopicPartitionList};
+use redis::{Client, Commands};
+use time::OffsetDateTime;
 use tokio::net::TcpListener;
 use tokio::sync::Notify;
 use tokio::time::timeout;
 use tracing::{debug, warn};
 
 use capture::config::{Config, KafkaConfig};
+use capture::limiters::billing::QuotaResource;
 use capture::server::serve;
 
 pub static DEFAULT_CONFIG: Lazy<Config> = Lazy::new(|| Config {
@@ -47,6 +51,7 @@ pub static DEFAULT_CONFIG: Lazy<Config> = Lazy::new(|| Config {
     otel_sampling_rate: 0.0,
     otel_service_name: "capture-testing".to_string(),
     export_prometheus: false,
+    redis_key_prefix: None,
 });
 
 static TRACING_INIT: Once = Once::new();
@@ -206,6 +211,35 @@ async fn delete_topic(topic: String) {
         .expect("failed to delete topic");
 }
 
+pub struct PrefixedRedis {
+    key_prefix: String,
+    client: Client,
+}
+
+impl PrefixedRedis {
+    pub async fn new() -> Self {
+        Self {
+            key_prefix: random_string("test", 8) + "/",
+            client: Client::open(DEFAULT_CONFIG.redis_url.clone())
+                .expect("failed to create redis client"),
+        }
+    }
+
+    pub fn key_prefix(&self) -> Option<String> {
+        Some(self.key_prefix.to_string())
+    }
+
+    pub fn add_billing_limit(&self, res: QuotaResource, token: &str, until: time::Duration) {
+        let key = format!("{}@posthog/quota-limits/{}", self.key_prefix, res.as_str());
+        let score = OffsetDateTime::now_utc().add(until).unix_timestamp();
+        self.client
+            .get_connection()
+            .expect("failed to get connection")
+            .zadd::<String, &str, i64, ()>(key, token, score)
+            .expect("failed to insert in redis");
+    }
+}
+
 pub fn random_string(prefix: &str, length: usize) -> String {
     let suffix: String = rand::thread_rng()
         .sample_iter(Alphanumeric)
diff --git a/rust/capture/tests/django_compat.rs b/rust/capture/tests/django_compat.rs
index 87b0a1b269256..abbc5356803b9 100644
--- a/rust/capture/tests/django_compat.rs
+++ b/rust/capture/tests/django_compat.rs
@@ -100,7 +100,7 @@ async fn it_matches_django_capture_behaviour() -> anyhow::Result<()> {
         let timesource = FixedTime { time: case.now };
 
         let redis = Arc::new(MockRedisClient::new());
-        let billing = BillingLimiter::new(Duration::weeks(1), redis.clone())
+        let billing = BillingLimiter::new(Duration::weeks(1), redis.clone(), None)
             .expect("failed to create billing limiter");
 
         let app = router(
diff --git a/rust/capture/tests/events.rs b/rust/capture/tests/events.rs
index 7d2defcebd5ff..0554aae905ec8 100644
--- a/rust/capture/tests/events.rs
+++ b/rust/capture/tests/events.rs
@@ -1,7 +1,9 @@
 use std::num::NonZeroU32;
+use time::Duration;
 
 use anyhow::Result;
 use assert_json_diff::assert_json_include;
+use capture::limiters::billing::QuotaResource;
 use reqwest::StatusCode;
 use serde_json::json;
 
@@ -349,3 +351,63 @@ async fn it_trims_distinct_id() -> Result<()> {
 
     Ok(())
 }
+
+#[tokio::test]
+async fn it_applies_billing_limits() -> Result<()> {
+    setup_tracing();
+    let token1 = random_string("token", 16);
+    let token2 = random_string("token", 16);
+    let token3 = random_string("token", 16);
+    let distinct_id = random_string("id", 16);
+
+    let topic = EphemeralTopic::new().await;
+
+    // Setup billing limits:
+    //   - token1 limit is expired -> accept messages
+    //   - token2 limit is active -> drop messages
+    //   - token3 is not in redis -> accept by default
+    let redis = PrefixedRedis::new().await;
+    redis.add_billing_limit(QuotaResource::Events, &token1, Duration::seconds(-60));
+    redis.add_billing_limit(QuotaResource::Events, &token2, Duration::seconds(60));
+
+    let mut config = DEFAULT_CONFIG.clone();
+    config.redis_key_prefix = redis.key_prefix();
+    config.kafka.kafka_topic = topic.topic_name().to_string();
+    let server = ServerHandle::for_config(config).await;
+
+    for payload in [
+        json!({
+            "token": token1,
+            "batch": [{"event": "event1","distinct_id": distinct_id}]
+        }),
+        json!({
+            "token": token2,
+            "batch": [{"event": "to drop","distinct_id": distinct_id}]
+        }),
+        json!({
+            "token": token3,
+            "batch": [{"event": "event1","distinct_id": distinct_id}]
+        }),
+    ] {
+        let res = server.capture_events(payload.to_string()).await;
+        assert_eq!(StatusCode::OK, res.status());
+    }
+
+    // Batches 1 and 3 go through, batch 2 is dropped
+    assert_json_include!(
+        actual: topic.next_event()?,
+        expected: json!({
+            "token": token1,
+            "distinct_id": distinct_id
+        })
+    );
+    assert_json_include!(
+        actual: topic.next_event()?,
+        expected: json!({
+            "token": token3,
+            "distinct_id": distinct_id
+        })
+ ); + + Ok(()) +}
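
For reference, the quota keys exercised above follow the layout {redis_key_prefix}@posthog/quota-limits/{resource}: a Redis sorted set whose members are the limited project tokens and whose scores are the UNIX timestamps until which each token stays limited. The sketch below seeds such a limit the way the PrefixedRedis helper does; it is illustrative only and assumes the redis and time crates, a Redis instance on localhost, and hypothetical values for the prefix and token.

use redis::Commands;
use time::{Duration, OffsetDateTime};

fn seed_event_limit() -> redis::RedisResult<()> {
    // Hypothetical values, mirroring what PrefixedRedis builds in the tests above.
    let key_prefix = "test1234/";
    let token = "phc_example_token";

    // Key layout used by the capture billing limiter: {prefix}@posthog/quota-limits/{resource}
    let key = format!("{key_prefix}@posthog/quota-limits/events");

    // The score is the UNIX timestamp until which the token stays limited.
    let until = (OffsetDateTime::now_utc() + Duration::seconds(60)).unix_timestamp();

    let client = redis::Client::open("redis://127.0.0.1:6379")?;
    let mut conn = client.get_connection()?;
    // redis-rs zadd takes (key, member, score) and maps to `ZADD key score member`.
    conn.zadd::<_, _, _, ()>(key, token, until)?;
    Ok(())
}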