diff --git a/.storybook/preview.tsx b/.storybook/preview.tsx index 0cda6703cfd37..9b0a76da1d367 100644 --- a/.storybook/preview.tsx +++ b/.storybook/preview.tsx @@ -15,6 +15,17 @@ const setupMsw = () => { // Make sure the msw worker is started worker.start({ quiet: true, + onUnhandledRequest(request, print) { + // MSW warns on all unhandled requests, but we don't necessarily care + const pathAllowList = ['/images/'] + + if (pathAllowList.some((path) => request.url.pathname.startsWith(path))) { + return + } + + // Otherwise, default MSW warning behavior + print.warning() + }, }) ;(window as any).__mockServiceWorker = worker ;(window as any).POSTHOG_APP_CONTEXT = getStorybookAppContext() diff --git a/docker-compose.hobby.yml b/docker-compose.hobby.yml index cc61b627e0a0c..bf63efa21e0b2 100644 --- a/docker-compose.hobby.yml +++ b/docker-compose.hobby.yml @@ -13,8 +13,11 @@ services: extends: file: docker-compose.base.yml service: db + # Pin to postgres 12 until we have a process for pg_upgrade to postgres 15 for existing installations + image: ${DOCKER_REGISTRY_PREFIX:-}postgres:12-alpine volumes: - postgres-data:/var/lib/postgresql/data + redis: extends: file: docker-compose.base.yml diff --git a/ee/clickhouse/models/test/__snapshots__/test_cohort.ambr b/ee/clickhouse/models/test/__snapshots__/test_cohort.ambr index d8c1a92e6e35f..955c3b33da9d3 100644 --- a/ee/clickhouse/models/test/__snapshots__/test_cohort.ambr +++ b/ee/clickhouse/models/test/__snapshots__/test_cohort.ambr @@ -83,7 +83,7 @@ (SELECT pdi.person_id AS person_id, countIf(timestamp > now() - INTERVAL 2 year AND timestamp < now() - AND event = '$pageview') > 0 AS performed_event_condition_15_level_level_0_level_0_level_0_0 + AND event = '$pageview') > 0 AS performed_event_condition_17_level_level_0_level_0_level_0_0 FROM events e INNER JOIN (SELECT distinct_id, @@ -113,7 +113,7 @@ HAVING max(is_deleted) = 0 AND (((((NOT has(['something1'], replaceRegexpAll(JSONExtractRaw(argMax(person.properties, version), '$some_prop'), '^"|"$', ''))))))))) person ON person.person_id = behavior_query.person_id WHERE 1 = 1 - AND ((((performed_event_condition_15_level_level_0_level_0_level_0_0)))) ) as person + AND ((((performed_event_condition_17_level_level_0_level_0_level_0_0)))) ) as person UNION ALL SELECT person_id, cohort_id, @@ -148,7 +148,7 @@ (SELECT pdi.person_id AS person_id, countIf(timestamp > now() - INTERVAL 2 year AND timestamp < now() - AND event = '$pageview') > 0 AS performed_event_condition_17_level_level_0_level_0_level_0_0 + AND event = '$pageview') > 0 AS performed_event_condition_19_level_level_0_level_0_level_0_0 FROM events e INNER JOIN (SELECT distinct_id, @@ -178,7 +178,7 @@ HAVING max(is_deleted) = 0 AND (((((NOT has(['something1'], replaceRegexpAll(JSONExtractRaw(argMax(person.properties, version), '$some_prop'), '^"|"$', ''))))))))) person ON person.person_id = behavior_query.person_id WHERE 1 = 1 - AND ((((performed_event_condition_17_level_level_0_level_0_level_0_0)))) ) )) + AND ((((performed_event_condition_19_level_level_0_level_0_level_0_0)))) ) )) ' --- # name: TestCohort.test_cohortpeople_with_not_in_cohort_operator_for_behavioural_cohorts ' @@ -195,7 +195,7 @@ FROM (SELECT pdi.person_id AS person_id, minIf(timestamp, event = 'signup') >= now() - INTERVAL 15 day - AND minIf(timestamp, event = 'signup') < now() as first_time_condition_18_level_level_0_level_0_0 + AND minIf(timestamp, event = 'signup') < now() as first_time_condition_20_level_level_0_level_0_0 FROM events e INNER JOIN (SELECT distinct_id, @@ 
-208,7 +208,7 @@ AND event IN ['signup'] GROUP BY person_id) behavior_query WHERE 1 = 1 - AND (((first_time_condition_18_level_level_0_level_0_0))) ) as person + AND (((first_time_condition_20_level_level_0_level_0_0))) ) as person UNION ALL SELECT person_id, cohort_id, @@ -237,9 +237,9 @@ (SELECT pdi.person_id AS person_id, countIf(timestamp > now() - INTERVAL 2 year AND timestamp < now() - AND event = '$pageview') > 0 AS performed_event_condition_19_level_level_0_level_0_level_0_0, + AND event = '$pageview') > 0 AS performed_event_condition_21_level_level_0_level_0_level_0_0, minIf(timestamp, event = 'signup') >= now() - INTERVAL 15 day - AND minIf(timestamp, event = 'signup') < now() as first_time_condition_19_level_level_0_level_1_level_0_level_0_level_0_0 + AND minIf(timestamp, event = 'signup') < now() as first_time_condition_21_level_level_0_level_1_level_0_level_0_level_0_0 FROM events e INNER JOIN (SELECT distinct_id, @@ -252,8 +252,8 @@ AND event IN ['$pageview', 'signup'] GROUP BY person_id) behavior_query WHERE 1 = 1 - AND ((((performed_event_condition_19_level_level_0_level_0_level_0_0)) - AND ((((NOT first_time_condition_19_level_level_0_level_1_level_0_level_0_level_0_0)))))) ) as person + AND ((((performed_event_condition_21_level_level_0_level_0_level_0_0)) + AND ((((NOT first_time_condition_21_level_level_0_level_1_level_0_level_0_level_0_0)))))) ) as person UNION ALL SELECT person_id, cohort_id, diff --git a/ee/clickhouse/models/test/__snapshots__/test_property.ambr b/ee/clickhouse/models/test/__snapshots__/test_property.ambr index d27396834cf99..b3f6f049cf619 100644 --- a/ee/clickhouse/models/test/__snapshots__/test_property.ambr +++ b/ee/clickhouse/models/test/__snapshots__/test_property.ambr @@ -146,7 +146,7 @@ )) ', { - 'global_cohort_id_0': 47, + 'global_cohort_id_0': 1, 'global_version_0': None, }, ) diff --git a/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiment_secondary_results.ambr b/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiment_secondary_results.ambr index 76b856caa0287..f312dde127a84 100644 --- a/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiment_secondary_results.ambr +++ b/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiment_secondary_results.ambr @@ -1,6 +1,6 @@ # name: ClickhouseTestExperimentSecondaryResults.test_basic_secondary_metric_results ' - /* user_id:51 celery:posthog.celery.sync_insight_caching_state */ + /* user_id:138 celery:posthog.celery.sync_insight_caching_state */ SELECT team_id, date_diff('second', max(timestamp), now()) AS age FROM events @@ -12,50 +12,78 @@ --- # name: ClickhouseTestExperimentSecondaryResults.test_basic_secondary_metric_results.1 ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(value) + FROM + (SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') AS value, + count(*) as count + FROM events e + WHERE team_id = 2 + AND event = '$pageview' + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 25 + OFFSET 0) ' --- # name: ClickhouseTestExperimentSecondaryResults.test_basic_secondary_metric_results.2 ' - /* 
celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-06 00:00:00', 'UTC')) - toIntervalDay(number) as day_start + FROM numbers(6) + UNION ALL SELECT toStartOfDay(toDateTime('2020-01-01 00:00:00', 'UTC')) as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['control', 'test', 'ablahebf', ''] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(*) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) as day_start, + replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') as breakdown_value + FROM events e + WHERE e.team_id = 2 + AND event = '$pageview' + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') in (['control', 'test', 'ablahebf', '']) + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value ' --- # name: ClickhouseTestExperimentSecondaryResults.test_basic_secondary_metric_results.3 - ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ' ---- -# name: ClickhouseTestExperimentSecondaryResults.test_basic_secondary_metric_results.4 ' /* user_id:0 request:_snapshot_ */ SELECT groupArray(value) FROM - (SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') AS value, + (SELECT array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS value, count(*) as count FROM events e WHERE team_id = 2 - AND event = '$pageview' + AND event IN ['$pageleave_funnel', '$pageview_funnel'] AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') GROUP BY value @@ -64,6 +92,78 @@ OFFSET 0) ' --- +# name: ClickhouseTestExperimentSecondaryResults.test_basic_secondary_metric_results.4 + ' + /* user_id:0 request:_snapshot_ */ + SELECT countIf(steps = 1) step_1, + countIf(steps = 2) step_2, + avg(step_1_average_conversion_time_inner) step_1_average_conversion_time, + median(step_1_median_conversion_time_inner) step_1_median_conversion_time, + prop + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 14 DAY, 2, 1) AS steps , + 
if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 14 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , + if(has([['test'], ['control'], ['']], prop), prop, ['Other']) as prop + FROM + (SELECT *, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id , + if(event = '$pageview_funnel', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = '$pageleave_funnel', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 2 + AND event IN ['$pageleave_funnel', '$pageview_funnel'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (step_0 = 1 + OR step_1 = 1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + GROUP BY prop + ' +--- # name: ClickhouseTestExperimentSecondaryResults.test_basic_secondary_metric_results.5 ' /* user_id:0 request:_snapshot_ */ diff --git a/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiments.ambr b/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiments.ambr index 15bbb8312a341..be61b4ccc3d33 100644 --- a/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiments.ambr +++ b/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiments.ambr @@ -1,25 +1,91 @@ # name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results ' - /* user_id:58 celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(value) + FROM + (SELECT array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS value, + count(*) as count + FROM events e + WHERE team_id = 2 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 25 + OFFSET 0) ' --- # name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results.1 ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT countIf(steps = 1) step_1, + countIf(steps 
= 2) step_2, + avg(step_1_average_conversion_time_inner) step_1_average_conversion_time, + median(step_1_median_conversion_time_inner) step_1_median_conversion_time, + prop + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 14 DAY, 2, 1) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 14 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , + if(has([['test'], ['control'], ['']], prop), prop, ['Other']) as prop + FROM + (SELECT *, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id , + if(event = '$pageview', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = '$pageleave', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 2 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (step_0 = 1 + OR step_1 = 1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + GROUP BY prop ' --- # name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results.2 @@ -137,54 +203,6 @@ ' --- # name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones - ' - /* user_id:59 celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ' ---- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.1 - ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ' ---- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.2 - ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE 
timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ' ---- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.3 - ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ' ---- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.4 ' /* user_id:0 request:_snapshot_ */ SELECT groupArray(value) @@ -202,7 +220,7 @@ OFFSET 0) ' --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.5 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.1 ' /* user_id:0 request:_snapshot_ */ SELECT countIf(steps = 1) step_1, @@ -274,31 +292,7 @@ GROUP BY prop ' --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants - ' - /* user_id:61 celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ' ---- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.1 - ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ' ---- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.2 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.2 ' /* celery:posthog.celery.sync_insight_caching_state */ SELECT team_id, @@ -310,7 +304,7 @@ ORDER BY age; ' --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.3 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.3 ' /* celery:posthog.celery.sync_insight_caching_state */ SELECT team_id, @@ -322,7 +316,7 @@ ORDER BY age; ' --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.4 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.4 ' /* user_id:0 request:_snapshot_ */ SELECT groupArray(value) @@ -332,15 +326,15 @@ FROM events e WHERE team_id = 2 AND event IN ['$pageleave', '$pageview'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'Europe/Amsterdam') >= toDateTime('2020-01-01 14:20:21', 'Europe/Amsterdam') + AND toTimeZone(timestamp, 'Europe/Amsterdam') <= toDateTime('2020-01-06 10:00:00', 'Europe/Amsterdam') GROUP BY value ORDER BY count DESC, value DESC LIMIT 25 OFFSET 0) ' --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.5 +# name: 
ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.5 ' /* user_id:0 request:_snapshot_ */ SELECT countIf(steps = 1) step_1, @@ -375,7 +369,7 @@ min(latest_1) over (PARTITION by aggregation_target, prop ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , - if(has([[''], ['test_1'], ['test'], ['control'], ['unknown_3'], ['unknown_2'], ['unknown_1'], ['test_2']], prop), prop, ['Other']) as prop + if(has([['test'], ['control']], prop), prop, ['Other']) as prop FROM (SELECT *, if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop @@ -400,8 +394,8 @@ HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id WHERE team_id = 2 AND event IN ['$pageleave', '$pageview'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'Europe/Amsterdam') >= toDateTime('2020-01-01 14:20:21', 'Europe/Amsterdam') + AND toTimeZone(timestamp, 'Europe/Amsterdam') <= toDateTime('2020-01-06 10:00:00', 'Europe/Amsterdam') AND (step_0 = 1 OR step_1 = 1) ))) WHERE step_0 = 1 )) @@ -412,55 +406,7 @@ GROUP BY prop ' --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation - ' - /* user_id:62 celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ' ---- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.1 - ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ' ---- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.2 - ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ' ---- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.3 - ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ' ---- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.4 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants ' /* user_id:0 request:_snapshot_ */ SELECT groupArray(value) @@ -478,7 +424,7 @@ OFFSET 0) ' --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.5 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.1 ' /* user_id:0 request:_snapshot_ */ SELECT countIf(steps = 1) step_1, @@ -513,13 +459,13 @@ min(latest_1) over (PARTITION by aggregation_target, prop ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , - 
if(has([['test'], ['control'], ['']], prop), prop, ['Other']) as prop + if(has([[''], ['test_1'], ['test'], ['control'], ['unknown_3'], ['unknown_2'], ['unknown_1'], ['test_2']], prop), prop, ['Other']) as prop FROM (SELECT *, if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop FROM (SELECT e.timestamp as timestamp, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(properties, '$account_id'), ''), 'null'), '^"|"$', '') as aggregation_target, + pdi.person_id as aggregation_target, pdi.person_id as person_id , if(event = '$pageview', 1, 0) as step_0, if(step_0 = 1, timestamp, null) as latest_0, @@ -550,9 +496,9 @@ GROUP BY prop ' --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.2 ' - /* user_id:65 celery:posthog.celery.sync_insight_caching_state */ + /* celery:posthog.celery.sync_insight_caching_state */ SELECT team_id, date_diff('second', max(timestamp), now()) AS age FROM events @@ -562,7 +508,7 @@ ORDER BY age; ' --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.1 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.3 ' /* celery:posthog.celery.sync_insight_caching_state */ SELECT team_id, @@ -574,28 +520,447 @@ ORDER BY age; ' --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.2 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.4 ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(value) + FROM + (SELECT array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS value, + count(*) as count + FROM events e + WHERE team_id = 2 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 25 + OFFSET 0) + ' +--- +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.5 + ' + /* user_id:0 request:_snapshot_ */ + SELECT countIf(steps = 1) step_1, + countIf(steps = 2) step_2, + avg(step_1_average_conversion_time_inner) step_1_average_conversion_time, + median(step_1_median_conversion_time_inner) step_1_median_conversion_time, + prop + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 14 DAY, 2, 1) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 14 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS 
BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , + if(has([[''], ['test_1'], ['test'], ['control'], ['unknown_3'], ['unknown_2'], ['unknown_1'], ['test_2']], prop), prop, ['Other']) as prop + FROM + (SELECT *, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id , + if(event = '$pageview', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = '$pageleave', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 2 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (step_0 = 1 + OR step_1 = 1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + GROUP BY prop + ' +--- +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation + ' + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(value) + FROM + (SELECT array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS value, + count(*) as count + FROM events e + WHERE team_id = 2 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 25 + OFFSET 0) + ' +--- +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.1 + ' + /* user_id:0 request:_snapshot_ */ + SELECT countIf(steps = 1) step_1, + countIf(steps = 2) step_2, + avg(step_1_average_conversion_time_inner) step_1_average_conversion_time, + median(step_1_median_conversion_time_inner) step_1_median_conversion_time, + prop + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 14 DAY, 2, 1) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 14 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , + if(has([['test'], ['control'], ['']], prop), prop, ['Other']) as prop + FROM + (SELECT *, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop + FROM + (SELECT e.timestamp as timestamp, 
+ replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(properties, '$account_id'), ''), 'null'), '^"|"$', '') as aggregation_target, + pdi.person_id as person_id , + if(event = '$pageview', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = '$pageleave', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 2 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (step_0 = 1 + OR step_1 = 1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + GROUP BY prop + ' +--- +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.2 + ' + /* celery:posthog.celery.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id ORDER BY age; ' --- +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.3 + ' + /* celery:posthog.celery.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ' +--- +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.4 + ' + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(value) + FROM + (SELECT array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS value, + count(*) as count + FROM events e + WHERE team_id = 2 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 25 + OFFSET 0) + ' +--- +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.5 + ' + /* user_id:0 request:_snapshot_ */ + SELECT countIf(steps = 1) step_1, + countIf(steps = 2) step_2, + avg(step_1_average_conversion_time_inner) step_1_average_conversion_time, + median(step_1_median_conversion_time_inner) step_1_median_conversion_time, + prop + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 14 DAY, 2, 1) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 14 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), 
NULL) step_1_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , + if(has([['test'], ['control'], ['']], prop), prop, ['Other']) as prop + FROM + (SELECT *, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop + FROM + (SELECT e.timestamp as timestamp, + replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(properties, '$account_id'), ''), 'null'), '^"|"$', '') as aggregation_target, + pdi.person_id as person_id , + if(event = '$pageview', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = '$pageleave', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 2 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (step_0 = 1 + OR step_1 = 1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + GROUP BY prop + ' +--- +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results + ' + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(value) + FROM + (SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') AS value, + count(*) as count + FROM events e + WHERE team_id = 2 + AND event = '$pageview' + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature/a-b-test'), '^"|"$', ''))) + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 25 + OFFSET 0) + ' +--- +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.1 + ' + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-06 00:00:00', 'UTC')) - toIntervalDay(number) as day_start + FROM numbers(6) + UNION ALL SELECT toStartOfDay(toDateTime('2020-01-01 00:00:00', 'UTC')) as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['test', 'control'] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(*) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) as day_start, + replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') as breakdown_value + FROM events e + WHERE e.team_id = 2 + AND event = '$pageview' + AND (has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, 
'$feature/a-b-test'), '^"|"$', ''))) + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') in (['test', 'control']) + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value + ' +--- +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.2 + ' + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(value) + FROM + (SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', '') AS value, + count(*) as count + FROM events e + WHERE team_id = 2 + AND event = '$feature_flag_called' + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag_response'), '^"|"$', '')) + AND has(['a-b-test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag'), '^"|"$', ''))) + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 25 + OFFSET 0) + ' +--- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.3 ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-06 00:00:00', 'UTC')) - toIntervalDay(number) as day_start + FROM numbers(6) + UNION ALL SELECT toStartOfDay(toDateTime('2020-01-01 00:00:00', 'UTC')) as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['control', 'test'] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(DISTINCT person_id) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) as day_start, + breakdown_value + FROM + (SELECT person_id, + min(timestamp) as timestamp, + breakdown_value + FROM + (SELECT pdi.person_id as person_id, timestamp, replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', '') as breakdown_value + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) as pdi ON events.distinct_id = pdi.distinct_id + WHERE e.team_id = 2 + AND event = '$feature_flag_called' + AND (has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag_response'), '^"|"$', '')) + AND has(['a-b-test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag'), '^"|"$', ''))) + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', '') in (['control', 'test']) ) + GROUP BY 
person_id, + breakdown_value) AS pdi + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value ' --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.4 @@ -749,50 +1114,97 @@ --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants ' - /* user_id:66 celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(value) + FROM + (SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') AS value, + count(*) as count + FROM events e + WHERE team_id = 2 + AND event = '$pageview1' + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (has(['control', 'test_1', 'test_2', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature/a-b-test'), '^"|"$', ''))) + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 25 + OFFSET 0) ' --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.1 ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-06 00:00:00', 'UTC')) - toIntervalDay(number) as day_start + FROM numbers(6) + UNION ALL SELECT toStartOfDay(toDateTime('2020-01-01 00:00:00', 'UTC')) as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['control', 'test_1', 'test_2'] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(*) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) as day_start, + replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') as breakdown_value + FROM events e + WHERE e.team_id = 2 + AND event = '$pageview1' + AND (has(['control', 'test_1', 'test_2', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature/a-b-test'), '^"|"$', ''))) + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') in (['control', 'test_1', 'test_2']) + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value ' --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.2 ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > 
date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(value) + FROM + (SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', '') AS value, + count(*) as count + FROM events e + WHERE team_id = 2 + AND event = '$feature_flag_called' + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (has(['control', 'test_1', 'test_2', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag_response'), '^"|"$', '')) + AND has(['a-b-test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag'), '^"|"$', ''))) + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 25 + OFFSET 0) ' --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.3 ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT [now()] AS date, + [0] AS total, + '' AS breakdown_value + LIMIT 0 ' --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.4 @@ -892,7 +1304,7 @@ --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone ' - /* user_id:68 celery:posthog.celery.sync_insight_caching_state */ + /* user_id:1 celery:posthog.celery.sync_insight_caching_state */ SELECT team_id, date_diff('second', max(timestamp), now()) AS age FROM events @@ -1089,7 +1501,7 @@ --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter ' - /* user_id:70 celery:posthog.celery.sync_insight_caching_state */ + /* user_id:3 celery:posthog.celery.sync_insight_caching_state */ SELECT team_id, date_diff('second', max(timestamp), now()) AS age FROM events diff --git a/ee/clickhouse/views/test/test_clickhouse_experiments.py b/ee/clickhouse/views/test/test_clickhouse_experiments.py index 9f86ee3fe1d51..07764b83845d8 100644 --- a/ee/clickhouse/views/test/test_clickhouse_experiments.py +++ b/ee/clickhouse/views/test/test_clickhouse_experiments.py @@ -777,7 +777,7 @@ def test_used_in_experiment_is_populated_correctly_for_feature_flag_list(self) - ).json() # TODO: Make sure permission bool doesn't cause n + 1 - with self.assertNumQueries(11): + with self.assertNumQueries(12): response = self.client.get(f"/api/projects/{self.team.id}/feature_flags") self.assertEqual(response.status_code, status.HTTP_200_OK) result = response.json() diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-line-edit--webkit.png index f9063f35a84fa..23a9edff296e8 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--closed-popover-state.png b/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--closed-popover-state.png index 7e09e95a91e3d..72044664032ff 100644 Binary files 
a/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--closed-popover-state.png and b/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--closed-popover-state.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--default.png b/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--default.png index 2d8bc66db9e21..ca05fd2fff918 100644 Binary files a/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--default.png and b/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--default.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-no-existing-containing-notebooks.png b/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-no-existing-containing-notebooks.png index 3c573bbf93467..17c750c0c42d7 100644 Binary files a/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-no-existing-containing-notebooks.png and b/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-no-existing-containing-notebooks.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-no-notebooks.png b/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-no-notebooks.png index 2d8bc66db9e21..ca05fd2fff918 100644 Binary files a/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-no-notebooks.png and b/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-no-notebooks.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-slow-network-response-closed-popover.png b/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-slow-network-response-closed-popover.png index 4c0d0f64fd16a..7f2f047e58950 100644 Binary files a/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-slow-network-response-closed-popover.png and b/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-slow-network-response-closed-popover.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-slow-network-response.png b/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-slow-network-response.png index cc47c9d64706c..2e25a8113f1d1 100644 Binary files a/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-slow-network-response.png and b/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-slow-network-response.png differ diff --git a/frontend/__snapshots__/scenes-app-recordings--recent-recordings.png b/frontend/__snapshots__/scenes-app-recordings--recent-recordings.png new file mode 100644 index 0000000000000..f82abb66ca799 Binary files /dev/null and b/frontend/__snapshots__/scenes-app-recordings--recent-recordings.png differ diff --git a/frontend/__snapshots__/scenes-app-recordings--recordings-play-list-no-pinned-recordings.png b/frontend/__snapshots__/scenes-app-recordings--recordings-play-list-no-pinned-recordings.png index 8e3052db551e1..8b5342e9b47e9 100644 Binary files a/frontend/__snapshots__/scenes-app-recordings--recordings-play-list-no-pinned-recordings.png and 
b/frontend/__snapshots__/scenes-app-recordings--recordings-play-list-no-pinned-recordings.png differ diff --git a/frontend/__snapshots__/scenes-other-login--cloud-with-google-login-enforcement.png b/frontend/__snapshots__/scenes-other-login--cloud-with-google-login-enforcement.png index b533f8a57619e..e34fe137f3088 100644 Binary files a/frontend/__snapshots__/scenes-other-login--cloud-with-google-login-enforcement.png and b/frontend/__snapshots__/scenes-other-login--cloud-with-google-login-enforcement.png differ diff --git a/frontend/src/layout/navigation/TopBar/TopBar.tsx b/frontend/src/layout/navigation/TopBar/TopBar.tsx index cf73ee60f4b8b..4c51c2453ae40 100644 --- a/frontend/src/layout/navigation/TopBar/TopBar.tsx +++ b/frontend/src/layout/navigation/TopBar/TopBar.tsx @@ -30,6 +30,26 @@ export function TopBar(): JSX.Element { const { hideInviteModal } = useActions(inviteLogic) const { groupNamesTaxonomicTypes } = useValues(groupsModel) const { featureFlags } = useValues(featureFlagLogic) + + const hasNotebooks = !!featureFlags[FEATURE_FLAGS.NOTEBOOKS] + + const groupTypes = [ + TaxonomicFilterGroupType.Events, + TaxonomicFilterGroupType.Persons, + TaxonomicFilterGroupType.Actions, + TaxonomicFilterGroupType.Cohorts, + TaxonomicFilterGroupType.Insights, + TaxonomicFilterGroupType.FeatureFlags, + TaxonomicFilterGroupType.Plugins, + TaxonomicFilterGroupType.Experiments, + TaxonomicFilterGroupType.Dashboards, + ...groupNamesTaxonomicTypes, + ] + + if (hasNotebooks) { + groupTypes.push(TaxonomicFilterGroupType.Notebooks) + } + return ( <> @@ -48,26 +68,12 @@ export function TopBar(): JSX.Element {
- +
- {!!featureFlags[FEATURE_FLAGS.NOTEBOOKS] && } + {hasNotebooks && } diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts index 95e72ac18729d..c56bf0bc8086e 100644 --- a/frontend/src/lib/api.ts +++ b/frontend/src/lib/api.ts @@ -1321,7 +1321,7 @@ const api = { }, async update( notebookId: NotebookType['short_id'], - data: Pick + data: Pick ): Promise { return await new ApiRequest().notebook(notebookId).update({ data }) }, @@ -1348,11 +1348,11 @@ const api = { q = { ...q, created_by: createdBy } } if (search) { - q = { ...q, s: search } + q = { ...q, search: search } } return await apiRequest.withQueryString(q).get() }, - async create(data?: Pick): Promise { + async create(data?: Pick): Promise { return await new ApiRequest().notebooks().create({ data }) }, async delete(notebookId: NotebookType['short_id']): Promise { diff --git a/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.tsx b/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.tsx index 773cfa0809949..c8547ae06af50 100644 --- a/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.tsx +++ b/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.tsx @@ -23,6 +23,7 @@ import { PersonType, PluginType, PropertyDefinition, + NotebookType, } from '~/types' import { cohortsModel } from '~/models/cohortsModel' import { actionsModel } from '~/models/actionsModel' @@ -42,6 +43,7 @@ import { groupDisplayId } from 'scenes/persons/GroupActorDisplay' import { infiniteListLogicType } from 'lib/components/TaxonomicFilter/infiniteListLogicType' import { updatePropertyDefinitions } from '~/models/propertyDefinitionsModel' import { InlineHogQLEditor } from './InlineHogQLEditor' +import { FEATURE_FLAGS } from 'lib/constants' export const eventTaxonomicGroupProps: Pick = { getPopoverHeader: (eventDefinition: EventDefinition): string => { @@ -77,6 +79,8 @@ export const taxonomicFilterLogic = kea({ ['groupTypes', 'aggregationLabel'], groupPropertiesModel, ['allGroupProperties'], + featureFlagsLogic, + ['featureFlags'], ], }, actions: () => ({ @@ -146,15 +150,17 @@ export const taxonomicFilterLogic = kea({ s.groupAnalyticsTaxonomicGroupNames, s.eventNames, s.excludedProperties, + s.featureFlags, ], ( teamId, groupAnalyticsTaxonomicGroups, groupAnalyticsTaxonomicGroupNames, eventNames, - excludedProperties + excludedProperties, + featureFlags ): TaxonomicFilterGroup[] => { - return [ + const groups = [ { name: 'Events', searchPlaceholder: 'events', @@ -209,7 +215,7 @@ export const taxonomicFilterLogic = kea({ filter_by_event_names: true, }).url : undefined, - expandLabel: ({ count, expandedCount }) => + expandLabel: ({ count, expandedCount }: { count: number; expandedCount: number }) => `Show ${pluralize(expandedCount - count, 'property', 'properties')} that ${pluralize( eventNames.length, 'has', @@ -237,7 +243,7 @@ export const taxonomicFilterLogic = kea({ filter_by_event_names: true, }).url : undefined, - expandLabel: ({ count, expandedCount }) => + expandLabel: ({ count, expandedCount }: { count: number; expandedCount: number }) => `Show ${pluralize(expandedCount - count, 'property', 'properties')} that ${pluralize( eventNames.length, 'has', @@ -408,8 +414,8 @@ export const taxonomicFilterLogic = kea({ value: '$session_duration', }, ], - getName: (option) => option.name, - getValue: (option) => option.value, + getName: (option: any) => option.name, + getValue: (option: any) => option.value, getPopoverHeader: () => 'Session', }, { @@ -422,6 +428,21 @@ export const taxonomicFilterLogic = kea({ 
...groupAnalyticsTaxonomicGroups, ...groupAnalyticsTaxonomicGroupNames, ] + + if (featureFlags[FEATURE_FLAGS.NOTEBOOKS]) { + groups.push({ + name: 'Notebooks', + searchPlaceholder: 'notebooks', + type: TaxonomicFilterGroupType.Notebooks, + value: 'notebooks', + endpoint: `api/projects/${teamId}/notebooks/`, + getName: (notebook: NotebookType) => notebook.title || `Notebook ${notebook.short_id}`, + getValue: (notebook: NotebookType) => notebook.short_id, + getPopoverHeader: () => 'Notebooks', + }) + } + + return groups }, ], activeTaxonomicGroup: [ diff --git a/frontend/src/lib/components/TaxonomicFilter/types.ts b/frontend/src/lib/components/TaxonomicFilter/types.ts index 5d03149f671ea..5dd74ef575aae 100644 --- a/frontend/src/lib/components/TaxonomicFilter/types.ts +++ b/frontend/src/lib/components/TaxonomicFilter/types.ts @@ -83,6 +83,7 @@ export enum TaxonomicFilterGroupType { GroupNamesPrefix = 'name_groups', Sessions = 'sessions', HogQLExpression = 'hogql_expression', + Notebooks = 'notebooks', } export interface InfiniteListLogicProps extends TaxonomicFilterLogicProps { diff --git a/frontend/src/lib/components/UniversalSearch/UniversalSearchPopover.tsx b/frontend/src/lib/components/UniversalSearch/UniversalSearchPopover.tsx index 2412e0b8bff8d..dc8e9384a7fd5 100644 --- a/frontend/src/lib/components/UniversalSearch/UniversalSearchPopover.tsx +++ b/frontend/src/lib/components/UniversalSearch/UniversalSearchPopover.tsx @@ -109,6 +109,8 @@ function redirectOnSelectItems( ) } else if (groupType === TaxonomicFilterGroupType.Dashboards) { router.actions.push(urls.dashboard(value)) + } else if (groupType === TaxonomicFilterGroupType.Notebooks) { + router.actions.push(urls.notebook(String(value))) } } diff --git a/frontend/src/lib/constants.tsx b/frontend/src/lib/constants.tsx index c0d87142bfd6d..122f62be237a1 100644 --- a/frontend/src/lib/constants.tsx +++ b/frontend/src/lib/constants.tsx @@ -165,6 +165,7 @@ export const FEATURE_FLAGS = { SURVEY_NPS_RESULTS: 'survey-nps-results', // owner: @liyiy // owner: #team-monitoring SESSION_RECORDING_ALLOW_V1_SNAPSHOTS: 'session-recording-allow-v1-snapshots', + HOGQL_INSIGHTS: 'hogql-insights', // owner: @mariusandra } as const export type FeatureFlagKey = (typeof FEATURE_FLAGS)[keyof typeof FEATURE_FLAGS] diff --git a/frontend/src/mocks/handlers.ts b/frontend/src/mocks/handlers.ts index e6a101068a6f9..123fc595765ca 100644 --- a/frontend/src/mocks/handlers.ts +++ b/frontend/src/mocks/handlers.ts @@ -82,6 +82,10 @@ export const defaultMocks: Mocks = { }, // We don't want to show the "new version available" banner in tests 'https://api.github.com/repos/posthog/posthog-js/tags': () => [200, []], + 'https://www.gravatar.com/avatar/:gravatar_id': () => [404, ''], + 'https://app.posthog.com/api/early_access_features': { + earlyAccessFeatures: [], + }, }, post: { 'https://app.posthog.com/e/': (): MockSignature => [200, 'ok'], diff --git a/frontend/src/queries/nodes/DataTable/dataTableLogic.ts b/frontend/src/queries/nodes/DataTable/dataTableLogic.ts index cdffb15567877..5fb75476e0af3 100644 --- a/frontend/src/queries/nodes/DataTable/dataTableLogic.ts +++ b/frontend/src/queries/nodes/DataTable/dataTableLogic.ts @@ -162,8 +162,7 @@ export const dataTableLogic = kea([ showReload: query.showReload ?? showIfFull, showTimings: query.showTimings ?? flagQueryTimingsEnabled, showElapsedTime: - query.showTimings || - flagQueryTimingsEnabled || + (query.showTimings ?? flagQueryTimingsEnabled) || (query.showElapsedTime ?? 
((flagQueryRunningTimeEnabled || source.kind === NodeKind.HogQLQuery) && showIfFull)), showColumnConfigurator: query.showColumnConfigurator ?? showIfFull, diff --git a/frontend/src/queries/query.ts b/frontend/src/queries/query.ts index 06ff315deeb24..2621f27fa3a64 100644 --- a/frontend/src/queries/query.ts +++ b/frontend/src/queries/query.ts @@ -10,6 +10,7 @@ import { isTimeToSeeDataSessionsNode, isHogQLQuery, isInsightVizNode, + isLifecycleQuery, } from './utils' import api, { ApiMethodOptions } from 'lib/api' import { getCurrentTeamId } from 'lib/utils/logics' @@ -27,6 +28,8 @@ import { toParams } from 'lib/utils' import { queryNodeToFilter } from './nodes/InsightQuery/utils/queryNodeToFilter' import { now } from 'lib/dayjs' import { currentSessionId } from 'lib/internalMetrics' +import { featureFlagLogic } from 'lib/logic/featureFlagLogic' +import { FEATURE_FLAGS } from 'lib/constants' const EXPORT_MAX_LIMIT = 10000 @@ -104,10 +107,14 @@ export async function query( const logParams: Record = {} const startTime = performance.now() + const hogQLInsightsFlagEnabled = Boolean( + featureFlagLogic.findMounted()?.values.featureFlags?.[FEATURE_FLAGS.HOGQL_INSIGHTS] + ) + try { if (isPersonsNode(queryNode)) { response = await api.get(getPersonsEndpoint(queryNode), methodOptions) - } else if (isInsightQueryNode(queryNode)) { + } else if (isInsightQueryNode(queryNode) && !(hogQLInsightsFlagEnabled && isLifecycleQuery(queryNode))) { const filters = queryNodeToFilter(queryNode) const params = { ...filters, diff --git a/frontend/src/queries/schema.json b/frontend/src/queries/schema.json index 4412d012c5efb..5bcc6f9c57460 100644 --- a/frontend/src/queries/schema.json +++ b/frontend/src/queries/schema.json @@ -1411,6 +1411,9 @@ ], "description": "Property filters for all series" }, + "response": { + "$ref": "#/definitions/LifecycleQueryResponse" + }, "samplingFactor": { "description": "Sampling rate", "type": ["number", "null"] @@ -1433,6 +1436,25 @@ "required": ["kind", "series"], "type": "object" }, + "LifecycleQueryResponse": { + "additionalProperties": false, + "properties": { + "result": { + "items": { + "type": "object" + }, + "type": "array" + }, + "timings": { + "items": { + "$ref": "#/definitions/QueryTiming" + }, + "type": "array" + } + }, + "required": ["result"], + "type": "object" + }, "LifecycleToggle": { "enum": ["new", "resurrecting", "returning", "dormant"], "type": "string" diff --git a/frontend/src/queries/schema.ts b/frontend/src/queries/schema.ts index 1a7814cc71cb7..24a10fac1d5b8 100644 --- a/frontend/src/queries/schema.ts +++ b/frontend/src/queries/schema.ts @@ -440,6 +440,11 @@ export type LifecycleFilter = Omit & { toggledLifecycles?: LifecycleToggle[] } // using everything except what it inherits from FilterType +export interface LifecycleQueryResponse { + result: Record[] + timings?: QueryTiming[] +} + export interface LifecycleQuery extends InsightsQueryBase { kind: NodeKind.LifecycleQuery /** Granularity of the response. 
Can be one of `hour`, `day`, `week` or `month` */ @@ -448,6 +453,7 @@ export interface LifecycleQuery extends InsightsQueryBase { series: (EventsNode | ActionsNode)[] /** Properties specific to the lifecycle insight */ lifecycleFilter?: LifecycleFilter + response?: LifecycleQueryResponse } export type InsightQueryNode = diff --git a/frontend/src/scenes/authentication/Login.tsx b/frontend/src/scenes/authentication/Login.tsx index 883c3215db9bb..b3c3cf1f70149 100644 --- a/frontend/src/scenes/authentication/Login.tsx +++ b/frontend/src/scenes/authentication/Login.tsx @@ -168,7 +168,9 @@ export function Login(): JSX.Element {
)} - + {!precheckResponse.saml_available && !precheckResponse.sso_enforcement && ( + + )} ) diff --git a/frontend/src/scenes/experiments/ExperimentCodeSnippets.tsx b/frontend/src/scenes/experiments/ExperimentCodeSnippets.tsx index e66368e19db1f..b5d0f76e29e16 100644 --- a/frontend/src/scenes/experiments/ExperimentCodeSnippets.tsx +++ b/frontend/src/scenes/experiments/ExperimentCodeSnippets.tsx @@ -51,7 +51,7 @@ export function JSSnippet({ flagKey, variant }: SnippetProps): JSX.Element { Test that it works - {`posthog.feature_flags.override({'${flagKey}': '${variant}'})`} + {`posthog.featureFlags.override({'${flagKey}': '${variant}'})`} ) diff --git a/frontend/src/scenes/feature-flags/FeatureFlagCodeInstructions.stories.tsx b/frontend/src/scenes/feature-flags/FeatureFlagCodeInstructions.stories.tsx index 642ef3e6a88a6..5ac711aabde81 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlagCodeInstructions.stories.tsx +++ b/frontend/src/scenes/feature-flags/FeatureFlagCodeInstructions.stories.tsx @@ -29,6 +29,7 @@ const REGULAR_FEATURE_FLAG: FeatureFlagType = { performed_rollback: false, can_edit: true, tags: [], + surveys: [], } const GROUP_FEATURE_FLAG: FeatureFlagType = { diff --git a/frontend/src/scenes/feature-flags/activityDescriptions.tsx b/frontend/src/scenes/feature-flags/activityDescriptions.tsx index 1210c89089463..f774616afe7ba 100644 --- a/frontend/src/scenes/feature-flags/activityDescriptions.tsx +++ b/frontend/src/scenes/feature-flags/activityDescriptions.tsx @@ -250,6 +250,7 @@ const featureFlagActionsMapping: Record< can_edit: () => null, analytics_dashboards: () => null, has_enriched_analytics: () => null, + surveys: () => null, } export function flagActivityDescriber(logItem: ActivityLogItem, asNotification?: boolean): HumanizedChange { diff --git a/frontend/src/scenes/feature-flags/featureFlagLogic.test.ts b/frontend/src/scenes/feature-flags/featureFlagLogic.test.ts index 23aedb1086bba..f0516fe9956e1 100644 --- a/frontend/src/scenes/feature-flags/featureFlagLogic.test.ts +++ b/frontend/src/scenes/feature-flags/featureFlagLogic.test.ts @@ -37,6 +37,7 @@ function generateFeatureFlag( usage_dashboard: 1234, tags: [], has_enriched_analytics, + surveys: [], } } diff --git a/frontend/src/scenes/feature-flags/featureFlagLogic.ts b/frontend/src/scenes/feature-flags/featureFlagLogic.ts index 24f90439d16f8..aeb4b9471f764 100644 --- a/frontend/src/scenes/feature-flags/featureFlagLogic.ts +++ b/frontend/src/scenes/feature-flags/featureFlagLogic.ts @@ -19,6 +19,8 @@ import { DashboardBasicType, NewEarlyAccessFeatureType, EarlyAccessFeatureType, + Survey, + SurveyQuestionType, } from '~/types' import api from 'lib/api' import { router, urlToAction } from 'kea-router' @@ -40,6 +42,7 @@ import { userLogic } from 'scenes/userLogic' import { newDashboardLogic } from 'scenes/dashboard/newDashboardLogic' import { dashboardsLogic } from 'scenes/dashboard/dashboards/dashboardsLogic' import { NEW_EARLY_ACCESS_FEATURE } from 'scenes/early-access-features/earlyAccessFeatureLogic' +import { NEW_SURVEY, NewSurvey } from 'scenes/surveys/surveyLogic' const getDefaultRollbackCondition = (): FeatureFlagRollbackConditions => ({ operator: 'gt', @@ -73,6 +76,7 @@ const NEW_FLAG: FeatureFlagType = { experiment_set: null, features: [], rollback_conditions: [], + surveys: null, performed_rollback: false, can_edit: true, tags: [], @@ -414,6 +418,15 @@ export const featureFlagLogic = kea([ features: [...(state.features || []), newEarlyAccessFeature], } }, + createSurveySuccess: (state, { newSurvey }) => { + 
if (!state) { + return state + } + return { + ...state, + surveys: [...(state.surveys || []), newSurvey], + } + }, }, ], featureFlagMissing: [false, { setFeatureFlagMissing: () => true }], @@ -520,12 +533,33 @@ export const featureFlagLogic = kea([ null as EarlyAccessFeatureType | null, { createEarlyAccessFeature: async () => { - const updatedEarlyAccessFeature = { + const newEarlyAccessFeature = { ...NEW_EARLY_ACCESS_FEATURE, name: `Early access: ${values.featureFlag.key}`, feature_flag_id: values.featureFlag.id, } - return await api.earlyAccessFeatures.create(updatedEarlyAccessFeature as NewEarlyAccessFeatureType) + return await api.earlyAccessFeatures.create(newEarlyAccessFeature as NewEarlyAccessFeatureType) + }, + }, + ], + // used to generate a new survey + // but all subsequent operations after generation should occur via the surveyLogic + newSurvey: [ + null as Survey | null, + { + createSurvey: async () => { + const newSurvey = { + ...NEW_SURVEY, + name: `Survey: ${values.featureFlag.key}`, + linked_flag_id: values.featureFlag.id, + questions: [ + { + type: SurveyQuestionType.Open, + question: `What do you think of ${values.featureFlag.key}?`, + }, + ], + } + return await api.surveys.create(newSurvey as NewSurvey) }, }, ], @@ -869,6 +903,22 @@ export const featureFlagLogic = kea([ return (featureFlag?.features?.length || 0) > 0 }, ], + canCreateEarlyAccessFeature: [ + (s) => [s.featureFlag, s.variants], + (featureFlag, variants) => { + return ( + featureFlag && + featureFlag.filters.aggregation_group_type_index == undefined && + variants.length === 0 + ) + }, + ], + hasSurveys: [ + (s) => [s.featureFlag], + (featureFlag) => { + return featureFlag?.surveys && featureFlag.surveys.length > 0 + }, + ], }), urlToAction(({ actions, props }) => ({ [urls.featureFlag(props.id ?? 
'new')]: (_, __, ___, { method }) => { diff --git a/frontend/src/scenes/notebooks/Nodes/NodeWrapper.tsx b/frontend/src/scenes/notebooks/Nodes/NodeWrapper.tsx index 5f58cad124a1b..4a7d18ea42cce 100644 --- a/frontend/src/scenes/notebooks/Nodes/NodeWrapper.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NodeWrapper.tsx @@ -6,6 +6,7 @@ import { ExtendedRegExpMatchArray, Attribute, NodeViewProps, + getExtensionField, } from '@tiptap/react' import { ReactNode, useCallback, useRef } from 'react' import clsx from 'clsx' @@ -212,12 +213,14 @@ export type CreatePostHogWidgetNodeOptions> widgets?: NotebookNodeWidget[] + serializedText?: (attributes: NotebookNodeAttributes) => string } export function createPostHogWidgetNode({ Component, pasteOptions, attributes, + serializedText, ...wrapperProps }: CreatePostHogWidgetNodeOptions): Node { // NOTE: We use NodeViewProps here as we convert them to NotebookNodeViewProps @@ -252,6 +255,19 @@ export function createPostHogWidgetNode( atom: true, draggable: true, + serializedText: serializedText, + + extendNodeSchema(extension) { + const context = { + name: extension.name, + options: extension.options, + storage: extension.storage, + } + return { + serializedText: getExtensionField(extension, 'serializedText', context), + } + }, + addAttributes() { return { height: {}, diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeBacklink.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeBacklink.tsx index 9935f9c6f1608..154600a7e1d3f 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodeBacklink.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeBacklink.tsx @@ -2,7 +2,16 @@ import { mergeAttributes, Node, NodeViewProps } from '@tiptap/core' import { NodeViewWrapper, ReactNodeViewRenderer } from '@tiptap/react' import { InsightModel, NotebookNodeType, NotebookTarget } from '~/types' import { Link } from '@posthog/lemon-ui' -import { IconGauge, IconBarChart, IconFlag, IconExperiment, IconLive, IconPerson, IconCohort } from 'lib/lemon-ui/icons' +import { + IconGauge, + IconBarChart, + IconFlag, + IconExperiment, + IconLive, + IconPerson, + IconCohort, + IconJournal, +} from 'lib/lemon-ui/icons' import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' import { urls } from 'scenes/urls' import clsx from 'clsx' @@ -22,6 +31,7 @@ const ICON_MAP = { events: , persons: , cohorts: , + notebooks: , } const Component = (props: NodeViewProps): JSX.Element => { @@ -67,6 +77,8 @@ function backlinkHref(id: string, type: TaxonomicFilterGroupType): string { return urls.experiment(id) } else if (type === TaxonomicFilterGroupType.Dashboards) { return urls.dashboard(id) + } else if (type === TaxonomicFilterGroupType.Notebooks) { + return urls.notebook(id) } return '' } @@ -139,6 +151,16 @@ export const NotebookNodeBacklink = Node.create({ return { id: id, type: TaxonomicFilterGroupType.Dashboards, title: dashboard.name } }, }), + posthogNodePasteRule({ + find: urls.notebook('(.+)'), + editor: this.editor, + type: this.type, + getAttributes: async (match) => { + const id = match[1] + const notebook = await api.notebooks.get(id) + return { id: id, type: TaxonomicFilterGroupType.Notebooks, title: notebook.title } + }, + }), ] }, }) diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeFlag.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeFlag.tsx index 0e315def449b9..066917f6f3c9a 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodeFlag.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeFlag.tsx @@ -2,7 +2,7 @@ 
import { createPostHogWidgetNode } from 'scenes/notebooks/Nodes/NodeWrapper' import { FeatureFlagType, NotebookNodeType } from '~/types' import { BindLogic, useActions, useValues } from 'kea' import { featureFlagLogic, FeatureFlagLogicProps } from 'scenes/feature-flags/featureFlagLogic' -import { IconFlag, IconRecording, IconRocketLaunch } from 'lib/lemon-ui/icons' +import { IconFlag, IconRecording, IconRocketLaunch, IconSurveys } from 'lib/lemon-ui/icons' import clsx from 'clsx' import { LemonButton, LemonDivider } from '@posthog/lemon-ui' import { urls } from 'scenes/urls' @@ -15,6 +15,7 @@ import { FeatureFlagReleaseConditions } from 'scenes/feature-flags/FeatureFlagRe import api from 'lib/api' import { buildEarlyAccessFeatureContent } from './NotebookNodeEarlyAccessFeature' import { notebookNodeFlagLogic } from './NotebookNodeFlagLogic' +import { buildSurveyContent } from './NotebookNodeSurvey' const Component = (props: NotebookNodeViewProps): JSX.Element => { const { id } = props.attributes @@ -24,12 +25,17 @@ const Component = (props: NotebookNodeViewProps): JS recordingFilterForFlag, hasEarlyAccessFeatures, newEarlyAccessFeatureLoading, + canCreateEarlyAccessFeature, + hasSurveys, + newSurveyLoading, } = useValues(featureFlagLogic({ id })) - const { createEarlyAccessFeature } = useActions(featureFlagLogic({ id })) + const { createEarlyAccessFeature, createSurvey } = useActions(featureFlagLogic({ id })) const { expanded, nextNode } = useValues(notebookNodeLogic) const { insertAfter } = useActions(notebookNodeLogic) - const { shouldDisableInsertEarlyAccessFeature } = useValues(notebookNodeFlagLogic({ id, insertAfter })) + const { shouldDisableInsertEarlyAccessFeature, shouldDisableInsertSurvey } = useValues( + notebookNodeFlagLogic({ id, insertAfter }) + ) return (
@@ -64,37 +70,67 @@ const Component = (props: NotebookNodeViewProps): JSX.Element => {
+ {canCreateEarlyAccessFeature && ( + } + loading={newEarlyAccessFeatureLoading} + onClick={(e) => { + // prevent expanding the node if it isn't expanded + e.stopPropagation() + + if (!hasEarlyAccessFeatures) { + createEarlyAccessFeature() + } else { + if ((featureFlag?.features?.length || 0) <= 0) { + return + } + if (!shouldDisableInsertEarlyAccessFeature(nextNode) && featureFlag.features) { + insertAfter(buildEarlyAccessFeatureContent(featureFlag.features[0].id)) + } + } + }} + disabledReason={ + shouldDisableInsertEarlyAccessFeature(nextNode) && + 'Early access feature already exists below' + } + > + {hasEarlyAccessFeatures ? 'View' : 'Create'} early access feature + + )} } - loading={newEarlyAccessFeatureLoading} + icon={} + loading={newSurveyLoading} onClick={(e) => { // prevent expanding the node if it isn't expanded e.stopPropagation() - if (!hasEarlyAccessFeatures) { - createEarlyAccessFeature() + + if (!hasSurveys) { + createSurvey() } else { - if ((featureFlag?.features?.length || 0) <= 0) { + if ((featureFlag?.surveys?.length || 0) <= 0) { return } - if (!shouldDisableInsertEarlyAccessFeature(nextNode) && featureFlag.features) { - insertAfter(buildEarlyAccessFeatureContent(featureFlag.features[0].id)) + if (!shouldDisableInsertSurvey(nextNode) && featureFlag.surveys) { + insertAfter(buildSurveyContent(featureFlag.surveys[0].id)) } } }} - disabledReason={ - shouldDisableInsertEarlyAccessFeature(nextNode) && - 'Early access feature already exists below' - } + disabledReason={shouldDisableInsertSurvey(nextNode) && 'Survey already exists below'} > - {hasEarlyAccessFeatures ? 'View' : 'Create'} early access feature + {hasSurveys ? 'View' : 'Create'} survey } - onClick={() => { + onClick={(e) => { + // prevent expanding the node if it isn't expanded + e.stopPropagation() + if (nextNode?.type.name !== NotebookNodeType.FeatureFlagCodeExample) { insertAfter(buildCodeExampleContent(id)) } @@ -107,7 +143,10 @@ const Component = (props: NotebookNodeViewProps): JS Show implementation { + onClick={(e) => { + // prevent expanding the node if it isn't expanded + e.stopPropagation() + if (nextNode?.type.name !== NotebookNodeType.RecordingPlaylist) { insertAfter(buildPlaylistContent(recordingFilterForFlag)) } diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeFlagLogic.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeFlagLogic.tsx index b597575854e69..aa0ed54d437d7 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodeFlagLogic.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeFlagLogic.tsx @@ -5,6 +5,7 @@ import { buildEarlyAccessFeatureContent } from './NotebookNodeEarlyAccessFeature import { NotebookNodeType } from '~/types' import type { notebookNodeFlagLogicType } from './NotebookNodeFlagLogicType' +import { buildSurveyContent } from './NotebookNodeSurvey' export type NotebookNodeFlagLogicProps = { id: FeatureFlagLogicProps['id'] @@ -17,13 +18,16 @@ export const notebookNodeFlagLogic = kea([ key(({ id }) => id), connect((props: NotebookNodeFlagLogicProps) => ({ - actions: [featureFlagLogic({ id: props.id }), ['createEarlyAccessFeatureSuccess']], - values: [featureFlagLogic({ id: props.id }), ['featureFlag', 'hasEarlyAccessFeatures']], + actions: [featureFlagLogic({ id: props.id }), ['createEarlyAccessFeatureSuccess', 'createSurveySuccess']], + values: [featureFlagLogic({ id: props.id }), ['featureFlag', 'hasEarlyAccessFeatures', 'hasSurveys']], })), listeners(({ props }) => ({ createEarlyAccessFeatureSuccess: async ({ newEarlyAccessFeature }) => { 
props.insertAfter(buildEarlyAccessFeatureContent(newEarlyAccessFeature.id)) }, + createSurveySuccess: async ({ newSurvey }) => { + props.insertAfter(buildSurveyContent(newSurvey.id)) + }, })), selectors({ shouldDisableInsertEarlyAccessFeature: [ @@ -39,5 +43,18 @@ export const notebookNodeFlagLogic = kea([ ) }, ], + shouldDisableInsertSurvey: [ + (s) => [s.featureFlag, s.hasSurveys], + (featureFlag, hasSurveys) => + (nextNode: Node | null): boolean => { + return ( + (nextNode?.type.name === NotebookNodeType.Survey && + hasSurveys && + featureFlag.surveys && + nextNode?.attrs.id === featureFlag.surveys[0].id) || + false + ) + }, + ], }), ]) diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeImage.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeImage.tsx index effdf63d7afcf..8dc4e00839409 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodeImage.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeImage.tsx @@ -79,6 +79,10 @@ export const NotebookNodeImage = createPostHogWidgetNode { + // TODO file is null when this runs... should it be? + return attrs?.file?.name || '' + }, heightEstimate: 400, minHeight: 100, resizeable: true, diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodePerson.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodePerson.tsx index a8640e956759a..d582171f9690a 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodePerson.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodePerson.tsx @@ -76,4 +76,9 @@ export const NotebookNodePerson = createPostHogWidgetNode { + const personTitle = attrs?.title || '' + const personId = attrs?.id || '' + return `${personTitle} ${personId}`.trim() + }, }) diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeQuery.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeQuery.tsx index 3616fe485725a..c4bc461c68bcf 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodeQuery.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeQuery.tsx @@ -12,6 +12,7 @@ import { urls } from 'scenes/urls' import api from 'lib/api' import './NotebookNodeQuery.scss' +import { containsHogQLQuery, isHogQLQuery, isNodeWithSource } from '~/queries/utils' const DEFAULT_QUERY: QuerySchema = { kind: NodeKind.DataTableNode, @@ -123,6 +124,12 @@ export const NotebookNodeQuery = createPostHogWidgetNode { + let text = '' + const q = attrs.query + if (containsHogQLQuery(q)) { + if (isHogQLQuery(q)) { + text = q.query + } + if (isNodeWithSource(q)) { + text = isHogQLQuery(q.source) ? 
q.source.query : '' + } + } + return text + }, }) diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeRecording.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeRecording.tsx index dafa271b98725..766fd500a2f5b 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodeRecording.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeRecording.tsx @@ -106,6 +106,9 @@ export const NotebookNodeRecording = createPostHogWidgetNode { + return attrs.id + }, }) export function sessionRecordingPlayerProps(id: SessionRecordingId): SessionRecordingPlayerProps { diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeReplayTimestamp.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeReplayTimestamp.tsx index ec49f4445d005..88db6f4395ffc 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodeReplayTimestamp.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeReplayTimestamp.tsx @@ -75,6 +75,12 @@ export const NotebookNodeReplayTimestamp = Node.create({ group: 'inline', atom: true, + serializedText: (attrs: NotebookNodeReplayTimestampAttrs): string => { + // timestamp is not a block so `getText` does not add a separator. + // we need to add it manually + return `${attrs.playbackTime ? formatTimestamp(attrs.playbackTime) : '00:00'}:\n` + }, + addAttributes() { return { playbackTime: { default: null, keepOnSplit: false }, diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeSurvey.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeSurvey.tsx index 6ae601eeeab8b..d0b0cf87742b5 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodeSurvey.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeSurvey.tsx @@ -6,7 +6,7 @@ import { LemonButton, LemonDivider } from '@posthog/lemon-ui' import { urls } from 'scenes/urls' import { LemonSkeleton } from 'lib/lemon-ui/LemonSkeleton' import { notebookNodeLogic } from './notebookNodeLogic' -import { NotebookNodeViewProps } from '../Notebook/utils' +import { JSONContent, NotebookNodeViewProps } from '../Notebook/utils' import { buildFlagContent } from './NotebookNodeFlag' import { defaultSurveyAppearance, surveyLogic } from 'scenes/surveys/surveyLogic' import { StatusTag } from 'scenes/surveys/Surveys' @@ -140,3 +140,10 @@ export const NotebookNodeSurvey = createPostHogWidgetNode(functi const { editor } = useValues(notebookLogic) const onSelect = ( - { type }: TaxonomicFilterGroup, + group: TaxonomicFilterGroup, value: TaxonomicFilterValue, - { id, name }: { id: number; name: string } + item: TaxonomicDefinitionTypes ): void => { if (!editor) { return } const attrs = { - id: type === TaxonomicFilterGroupType.Events ? id : value, - title: name, - type: type, + id: group.type === TaxonomicFilterGroupType.Events ? 
item.id : value, + title: group.getName?.(item), + type: group.type, } editor @@ -81,6 +82,7 @@ const BacklinkCommands = forwardRef(functi TaxonomicFilterGroupType.FeatureFlags, TaxonomicFilterGroupType.Experiments, TaxonomicFilterGroupType.Dashboards, + TaxonomicFilterGroupType.Notebooks, ], optionsFromProp: undefined, popoverEnabled: true, diff --git a/frontend/src/scenes/notebooks/Notebook/Editor.tsx b/frontend/src/scenes/notebooks/Notebook/Editor.tsx index 60d1a67d08bcb..2a41bcce88209 100644 --- a/frontend/src/scenes/notebooks/Notebook/Editor.tsx +++ b/frontend/src/scenes/notebooks/Notebook/Editor.tsx @@ -3,7 +3,7 @@ import { useActions } from 'kea' import { useCallback, useRef } from 'react' import { Editor as TTEditor } from '@tiptap/core' -import { useEditor, EditorContent } from '@tiptap/react' +import { EditorContent, useEditor } from '@tiptap/react' import { FloatingMenu } from '@tiptap/extension-floating-menu' import StarterKit from '@tiptap/starter-kit' import ExtensionPlaceholder from '@tiptap/extension-placeholder' @@ -25,7 +25,7 @@ import { lemonToast } from '@posthog/lemon-ui' import { NotebookNodeType } from '~/types' import { NotebookNodeImage } from '../Nodes/NotebookNodeImage' -import { JSONContent, NotebookEditor, EditorFocusPosition, EditorRange, Node } from './utils' +import { EditorFocusPosition, EditorRange, JSONContent, Node, NotebookEditor, textContent } from './utils' import { SlashCommandsExtension } from './SlashCommands' import { BacklinkCommandsExtension } from './BacklinkCommands' import { NotebookNodeEarlyAccessFeature } from '../Nodes/NotebookNodeEarlyAccessFeature' @@ -182,6 +182,7 @@ export function Editor({ onCreate({ getJSON: () => editor.getJSON(), + getText: () => textContent(editor.state.doc), getEndPosition: () => editor.state.doc.content.size, getSelectedNode: () => editor.state.doc.nodeAt(editor.state.selection.$anchor.pos), getAdjacentNodes: (pos: number) => getAdjacentNodes(editor, pos), diff --git a/frontend/src/scenes/notebooks/Notebook/Notebook.scss b/frontend/src/scenes/notebooks/Notebook/Notebook.scss index 589e733a028c2..c6906e0a76f8a 100644 --- a/frontend/src/scenes/notebooks/Notebook/Notebook.scss +++ b/frontend/src/scenes/notebooks/Notebook/Notebook.scss @@ -145,7 +145,7 @@ &__content { max-height: calc(100vh - 220px); - overflow: scroll; + overflow: auto; } } diff --git a/frontend/src/scenes/notebooks/Notebook/SlashCommands.tsx b/frontend/src/scenes/notebooks/Notebook/SlashCommands.tsx index 5ecd3b7951a2b..87d5ee8c1e5c2 100644 --- a/frontend/src/scenes/notebooks/Notebook/SlashCommands.tsx +++ b/frontend/src/scenes/notebooks/Notebook/SlashCommands.tsx @@ -3,7 +3,19 @@ import Suggestion from '@tiptap/suggestion' import { ReactRenderer } from '@tiptap/react' import { LemonButton, LemonDivider, lemonToast } from '@posthog/lemon-ui' -import { IconCohort, IconQueryEditor, IconRecording, IconTableChart, IconUploadFile } from 'lib/lemon-ui/icons' +import { + IconCohort, + IconRecording, + IconTableChart, + IconUploadFile, + InsightSQLIcon, + InsightsFunnelsIcon, + InsightsLifecycleIcon, + InsightsPathsIcon, + InsightsRetentionIcon, + InsightsStickinessIcon, + InsightsTrendsIcon, +} from 'lib/lemon-ui/icons' import { forwardRef, useCallback, useEffect, useImperativeHandle, useMemo, useState } from 'react' import { EditorCommands, EditorRange } from './utils' import { NotebookNodeType } from '~/types' @@ -57,10 +69,179 @@ const TEXT_CONTROLS: SlashCommandsItem[] = [ ] const SLASH_COMMANDS: SlashCommandsItem[] = [ + { + title: 'Trend', + 
search: 'trend insight', + icon: , + command: (chain) => + chain.insertContent({ + type: NotebookNodeType.Query, + attrs: { + query: { + kind: 'InsightVizNode', + source: { + kind: 'TrendsQuery', + filterTestAccounts: false, + series: [ + { + kind: 'EventsNode', + event: '$pageview', + name: '$pageview', + math: 'total', + }, + ], + interval: 'day', + trendsFilter: { + display: 'ActionsLineGraph', + }, + }, + }, + }, + }), + }, + { + title: 'Funnel', + search: 'funnel insight', + icon: , + command: (chain) => + chain.insertContent({ + type: NotebookNodeType.Query, + attrs: { + query: { + kind: 'InsightVizNode', + source: { + kind: 'FunnelsQuery', + series: [ + { + kind: 'EventsNode', + name: '$pageview', + event: '$pageview', + }, + { + kind: 'EventsNode', + name: '$pageview', + event: '$pageview', + }, + ], + funnelsFilter: { + funnel_viz_type: 'steps', + }, + }, + }, + }, + }), + }, + { + title: 'Retention', + search: 'retention insight', + icon: , + command: (chain) => + chain.insertContent({ + type: NotebookNodeType.Query, + attrs: { + query: { + kind: 'InsightVizNode', + source: { + kind: 'RetentionQuery', + retentionFilter: { + period: 'Day', + total_intervals: 11, + target_entity: { + id: '$pageview', + name: '$pageview', + type: 'events', + }, + returning_entity: { + id: '$pageview', + name: '$pageview', + type: 'events', + }, + retention_type: 'retention_first_time', + }, + }, + }, + }, + }), + }, + { + title: 'Paths', + search: 'paths insight', + icon: , + command: (chain) => + chain.insertContent({ + type: NotebookNodeType.Query, + attrs: { + query: { + kind: 'InsightVizNode', + source: { + kind: 'PathsQuery', + pathsFilter: { + include_event_types: ['$pageview'], + }, + }, + }, + }, + }), + }, + { + title: 'Stickiness', + search: 'stickiness insight', + icon: , + command: (chain) => + chain.insertContent({ + type: NotebookNodeType.Query, + attrs: { + query: { + kind: 'InsightVizNode', + source: { + kind: 'StickinessQuery', + series: [ + { + kind: 'EventsNode', + name: '$pageview', + event: '$pageview', + math: 'total', + }, + ], + stickinessFilter: {}, + }, + }, + }, + }), + }, + { + title: 'Lifecycle', + search: 'lifecycle insight', + icon: , + command: (chain) => + chain.insertContent({ + type: NotebookNodeType.Query, + attrs: { + query: { + kind: 'InsightVizNode', + source: { + kind: 'LifecycleQuery', + series: [ + { + kind: 'EventsNode', + name: '$pageview', + event: '$pageview', + math: 'total', + }, + ], + lifecycleFilter: { + shown_as: 'Lifecycle', + }, + }, + full: true, + }, + }, + }), + }, { title: 'HogQL', search: 'sql', - icon: , + icon: , command: (chain) => chain.insertContent({ type: NotebookNodeType.Query, attrs: { query: examples['HogQLTable'] } }), }, diff --git a/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts b/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts index 84c8efa165372..a19b5f09dd1c5 100644 --- a/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts +++ b/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts @@ -178,6 +178,7 @@ export const notebookLogic = kea([ response = { ...values.scratchpadNotebook, content: {}, + text_content: null, version: 0, } } else if (props.shortId.startsWith('template-')) { @@ -210,6 +211,7 @@ export const notebookLogic = kea([ const response = await api.notebooks.update(values.notebook.short_id, { version: values.notebook.version, content: notebook.content, + text_content: values.editor?.getText() || '', title: notebook.title, }) @@ -242,6 +244,7 @@ export const notebookLogic = kea([ // We use the 
local content if set otherwise the notebook content. That way it supports templates, scratchpad etc. const response = await api.notebooks.create({ content: values.content || values.notebook.content, + text_content: values.editor?.getText() || '', title: values.title || values.notebook.title, }) @@ -430,6 +433,7 @@ export const notebookLogic = kea([ return } const jsonContent = values.editor.getJSON() + actions.setLocalContent(jsonContent) actions.onUpdateEditor() }, diff --git a/frontend/src/scenes/notebooks/Notebook/utils.ts b/frontend/src/scenes/notebooks/Notebook/utils.ts index 6947a4ef4a186..44feef88726e0 100644 --- a/frontend/src/scenes/notebooks/Notebook/utils.ts +++ b/frontend/src/scenes/notebooks/Notebook/utils.ts @@ -6,6 +6,7 @@ import { getText, JSONContent as TTJSONContent, Range as EditorRange, + TextSerializer, } from '@tiptap/core' import { Node as PMNode } from '@tiptap/pm/model' import { NodeViewProps } from '@tiptap/react' @@ -48,12 +49,13 @@ export type NotebookNodeWidget = { key: string label: string icon: JSX.Element - // using 'any' here shouldn't be necessary but I couldn't figure out how to set a generic on the notebookNodeLogic props + // using 'any' here shouldn't be necessary but, I couldn't figure out how to set a generic on the notebookNodeLogic props Component: ({ attributes, updateAttributes }: NotebookNodeAttributeProperties) => JSX.Element } export interface NotebookEditor { getJSON: () => JSONContent + getText: () => string getEndPosition: () => number getSelectedNode: () => Node | null getAdjacentNodes: (pos: number) => { previous: Node | null; next: Node | null } @@ -88,12 +90,39 @@ export const isCurrentNodeEmpty = (editor: TTEditor): boolean => { return false } -const textContent = (node: any): string => { +export const textContent = (node: any): string => { + // we've extended the node schema to support a custom serializedText function + // each custom node type needs to implement this function, or have an alternative in the map below + const customOrTitleSerializer: TextSerializer = (props): string => { + // TipTap chooses whether to add a separator based on a couple of factors + // but, we always want a separator since this text is for search purposes + const serializedText = props.node.type.spec.serializedText(props.node.attrs) || props.node.attrs?.title || '' + if (serializedText.length > 0 && serializedText[serializedText.length - 1] !== '\n') { + return serializedText + '\n' + } + return serializedText + } + + // we want the type system to complain if we forget to add a custom serializer + const customNodeTextSerializers: Record = { + 'ph-backlink': customOrTitleSerializer, + 'ph-early-access-feature': customOrTitleSerializer, + 'ph-experiment': customOrTitleSerializer, + 'ph-feature-flag': customOrTitleSerializer, + 'ph-feature-flag-code-example': customOrTitleSerializer, + 'ph-image': customOrTitleSerializer, + 'ph-insight': customOrTitleSerializer, + 'ph-person': customOrTitleSerializer, + 'ph-query': customOrTitleSerializer, + 'ph-recording': customOrTitleSerializer, + 'ph-recording-playlist': customOrTitleSerializer, + 'ph-replay-timestamp': customOrTitleSerializer, + 'ph-survey': customOrTitleSerializer, + } + return getText(node, { - blockSeparator: ' ', - textSerializers: { - [NotebookNodeType.ReplayTimestamp]: ({ node }) => `${node.attrs.playbackTime || '00:00'}: `, - }, + blockSeparator: '\n', + textSerializers: customNodeTextSerializers, }) } diff --git a/frontend/src/scenes/session-recordings/SessionsRecordings.stories.tsx 
b/frontend/src/scenes/session-recordings/SessionsRecordings-player-success.stories.tsx similarity index 93% rename from frontend/src/scenes/session-recordings/SessionsRecordings.stories.tsx rename to frontend/src/scenes/session-recordings/SessionsRecordings-player-success.stories.tsx index 1ac9395728811..1060246c67d27 100644 --- a/frontend/src/scenes/session-recordings/SessionsRecordings.stories.tsx +++ b/frontend/src/scenes/session-recordings/SessionsRecordings-player-success.stories.tsx @@ -9,7 +9,6 @@ import recordingSnapshotsJson from 'scenes/session-recordings/__mocks__/recordin import recordingMetaJson from 'scenes/session-recordings/__mocks__/recording_meta.json' import recordingEventsJson from 'scenes/session-recordings/__mocks__/recording_events_query' import recording_playlists from './__mocks__/recording_playlists.json' -import { ReplayTabs } from '~/types' const meta: Meta = { title: 'Scenes-App/Recordings', @@ -17,6 +16,7 @@ const meta: Meta = { layout: 'fullscreen', viewMode: 'story', mockDate: '2023-02-01', + waitForSelector: '.PlayerFrame__content .replayer-wrapper iframe', }, decorators: [ mswDecorator({ @@ -81,7 +81,7 @@ const meta: Meta = { }, ] }, - '/api/projects/:team_id/session_recording_playlists/:playlist_id/recordings?limit=100': (req) => { + '/api/projects/:team_id/session_recording_playlists/:playlist_id/recordings': (req) => { const playlistId = req.params.playlist_id const response = playlistId === '1234567' ? recordings : [] return [200, { has_next: false, results: response, version: 1 }] @@ -89,6 +89,12 @@ const meta: Meta = { // without the session-recording-blob-replay feature flag, we only load via ClickHouse '/api/projects/:team/session_recordings/:id/snapshots': recordingSnapshotsJson, '/api/projects/:team/session_recordings/:id': recordingMetaJson, + 'api/projects/:team/notebooks': { + count: 0, + next: null, + previous: null, + results: [], + }, }, post: { '/api/projects/:team/query': recordingEventsJson, @@ -97,16 +103,10 @@ const meta: Meta = { ], } export default meta -export function RecordingsList(): JSX.Element { - useEffect(() => { - router.actions.push(urls.replay()) - }, []) - return -} -export function RecordingsPlayLists(): JSX.Element { +export function RecentRecordings(): JSX.Element { useEffect(() => { - router.actions.push(urls.replay(ReplayTabs.Playlists)) + router.actions.push(urls.replay()) }, []) return } diff --git a/frontend/src/scenes/session-recordings/SessionsRecordings-playlist-listing.stories.tsx b/frontend/src/scenes/session-recordings/SessionsRecordings-playlist-listing.stories.tsx new file mode 100644 index 0000000000000..657fbccf4bc29 --- /dev/null +++ b/frontend/src/scenes/session-recordings/SessionsRecordings-playlist-listing.stories.tsx @@ -0,0 +1,48 @@ +import { Meta } from '@storybook/react' +import { useEffect } from 'react' +import { mswDecorator } from '~/mocks/browser' +import { router } from 'kea-router' +import { urls } from 'scenes/urls' +import { App } from 'scenes/App' +import recording_playlists from './__mocks__/recording_playlists.json' +import { ReplayTabs } from '~/types' +import recordings from 'scenes/session-recordings/__mocks__/recordings.json' +import recordingEventsJson from 'scenes/session-recordings/__mocks__/recording_events_query' + +const meta: Meta = { + title: 'Scenes-App/Recordings', + parameters: { + layout: 'fullscreen', + viewMode: 'story', + mockDate: '2023-02-01', + }, + decorators: [ + mswDecorator({ + get: { + '/api/projects/:team_id/session_recording_playlists': 
recording_playlists, + '/api/projects/:team_id/session_recordings': (req) => { + const version = req.url.searchParams.get('version') + return [ + 200, + { + has_next: false, + results: recordings, + version, + }, + ] + }, + }, + post: { + '/api/projects/:team/query': recordingEventsJson, + }, + }), + ], +} +export default meta + +export function RecordingsPlayLists(): JSX.Element { + useEffect(() => { + router.actions.push(urls.replay(ReplayTabs.Playlists)) + }, []) + return +} diff --git a/frontend/src/scenes/session-recordings/__mocks__/recording_events.json b/frontend/src/scenes/session-recordings/__mocks__/recording_events.json index f2db148045646..0afa00a98d244 100644 --- a/frontend/src/scenes/session-recordings/__mocks__/recording_events.json +++ b/frontend/src/scenes/session-recordings/__mocks__/recording_events.json @@ -1,6 +1,6 @@ [ { - "id": "$pageview", + "id": "$pageview1", "event": "$pageview", "name": "$event_before_recording_starts", "type": "events", @@ -14,7 +14,7 @@ "elements_hash": "" }, { - "id": "$pageview", + "id": "$pageview2", "name": "$pageview", "event": "$pageview", "type": "events", diff --git a/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx b/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx index fc5f0cc5ed158..451f1cf616f8a 100644 --- a/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx +++ b/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx @@ -4,16 +4,18 @@ import { } from 'scenes/session-recordings/player/sessionRecordingPlayerLogic' import { useActions, useValues } from 'kea' import { LemonButton, LemonButtonProps } from 'lib/lemon-ui/LemonButton' -import { IconComment, IconDelete, IconLink } from 'lib/lemon-ui/icons' +import { IconComment, IconDelete, IconJournalPlus, IconLink } from 'lib/lemon-ui/icons' import { openPlayerShareDialog } from 'scenes/session-recordings/player/share/PlayerShare' import { PlaylistPopoverButton } from './playlist-popover/PlaylistPopover' import { LemonDialog } from 'lib/lemon-ui/LemonDialog' import { NotebookSelectButton } from 'scenes/notebooks/NotebookSelectButton/NotebookSelectButton' import { NotebookNodeType } from '~/types' +import { useNotebookNode } from 'scenes/notebooks/Nodes/notebookNodeLogic' export function PlayerMetaLinks(): JSX.Element { const { sessionRecordingId, logicProps } = useValues(sessionRecordingPlayerLogic) const { setPause, deleteRecording } = useActions(sessionRecordingPlayerLogic) + const nodeLogic = useNotebookNode() const getCurrentPlayerTime = (): number => { // NOTE: We pull this value at call time as otherwise it would trigger re-renders if pulled from the hook @@ -78,9 +80,24 @@ export function PlayerMetaLinks(): JSX.Element { Share - - Pin - + {nodeLogic ? ( + nodeLogic.props.nodeType !== NotebookNodeType.Recording ? 
( + } + size="small" + onClick={() => { + nodeLogic.actions.insertAfter({ + type: NotebookNodeType.Recording, + attrs: { id: sessionRecordingId }, + }) + }} + /> + ) : null + ) : ( + + Pin + + )} {logicProps.playerKey !== 'modal' && ( { setFilters({ - date_from: '-21d', + date_from: '-30d', }) }} > - Search over the last 21 days + Search over the last 30 days ) : ( diff --git a/frontend/src/scenes/surveys/SurveyView.tsx b/frontend/src/scenes/surveys/SurveyView.tsx index 80c207d3c7644..3b0b46e89ca94 100644 --- a/frontend/src/scenes/surveys/SurveyView.tsx +++ b/frontend/src/scenes/surveys/SurveyView.tsx @@ -319,7 +319,9 @@ function SurveyNPSResults({ survey }: { survey: Survey }): JSX.Element { kind: NodeKind.TrendsQuery, dateRange: { date_from: dayjs(survey.created_at).format('YYYY-MM-DD'), - date_to: dayjs().format('YYYY-MM-DD'), + date_to: survey.end_date + ? dayjs(survey.end_date).format('YYYY-MM-DD') + : dayjs().format('YYYY-MM-DD'), }, series: [ { diff --git a/frontend/src/scenes/surveys/surveyLogic.tsx b/frontend/src/scenes/surveys/surveyLogic.tsx index fc0de1bbb63c9..b482f8e794949 100644 --- a/frontend/src/scenes/surveys/surveyLogic.tsx +++ b/frontend/src/scenes/surveys/surveyLogic.tsx @@ -55,7 +55,7 @@ export const defaultSurveyAppearance = { thankYouMessageHeader: 'Thank you for your feedback!', } -const NEW_SURVEY: NewSurvey = { +export const NEW_SURVEY: NewSurvey = { id: 'new', name: '', description: '', @@ -254,17 +254,28 @@ export const surveyLogic = kea([ if (surveyId === 'new') { return null } + const createdAt = (survey as Survey).created_at const surveysShownHogqlQuery = `select count(distinct person.id) as 'survey shown' from events where event == 'survey shown' and properties.$survey_id == '${surveyId}'` const surveysDismissedHogqlQuery = `select count(distinct person.id) as 'survey dismissed' from events where event == 'survey dismissed' and properties.$survey_id == '${surveyId}'` return { surveysShown: { kind: NodeKind.DataTableNode, - source: { kind: NodeKind.HogQLQuery, query: surveysShownHogqlQuery }, + source: { + kind: NodeKind.HogQLQuery, + query: surveysShownHogqlQuery, + filters: { dateRange: { date_from: dayjs(createdAt).format('YYYY-MM-DD') } }, + }, + showTimings: false, }, surveysDismissed: { kind: NodeKind.DataTableNode, - source: { kind: NodeKind.HogQLQuery, query: surveysDismissedHogqlQuery }, + source: { + kind: NodeKind.HogQLQuery, + query: surveysDismissedHogqlQuery, + filters: { dateRange: { date_from: dayjs(createdAt).format('YYYY-MM-DD') } }, + }, + showTimings: false, }, } }, @@ -304,6 +315,11 @@ export const surveyLogic = kea([ surveyMultipleChoiceQuery: [ (s) => [s.survey], (survey): DataTableNode | null => { + if (survey.id === 'new') { + return null + } + const createdAt = (survey as Survey).created_at + const singleChoiceQuery = `select count(), properties.$survey_response as choice from events where event == 'survey sent' and properties.$survey_id == '${survey.id}' group by choice order by count() desc` const multipleChoiceQuery = `select count(), arrayJoin(JSONExtractArrayRaw(properties, '$survey_response')) as choice from events where event == 'survey sent' and properties.$survey_id == '${survey.id}' group by choice order by count() desc` return { @@ -314,7 +330,13 @@ export const surveyLogic = kea([ survey.questions[0].type === SurveyQuestionType.SingleChoice ? 
singleChoiceQuery : multipleChoiceQuery, + filters: { + dateRange: { + date_from: dayjs(createdAt).format('YYYY-MM-DD'), + }, + }, }, + showTimings: false, } }, ], diff --git a/frontend/src/types.ts b/frontend/src/types.ts index cb40bbbaf4e09..1a6fa38a6d03e 100644 --- a/frontend/src/types.ts +++ b/frontend/src/types.ts @@ -2191,6 +2191,7 @@ export interface FeatureFlagType extends Omit=7.21.4", "@tiptap/core": "^2.1.0-rc.12", @@ -125,7 +125,7 @@ "kea-window-values": "^3.0.0", "md5": "^2.3.0", "monaco-editor": "^0.39.0", - "posthog-js": "1.78.2", + "posthog-js": "1.78.4", "posthog-js-lite": "2.0.0-alpha5", "prettier": "^2.8.8", "prop-types": "^15.7.2", @@ -154,7 +154,7 @@ "react-virtualized": "^9.22.5", "require-from-string": "^2.0.2", "resize-observer-polyfill": "^1.5.1", - "rrweb": "^2.0.0-alpha.9", + "rrweb": "^2.0.0-alpha.11", "sass": "^1.26.2", "use-debounce": "^9.0.3", "use-resize-observer": "^8.0.0", diff --git a/plugin-server/src/config/config.ts b/plugin-server/src/config/config.ts index e0d9e3c49ad13..9cecab54d8c4a 100644 --- a/plugin-server/src/config/config.ts +++ b/plugin-server/src/config/config.ts @@ -44,6 +44,7 @@ export function getDefaultConfig(): PluginsServerConfig { KAFKA_SASL_PASSWORD: undefined, KAFKA_CLIENT_RACK: undefined, KAFKA_CONSUMPTION_USE_RDKAFKA: false, // Transitional setting, ignored for consumers that only support one library + KAFKA_CONSUMPTION_RDKAFKA_COOPERATIVE_REBALANCE: true, // If true, use the cooperative rebalance strategy, otherwise uses the default ('range,roundrobin') KAFKA_CONSUMPTION_MAX_BYTES: 10_485_760, // Default value for kafkajs KAFKA_CONSUMPTION_MAX_BYTES_PER_PARTITION: 1_048_576, // Default value for kafkajs, must be bigger than message size KAFKA_CONSUMPTION_MAX_WAIT_MS: 1_000, // Down from the 5s default for kafkajs diff --git a/plugin-server/src/kafka/batch-consumer.ts b/plugin-server/src/kafka/batch-consumer.ts index 3acae7a88b57d..03c9e2de6db37 100644 --- a/plugin-server/src/kafka/batch-consumer.ts +++ b/plugin-server/src/kafka/batch-consumer.ts @@ -33,6 +33,7 @@ export const startBatchConsumer = async ({ topicCreationTimeoutMs, eachBatch, autoCommit = true, + cooperativeRebalance = true, queuedMinMessages = 100000, }: { connectionConfig: GlobalConfig @@ -48,6 +49,7 @@ export const startBatchConsumer = async ({ topicCreationTimeoutMs: number eachBatch: (messages: Message[]) => Promise autoCommit?: boolean + cooperativeRebalance?: boolean queuedMinMessages?: number }): Promise => { // Starts consuming from `topic` in batches of `fetchBatchSize` messages, @@ -113,12 +115,12 @@ export const startBatchConsumer = async ({ // https://www.confluent.io/en-gb/blog/incremental-cooperative-rebalancing-in-kafka/ // for details on the advantages of this rebalancing strategy as well as // how it works. - 'partition.assignment.strategy': 'cooperative-sticky', + 'partition.assignment.strategy': cooperativeRebalance ? 
'cooperative-sticky' : 'range,roundrobin', rebalance_cb: true, offset_commit_cb: true, }) - instrumentConsumerMetrics(consumer, groupId) + instrumentConsumerMetrics(consumer, groupId, cooperativeRebalance) let isShuttingDown = false let lastLoopTime = Date.now() diff --git a/plugin-server/src/kafka/consumer.ts b/plugin-server/src/kafka/consumer.ts index 32a6594009f7a..62b8e951ebc9f 100644 --- a/plugin-server/src/kafka/consumer.ts +++ b/plugin-server/src/kafka/consumer.ts @@ -59,17 +59,20 @@ export const createKafkaConsumer = async (config: ConsumerGlobalConfig) => { export function countPartitionsPerTopic(assignments: Assignment[]): Map { const partitionsPerTopic = new Map() for (const assignment of assignments) { - if (assignment.topic in partitionsPerTopic) { + if (partitionsPerTopic.has(assignment.topic)) { partitionsPerTopic.set(assignment.topic, partitionsPerTopic.get(assignment.topic) + 1) } else { partitionsPerTopic.set(assignment.topic, 1) } } - return partitionsPerTopic } -export const instrumentConsumerMetrics = (consumer: RdKafkaConsumer, groupId: string) => { +export const instrumentConsumerMetrics = ( + consumer: RdKafkaConsumer, + groupId: string, + cooperativeRebalance: boolean +) => { // For each message consumed, we record the latest timestamp processed for // each partition assigned to this consumer group member. This consumer // should only provide metrics for the partitions that are assigned to it, @@ -94,6 +97,7 @@ export const instrumentConsumerMetrics = (consumer: RdKafkaConsumer, groupId: st // // TODO: add other relevant metrics here // TODO: expose the internal librdkafka metrics as well. + const strategyString = cooperativeRebalance ? 'cooperative' : 'eager' consumer.on('rebalance', (error: LibrdKafkaError, assignments: TopicPartition[]) => { /** * see https://github.com/Blizzard/node-rdkafka#rebalancing errors are used to signal @@ -103,14 +107,22 @@ export const instrumentConsumerMetrics = (consumer: RdKafkaConsumer, groupId: st * And when the balancing is completed the new assignments are received with ERR__ASSIGN_PARTITIONS */ if (error.code === CODES.ERRORS.ERR__ASSIGN_PARTITIONS) { - status.info('📝️', 'librdkafka rebalance, partitions assigned', { assignments }) + status.info('📝️', `librdkafka ${strategyString} rebalance, partitions assigned`, { assignments }) for (const [topic, count] of countPartitionsPerTopic(assignments)) { - kafkaRebalancePartitionCount.labels({ topic: topic }).inc(count) + if (cooperativeRebalance) { + kafkaRebalancePartitionCount.labels({ topic: topic }).inc(count) + } else { + kafkaRebalancePartitionCount.labels({ topic: topic }).set(count) + } } } else if (error.code === CODES.ERRORS.ERR__REVOKE_PARTITIONS) { - status.info('📝️', 'librdkafka rebalance started, partitions revoked', { assignments }) + status.info('📝️', `librdkafka ${strategyString} rebalance started, partitions revoked`, { assignments }) for (const [topic, count] of countPartitionsPerTopic(assignments)) { - kafkaRebalancePartitionCount.labels({ topic: topic }).dec(count) + if (cooperativeRebalance) { + kafkaRebalancePartitionCount.labels({ topic: topic }).dec(count) + } else { + kafkaRebalancePartitionCount.labels({ topic: topic }).set(count) + } } } else { // We had a "real" error diff --git a/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-onevent.ts b/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-onevent.ts index a97d034778ac4..4d12925f0ce6b 100644 --- 
a/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-onevent.ts +++ b/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-onevent.ts @@ -4,20 +4,18 @@ import { EachBatchPayload, KafkaMessage } from 'kafkajs' import { RawClickHouseEvent } from '../../../types' import { convertToIngestionEvent } from '../../../utils/event' import { status } from '../../../utils/status' -import { groupIntoBatches } from '../../../utils/utils' import { runInstrumentedFunction } from '../../utils' import { KafkaJSIngestionConsumer } from '../kafka-queue' import { eventDroppedCounter, latestOffsetTimestampGauge } from '../metrics' +import { eachBatchHandlerHelper } from './each-batch-webhooks' // Must require as `tsc` strips unused `import` statements and just requiring this seems to init some globals require('@sentry/tracing') export async function eachMessageAppsOnEventHandlers( - message: KafkaMessage, + clickHouseEvent: RawClickHouseEvent, queue: KafkaJSIngestionConsumer ): Promise { - const clickHouseEvent = JSON.parse(message.value!.toString()) as RawClickHouseEvent - const pluginConfigs = queue.pluginsServer.pluginConfigsPerTeam.get(clickHouseEvent.team_id) if (pluginConfigs) { // Elements parsing can be extremely slow, so we skip it for some plugins @@ -50,7 +48,14 @@ export async function eachBatchAppsOnEventHandlers( payload: EachBatchPayload, queue: KafkaJSIngestionConsumer ): Promise { - await eachBatch(payload, queue, eachMessageAppsOnEventHandlers, groupIntoBatches, 'async_handlers_on_event') + await eachBatchHandlerHelper( + payload, + (teamId) => queue.pluginsServer.pluginConfigsPerTeam.has(teamId), + (event) => eachMessageAppsOnEventHandlers(event, queue), + queue.pluginsServer.statsd, + queue.pluginsServer.WORKER_CONCURRENCY * queue.pluginsServer.TASKS_PER_WORKER, + 'on_event' + ) } export async function eachBatch( diff --git a/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-webhooks.ts b/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-webhooks.ts index 427297a613b1b..fb671f0cd9633 100644 --- a/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-webhooks.ts +++ b/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-webhooks.ts @@ -17,10 +17,10 @@ import { eventDroppedCounter, latestOffsetTimestampGauge } from '../metrics' require('@sentry/tracing') // exporting only for testing -export function groupIntoBatchesWebhooks( +export function groupIntoBatchesByUsage( array: KafkaMessage[], batchSize: number, - actionMatcher: ActionMatcher + shouldProcess: (teamId: number) => boolean ): { eventBatch: RawClickHouseEvent[]; lastOffset: string; lastTimestamp: string }[] { // Most events will not trigger a webhook call, so we want to filter them out as soon as possible // to achieve the highest effective concurrency when executing the actual HTTP calls. 
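That comment captures the key idea behind generalizing `groupIntoBatchesWebhooks` into `groupIntoBatchesByUsage`: drop events whose team doesn't use the handler before fanning out, while still tracking offsets for the dropped messages so they can be committed. A condensed sketch of that shape, with types trimmed to only what the grouping inspects (the real implementation follows in the hunk below):

```ts
import { KafkaMessage } from 'kafkajs'

// Only the field the grouping logic needs.
interface RawClickHouseEvent {
    team_id: number
}

interface BatchWithOffsets {
    eventBatch: RawClickHouseEvent[]
    lastOffset: string
    lastTimestamp: string
}

function groupByUsage(
    messages: KafkaMessage[],
    batchSize: number,
    shouldProcess: (teamId: number) => boolean
): BatchWithOffsets[] {
    const batches: BatchWithOffsets[] = []
    let currentBatch: RawClickHouseEvent[] = []

    messages.forEach((message, index) => {
        const event = JSON.parse(message.value!.toString()) as RawClickHouseEvent
        if (shouldProcess(event.team_id)) {
            currentBatch.push(event)
        }
        // Cut a batch when full, or at the end of the payload. Filtered-out
        // messages still advance lastOffset so their offsets get committed;
        // this can legitimately produce a trailing empty batch.
        if (currentBatch.length >= batchSize || index === messages.length - 1) {
            batches.push({
                eventBatch: currentBatch,
                lastOffset: message.offset,
                lastTimestamp: message.timestamp,
            })
            currentBatch = []
        }
    })
    return batches
}
```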
@@ -32,7 +32,7 @@ export function groupIntoBatchesWebhooks( let currentCount = 0 array.forEach((message, index) => { const clickHouseEvent = JSON.parse(message.value!.toString()) as RawClickHouseEvent - if (actionMatcher.hasWebhooks(clickHouseEvent.team_id)) { + if (shouldProcess(clickHouseEvent.team_id)) { currentBatch.push(clickHouseEvent) currentCount++ } else { @@ -58,18 +58,36 @@ export async function eachBatchWebhooksHandlers( hookCannon: HookCommander, statsd: StatsD | undefined, concurrency: number +): Promise { + await eachBatchHandlerHelper( + payload, + (teamId) => actionMatcher.hasWebhooks(teamId), + (event) => eachMessageWebhooksHandlers(event, actionMatcher, hookCannon, statsd), + statsd, + concurrency, + 'webhooks' + ) +} + +export async function eachBatchHandlerHelper( + payload: EachBatchPayload, + shouldProcess: (teamId: number) => boolean, + eachMessageHandler: (event: RawClickHouseEvent) => Promise, + statsd: StatsD | undefined, + concurrency: number, + stats_key: string ): Promise { // similar to eachBatch function in each-batch.ts, but without the dependency on the KafkaJSIngestionConsumer // & handling the different batching return type - const key = 'async_handlers_webhooks' + const key = `async_handlers_${stats_key}` const batchStartTimer = new Date() const loggingKey = `each_batch_${key}` const { batch, resolveOffset, heartbeat, commitOffsetsIfNecessary, isRunning, isStale }: EachBatchPayload = payload - const transaction = Sentry.startTransaction({ name: `eachBatchWebhooks` }) + const transaction = Sentry.startTransaction({ name: `eachBatch${stats_key}` }) try { - const batchesWithOffsets = groupIntoBatchesWebhooks(batch.messages, concurrency, actionMatcher) + const batchesWithOffsets = groupIntoBatchesByUsage(batch.messages, concurrency, shouldProcess) statsd?.histogram('ingest_event_batching.input_length', batch.messages.length, { key: key }) statsd?.histogram('ingest_event_batching.batch_count', batchesWithOffsets.length, { key: key }) @@ -88,9 +106,7 @@ export async function eachBatchWebhooksHandlers( } await Promise.all( - eventBatch.map((event: RawClickHouseEvent) => - eachMessageWebhooksHandlers(event, actionMatcher, hookCannon, statsd).finally(() => heartbeat()) - ) + eventBatch.map((event: RawClickHouseEvent) => eachMessageHandler(event).finally(() => heartbeat())) ) resolveOffset(lastOffset) diff --git a/plugin-server/src/main/ingestion-queues/kafka-queue.ts b/plugin-server/src/main/ingestion-queues/kafka-queue.ts index da51173e0507f..7989efd4b356a 100644 --- a/plugin-server/src/main/ingestion-queues/kafka-queue.ts +++ b/plugin-server/src/main/ingestion-queues/kafka-queue.ts @@ -255,6 +255,7 @@ export class IngestionConsumer { consumerMaxWaitMs: this.pluginsServer.KAFKA_CONSUMPTION_MAX_WAIT_MS, fetchBatchSize: 500, topicCreationTimeoutMs: this.pluginsServer.KAFKA_TOPIC_CREATION_TIMEOUT_MS, + cooperativeRebalance: this.pluginsServer.KAFKA_CONSUMPTION_RDKAFKA_COOPERATIVE_REBALANCE, eachBatch: (payload) => this.eachBatchConsumer(payload), }) this.consumerReady = true diff --git a/plugin-server/src/types.ts b/plugin-server/src/types.ts index a6bd8fe69ce17..9a0e0f4ebfe52 100644 --- a/plugin-server/src/types.ts +++ b/plugin-server/src/types.ts @@ -129,6 +129,7 @@ export interface PluginsServerConfig { KAFKA_SASL_PASSWORD: string | undefined KAFKA_CLIENT_RACK: string | undefined KAFKA_CONSUMPTION_USE_RDKAFKA: boolean + KAFKA_CONSUMPTION_RDKAFKA_COOPERATIVE_REBALANCE: boolean KAFKA_CONSUMPTION_MAX_BYTES: number KAFKA_CONSUMPTION_MAX_BYTES_PER_PARTITION: number 
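The new `KAFKA_CONSUMPTION_RDKAFKA_COOPERATIVE_REBALANCE` setting above selects between `'cooperative-sticky'` and librdkafka's default `'range,roundrobin'` assignment, and `instrumentConsumerMetrics` earlier in the diff maintains the per-topic partition gauge differently for each: cooperative rebalances deliver incremental deltas, while eager rebalances replace the full assignment. A sketch of that distinction, assuming a prom-client style gauge (the metric name here is illustrative):

```ts
import { Gauge } from 'prom-client'

const kafkaRebalancePartitionCount = new Gauge({
    name: 'kafka_rebalance_partition_count', // illustrative name
    help: 'Partitions assigned to this consumer group member, per topic',
    labelNames: ['topic'],
})

function recordRebalance(topic: string, count: number, cooperative: boolean, revoked: boolean): void {
    const gauge = kafkaRebalancePartitionCount.labels({ topic })
    if (cooperative) {
        // Incremental protocol: each event only carries the partitions
        // gained or lost, so adjust the gauge relative to its current value.
        if (revoked) {
            gauge.dec(count)
        } else {
            gauge.inc(count)
        }
    } else {
        // Eager protocol: the whole assignment is revoked and re-issued, so
        // each event carries an absolute count and the gauge is overwritten.
        gauge.set(count)
    }
}
```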
    KAFKA_CONSUMPTION_MAX_WAIT_MS: number // fetch.wait.max.ms rdkafka parameter
diff --git a/plugin-server/src/utils/db/hub.ts b/plugin-server/src/utils/db/hub.ts
index 710a163752a6b..4e37d8a5cd715 100644
--- a/plugin-server/src/utils/db/hub.ts
+++ b/plugin-server/src/utils/db/hub.ts
@@ -91,7 +91,6 @@ export async function createHub(
               : undefined,
         rejectUnauthorized: serverConfig.CLICKHOUSE_CA ? false : undefined,
     })
-    await clickhouse.querying('SELECT 1') // test that the connection works
     status.info('👍', `ClickHouse ready`)

     status.info('🤔', `Connecting to Kafka...`)
diff --git a/plugin-server/src/utils/utils.ts b/plugin-server/src/utils/utils.ts
index 69c56640bf886..aace016721449 100644
--- a/plugin-server/src/utils/utils.ts
+++ b/plugin-server/src/utils/utils.ts
@@ -312,14 +312,6 @@ export function escapeClickHouseString(string: string): string {
     return string.replace(/\\/g, '\\\\').replace(/'/g, "\\'")
 }

-export function groupIntoBatches<T>(array: T[], batchSize: number): T[][] {
-    const batches = []
-    for (let i = 0; i < array.length; i += batchSize) {
-        batches.push(array.slice(i, i + batchSize))
-    }
-    return batches
-}
-
 /** Standardize JS code used internally to form without extraneous indentation. Template literal function. */
 export function code(strings: TemplateStringsArray): string {
     const stringsConcat = strings.join('…')
diff --git a/plugin-server/tests/main/ingestion-queues/each-batch.test.ts b/plugin-server/tests/main/ingestion-queues/each-batch.test.ts
index 617978884fe29..0580f53d2724b 100644
--- a/plugin-server/tests/main/ingestion-queues/each-batch.test.ts
+++ b/plugin-server/tests/main/ingestion-queues/each-batch.test.ts
@@ -9,13 +9,10 @@ import {
     eachBatchLegacyIngestion,
     splitKafkaJSIngestionBatch,
 } from '../../../src/main/ingestion-queues/batch-processing/each-batch-ingestion-kafkajs'
-import {
-    eachBatch,
-    eachBatchAppsOnEventHandlers,
-} from '../../../src/main/ingestion-queues/batch-processing/each-batch-onevent'
+import { eachBatchAppsOnEventHandlers } from '../../../src/main/ingestion-queues/batch-processing/each-batch-onevent'
 import {
     eachBatchWebhooksHandlers,
-    groupIntoBatchesWebhooks,
+    groupIntoBatchesByUsage,
 } from '../../../src/main/ingestion-queues/batch-processing/each-batch-webhooks'
 import {
     ClickHouseTimestamp,
@@ -24,7 +21,6 @@ import {
     PostIngestionEvent,
     RawClickHouseEvent,
 } from '../../../src/types'
-import { groupIntoBatches } from '../../../src/utils/utils'
 import { ActionManager } from '../../../src/worker/ingestion/action-manager'
 import { ActionMatcher } from '../../../src/worker/ingestion/action-matcher'
 import { HookCommander } from '../../../src/worker/ingestion/hooks'
@@ -150,26 +146,6 @@ describe('eachBatchX', () => {
         }
     })

-    describe('eachBatch', () => {
-        it('calls eachMessage with the correct arguments', async () => {
-            const eachMessage = jest.fn(() => Promise.resolve())
-            const batch = createKafkaJSBatch(event)
-            await eachBatch(batch, queue, eachMessage, groupIntoBatches, 'key')
-
-            expect(eachMessage).toHaveBeenCalledWith({ value: JSON.stringify(event) }, queue)
-        })
-
-        it('tracks metrics based on the key', async () => {
-            const eachMessage = jest.fn(() => Promise.resolve())
-            await eachBatch(createKafkaJSBatch(event), queue, eachMessage, groupIntoBatches, 'my_key')
-
-            expect(queue.pluginsServer.statsd.timing).toHaveBeenCalledWith(
-                'kafka_queue.each_batch_my_key',
-                expect.any(Date)
-            )
-        })
-    })
-
     describe('eachBatchAppsOnEventHandlers', () => {
         it('calls runAppsOnEventPipeline when useful', async () => {
            queue.pluginsServer.pluginConfigsPerTeam.set(2, [pluginConfig39])
@@ -333,11 +309,9 @@ describe('eachBatchX', () => {
                 kafkaTimestamp: '2020-02-23 00:10:00.00' as ClickHouseTimestamp,
             },
         ])
-        const actionManager = new ActionManager(queue.pluginsServer.postgres)
-        const actionMatcher = new ActionMatcher(queue.pluginsServer.postgres, actionManager)
-        // mock hasWebhooks 10 calls, 1,3,10 should return false, others true
-        actionMatcher.hasWebhooks = jest.fn((teamId) => teamId !== 1 && teamId !== 3 && teamId !== 10)
-        const result = groupIntoBatchesWebhooks(batch.batch.messages, 5, actionMatcher)
+        // teamIDs 1,3,10 should return false, others true
+        const toProcess = jest.fn((teamId) => teamId !== 1 && teamId !== 3 && teamId !== 10)
+        const result = groupIntoBatchesByUsage(batch.batch.messages, 5, toProcess)
         expect(result).toEqual([
             {
                 eventBatch: expect.arrayContaining([
@@ -375,8 +349,7 @@ describe('eachBatchX', () => {
         ])
         // make sure that if the last message would be a new batch and if it's going to be excluded we
        // still get the last batch as empty with the right offset and timestamp
-        actionMatcher.hasWebhooks = jest.fn((teamId) => teamId !== 1 && teamId !== 3 && teamId !== 10)
-        const result2 = groupIntoBatchesWebhooks(batch.batch.messages, 7, actionMatcher)
+        const result2 = groupIntoBatchesByUsage(batch.batch.messages, 7, toProcess)
         expect(result2).toEqual([
             {
                 eventBatch: expect.arrayContaining([
diff --git a/plugin-server/tests/main/ingestion-queues/kafka-queue.test.ts b/plugin-server/tests/main/ingestion-queues/kafka-queue.test.ts
index c0912a2ca499b..31dc19d000f3b 100644
--- a/plugin-server/tests/main/ingestion-queues/kafka-queue.test.ts
+++ b/plugin-server/tests/main/ingestion-queues/kafka-queue.test.ts
@@ -1,4 +1,7 @@
+import { Assignment } from 'node-rdkafka-acosom'
+
 import { KAFKA_EVENTS_PLUGIN_INGESTION } from '../../../src/config/kafka-topics'
+import { countPartitionsPerTopic } from '../../../src/kafka/consumer'
 import { ServerInstance, startPluginsServer } from '../../../src/main/pluginsServer'
 import { LogLevel, PluginsServerConfig } from '../../../src/types'
 import { Hub } from '../../../src/types'
@@ -79,3 +82,22 @@ describe.skip('IngestionConsumer', () => {
         expect(bufferCalls.length).toEqual(1)
     })
 })
+
+describe('countPartitionsPerTopic', () => {
+    it('should correctly count the number of partitions per topic', () => {
+        const assignments: Assignment[] = [
+            { topic: 'topic1', partition: 0 },
+            { topic: 'topic1', partition: 1 },
+            { topic: 'topic2', partition: 0 },
+            { topic: 'topic2', partition: 1 },
+            { topic: 'topic2', partition: 2 },
+            { topic: 'topic3', partition: 0 },
+        ]
+
+        const result = countPartitionsPerTopic(assignments)
+        expect(result.get('topic1')).toBe(2)
+        expect(result.get('topic2')).toBe(3)
+        expect(result.get('topic3')).toBe(1)
+        expect(result.size).toBe(3)
+    })
+})
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index c78c38396aa16..ba8713e974840 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -39,8 +39,8 @@ dependencies:
     specifier: ^2.1.2
     version: 2.1.2(react@16.14.0)
   '@rrweb/types':
-    specifier: ^2.0.0-alpha.9
-    version: 2.0.0-alpha.9
+    specifier: ^2.0.0-alpha.11
+    version: 2.0.0-alpha.11
   '@sentry/react':
     specifier: 7.22.0
     version: 7.22.0(react@16.14.0)
@@ -195,8 +195,8 @@ dependencies:
     specifier: ^0.39.0
     version: 0.39.0
   posthog-js:
-    specifier: 1.78.2
-    version: 1.78.2
+    specifier: 1.78.4
+    version: 1.78.4
   posthog-js-lite:
     specifier: 2.0.0-alpha5
     version: 2.0.0-alpha5
@@ -282,8 +282,8 @@ dependencies:
     specifier: ^1.5.1
     version: 1.5.1
   rrweb:
-
specifier: ^2.0.0-alpha.9 - version: 2.0.0-alpha.9 + specifier: ^2.0.0-alpha.11 + version: 2.0.0-alpha.11 sass: specifier: ^1.26.2 version: 1.56.0 @@ -3928,10 +3928,10 @@ packages: type-fest: 2.19.0 dev: false - /@rrweb/types@2.0.0-alpha.9: - resolution: {integrity: sha512-yS2KghLSmSSxo6H7tHrJ6u+nWJA9zCXaKFyc79rUSX8RHHSImRqocTqJ8jz794kCIWA90rvaQayRONdHO+vB0Q==} + /@rrweb/types@2.0.0-alpha.11: + resolution: {integrity: sha512-8ccocIkT5J/bfNRQY85qR/g6p5YQFpgFO2cMt4+Ex7w31Lq0yqZBRaoYEsawQKpLrn5KOHkdn2UTUrna7WMQuA==} dependencies: - rrweb-snapshot: 2.0.0-alpha.9 + rrweb-snapshot: 2.0.0-alpha.11 dev: false /@sentry/browser@7.22.0: @@ -14908,8 +14908,8 @@ packages: resolution: {integrity: sha512-tlkBdypJuvK/s00n4EiQjwYVfuuZv6vt8BF3g1ooIQa2Gz9Vz80p8q3qsPLZ0V5ErGRy6i3Q4fWC9TDzR7GNRQ==} dev: false - /posthog-js@1.78.2: - resolution: {integrity: sha512-jDy0QR+Mt7c4efq4knUsDVx/dT9DKMRLPimR/aSNTPRlAdWDNYD6WFv3oFyUk5tzkOPcKVJItRmmS2ua3tesYA==} + /posthog-js@1.78.4: + resolution: {integrity: sha512-+ao0/qoP8fUErrF6Y2Yugilkh4ooh8MX6n6ckYsn3yV610YrpXGR165fbq8X1ukimV4YmkWbMHOACtkOuZ/+8w==} dependencies: fflate: 0.4.8 dev: false @@ -16607,27 +16607,27 @@ packages: resolution: {integrity: sha512-85aZYCxweiD5J8yTEbw+E6A27zSnLPNDL0WfPdw3YYodq7WjnTKo0q4dtyQ2gz23iPT8Q9CUyJtAaUNcTxRf5Q==} dev: false - /rrdom@2.0.0-alpha.9: - resolution: {integrity: sha512-jfaZ8tHi098P4GpPEtkOwnkucyKA5eGanAVHGPklzCqAeEq1Yx+9/y8AeOtF3yiobqKKkW8lLvFH2KrBH1CZlQ==} + /rrdom@2.0.0-alpha.11: + resolution: {integrity: sha512-U37m0t4jTz63wnVRcOQ5qFzSTrI5RdNgeXnHAha2Fmh9+1K+XuCx421a8D1wZk3WcDc2sFz/04FVdM0OD2caHg==} dependencies: - rrweb-snapshot: 2.0.0-alpha.9 + rrweb-snapshot: 2.0.0-alpha.11 dev: false - /rrweb-snapshot@2.0.0-alpha.9: - resolution: {integrity: sha512-mHg1uUE2iUf0MXLE//4r5cMynkbduwmaOEis4gC7EuqkUAC1pYoLpcYYVt9lD6dgYIF6BmK6dgLLzMpD/tTyyA==} + /rrweb-snapshot@2.0.0-alpha.11: + resolution: {integrity: sha512-N0dzeJA2VhrlSOadkKwCVmV/DuNOwBH+Lhx89hAf9PQK4lCS8AP4AaylhqUdZOYHqwVjqsYel/uZ4hN79vuLhw==} dev: false - /rrweb@2.0.0-alpha.9: - resolution: {integrity: sha512-8E2yiLY7IrFjDcVUZ7AcQtdBNFuTIsBrlCMpbyLua6X64dGRhOZ+IUDXLnAbNj5oymZgFtZu2UERG9rmV2VAng==} + /rrweb@2.0.0-alpha.11: + resolution: {integrity: sha512-vJ2gNvF+pUG9C2aaau7iSNqhWBSc4BwtUO4FpegOtDObuH4PIaxNJOlgHz82+WxKr9XPm93ER0LqmNpy0KYdKg==} dependencies: - '@rrweb/types': 2.0.0-alpha.9 + '@rrweb/types': 2.0.0-alpha.11 '@types/css-font-loading-module': 0.0.7 '@xstate/fsm': 1.6.5 base64-arraybuffer: 1.0.2 fflate: 0.4.8 mitt: 3.0.0 - rrdom: 2.0.0-alpha.9 - rrweb-snapshot: 2.0.0-alpha.9 + rrdom: 2.0.0-alpha.11 + rrweb-snapshot: 2.0.0-alpha.11 dev: false /rtl-css-js@1.16.0: diff --git a/posthog/api/feature_flag.py b/posthog/api/feature_flag.py index bdd8ecf3ed555..f61543e14f5cb 100644 --- a/posthog/api/feature_flag.py +++ b/posthog/api/feature_flag.py @@ -1,9 +1,8 @@ import json from typing import Any, Dict, List, Optional, cast -from django.db.models import QuerySet +from django.db.models import QuerySet, Q from django.conf import settings -from django.db.models.query_utils import Q from rest_framework import authentication, exceptions, request, serializers, status, viewsets from rest_framework.decorators import action from rest_framework.permissions import SAFE_METHODS, BasePermission, IsAuthenticated @@ -70,6 +69,7 @@ class FeatureFlagSerializer(TaggedItemSerializerMixin, serializers.HyperlinkedMo rollout_percentage = serializers.SerializerMethodField() experiment_set: serializers.PrimaryKeyRelatedField = serializers.PrimaryKeyRelatedField(many=True, read_only=True) + 
surveys: serializers.SerializerMethodField = serializers.SerializerMethodField()
     features: serializers.SerializerMethodField = serializers.SerializerMethodField()
     usage_dashboard: serializers.PrimaryKeyRelatedField = serializers.PrimaryKeyRelatedField(read_only=True)
     analytics_dashboards = serializers.PrimaryKeyRelatedField(
@@ -100,6 +100,7 @@ class Meta:
             "rollout_percentage",
             "ensure_experience_continuity",
             "experiment_set",
+            "surveys",
             "features",
             "rollback_conditions",
             "performed_rollback",
@@ -129,6 +130,12 @@ def get_features(self, feature_flag: FeatureFlag) -> Dict:
         return MinimalEarlyAccessFeatureSerializer(feature_flag.features, many=True).data

+    def get_surveys(self, feature_flag: FeatureFlag) -> Dict:
+        from posthog.api.survey import SurveyAPISerializer
+
+        return SurveyAPISerializer(feature_flag.surveys_linked_flag, many=True).data  # type: ignore
+        # ignoring type because mypy doesn't know about the surveys_linked_flag `related_name` relationship
+
     def get_rollout_percentage(self, feature_flag: FeatureFlag) -> Optional[int]:
         if self.get_is_simple_flag(feature_flag):
             return feature_flag.conditions[0].get("rollout_percentage")
@@ -343,7 +350,9 @@ def get_queryset(self) -> QuerySet:
             .prefetch_related("experiment_set")
             .prefetch_related("features")
             .prefetch_related("analytics_dashboards")
+            .prefetch_related("surveys_linked_flag")
         )
+
         survey_targeting_flags = Survey.objects.filter(team=self.team, targeting_flag__isnull=False).values_list(
             "targeting_flag_id", flat=True
         )
@@ -434,6 +443,7 @@ def my_flags(self, request: request.Request, **kwargs):
             .prefetch_related("experiment_set")
             .prefetch_related("features")
             .prefetch_related("analytics_dashboards")
+            .prefetch_related("surveys_linked_flag")
             .select_related("created_by")
             .order_by("-created_at")
         )
diff --git a/posthog/api/notebook.py b/posthog/api/notebook.py
index 7f3cfae9be957..5c25efe42815d 100644
--- a/posthog/api/notebook.py
+++ b/posthog/api/notebook.py
@@ -1,5 +1,5 @@
 from typing import Dict, List, Optional, Any
-
+from django.db.models import Q
 import structlog
 from django.db import transaction
 from django.db.models import QuerySet
@@ -74,6 +74,7 @@ class Meta:
             "short_id",
             "title",
             "content",
+            "text_content",
             "version",
             "deleted",
             "created_at",
@@ -250,8 +251,13 @@ def _filter_request(self, request: request.Request, queryset: QuerySet) -> QuerySet:
             queryset = queryset.filter(
                 last_modified_at__lt=relative_date_parse(request.GET["date_to"], self.team.timezone_info)
             )
-        elif key == "s":
-            queryset = queryset.filter(title__icontains=request.GET["s"])
+        elif key == "search":
+            queryset = queryset.filter(
+                # some notebooks have no text_content until they are next saved, so we need to check the title too
+                # TODO this can be removed once all/most notebooks have text_content
+                Q(title__search=request.GET["search"])
+                | Q(text_content__search=request.GET["search"])
+            )
         elif key == "contains":
             contains = request.GET["contains"]
             match_pairs = contains.replace(",", " ").split(" ")
diff --git a/posthog/api/query.py b/posthog/api/query.py
index f6c9e871d0c6d..5e4e14c34f999 100644
--- a/posthog/api/query.py
+++ b/posthog/api/query.py
@@ -25,6 +25,7 @@
 from posthog.hogql.errors import HogQLException
 from posthog.hogql.metadata import get_hogql_metadata
 from posthog.hogql.query import execute_hogql_query
+from posthog.hogql_queries.lifecycle_hogql_query import run_lifecycle_query
 from posthog.models import Team
 from posthog.models.event.events_query import run_events_query
 from posthog.models.user import User
@@ -32,7 +33,7 @@
 from
posthog.queries.time_to_see_data.serializers import SessionEventsQuerySerializer, SessionsQuerySerializer from posthog.queries.time_to_see_data.sessions import get_session_events, get_sessions from posthog.rate_limit import AIBurstRateThrottle, AISustainedRateThrottle, TeamRateThrottle -from posthog.schema import EventsQuery, HogQLQuery, HogQLMetadata +from posthog.schema import EventsQuery, HogQLQuery, HogQLMetadata, LifecycleQuery class QueryThrottle(TeamRateThrottle): @@ -203,22 +204,26 @@ def process_query(team: Team, query_json: Dict, default_limit: Optional[int] = N if query_kind == "EventsQuery": events_query = EventsQuery.parse_obj(query_json) - response = run_events_query(query=events_query, team=team, default_limit=default_limit) - return _unwrap_pydantic_dict(response) + events_response = run_events_query(query=events_query, team=team, default_limit=default_limit) + return _unwrap_pydantic_dict(events_response) elif query_kind == "HogQLQuery": hogql_query = HogQLQuery.parse_obj(query_json) - response = execute_hogql_query( + hogql_response = execute_hogql_query( query_type="HogQLQuery", query=hogql_query.query, team=team, filters=hogql_query.filters, default_limit=default_limit, ) - return _unwrap_pydantic_dict(response) + return _unwrap_pydantic_dict(hogql_response) elif query_kind == "HogQLMetadata": metadata_query = HogQLMetadata.parse_obj(query_json) - response = get_hogql_metadata(query=metadata_query, team=team) - return _unwrap_pydantic_dict(response) + metadata_response = get_hogql_metadata(query=metadata_query, team=team) + return _unwrap_pydantic_dict(metadata_response) + elif query_kind == "LifecycleQuery": + lifecycle_query = LifecycleQuery.parse_obj(query_json) + lifecycle_response = run_lifecycle_query(query=lifecycle_query, team=team) + return _unwrap_pydantic_dict(lifecycle_response) elif query_kind == "DatabaseSchemaQuery": database = create_hogql_database(team.pk) return serialize_database(database) diff --git a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr index 396f5103c7ec3..299074ec3d44b 100644 --- a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr +++ b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr @@ -40,6 +40,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -51,6 +52,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -76,6 +78,7 @@ "posthog_notebook"."team_id", "posthog_notebook"."title", "posthog_notebook"."content", + "posthog_notebook"."text_content", "posthog_notebook"."deleted", "posthog_notebook"."version", "posthog_notebook"."created_at", @@ -94,6 +97,7 @@ "posthog_notebook"."team_id", "posthog_notebook"."title", "posthog_notebook"."content", + "posthog_notebook"."text_content", "posthog_notebook"."deleted", "posthog_notebook"."version", "posthog_notebook"."created_at", @@ -120,6 +124,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", 
"posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -131,6 +136,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -169,6 +175,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -180,6 +187,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -277,6 +285,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -288,6 +297,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -479,6 +489,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -490,6 +501,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -555,6 +567,7 @@ "posthog_notebook"."team_id", "posthog_notebook"."title", "posthog_notebook"."content", + "posthog_notebook"."text_content", "posthog_notebook"."deleted", "posthog_notebook"."version", "posthog_notebook"."created_at", @@ -572,6 +585,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -583,6 +597,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -671,6 +686,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -682,6 +698,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", diff --git a/posthog/api/test/notebooks/test_notebook.py b/posthog/api/test/notebooks/test_notebook.py index 
3f49024d708e9..1b7f36ae54ce3 100644 --- a/posthog/api/test/notebooks/test_notebook.py +++ b/posthog/api/test/notebooks/test_notebook.py @@ -1,4 +1,4 @@ -from typing import List, Dict, Optional +from typing import List, Dict from unittest import mock from freezegun import freeze_time @@ -67,17 +67,20 @@ def test_cannot_list_deleted_notebook(self) -> None: @parameterized.expand( [ - ("without_content", None), - ("with_content", {"some": "kind", "of": "tip", "tap": "content"}), + ("without_content", None, None), + ("with_content", {"some": "kind", "of": "tip", "tap": "content"}, "some kind of tip tap content"), ] ) - def test_create_a_notebook(self, _, content: Optional[Dict]) -> None: - response = self.client.post(f"/api/projects/{self.team.id}/notebooks", data={"content": content}) + def test_create_a_notebook(self, _, content: Dict | None, text_content: str | None) -> None: + response = self.client.post( + f"/api/projects/{self.team.id}/notebooks", data={"content": content, "text_content": text_content} + ) assert response.status_code == status.HTTP_201_CREATED assert response.json() == { "id": response.json()["id"], "short_id": response.json()["short_id"], "content": content, + "text_content": text_content, "title": None, "version": 0, "created_at": mock.ANY, diff --git a/posthog/api/test/notebooks/test_notebook_filtering.py b/posthog/api/test/notebooks/test_notebook_filtering.py index 4e9f9370c178d..5f634de548fc7 100644 --- a/posthog/api/test/notebooks/test_notebook_filtering.py +++ b/posthog/api/test/notebooks/test_notebook_filtering.py @@ -42,7 +42,7 @@ }, } -BASIC_TEXT = lambda text: {"type": "paragraph", "content": [{"text": text, "type": "text"}]} +BASIC_TEXT = lambda text: {"type": "paragraph", "content": [{"text": text, "type": "text"}], "text_content": text} class TestNotebooksFiltering(APIBaseTest, QueryMatchingTest): @@ -62,20 +62,22 @@ def _create_notebook_with_content(self, inner_content: List[Dict[str, Any]], tit @parameterized.expand( [ - ["some text", [0]], - ["other text", [1]], - ["text", [0, 1]], + ["i ride", [0]], + ["pony", [0]], + ["ponies", [0]], + ["my hobby", [1]], + ["around", [0, 1]], ["random", []], ] ) def test_filters_based_on_title(self, search_text: str, expected_match_indexes: List[int]) -> None: notebook_ids = [ - self._create_notebook_with_content([BASIC_TEXT("my important notes")], title="some text"), - self._create_notebook_with_content([BASIC_TEXT("my important notes")], title="other text"), + self._create_notebook_with_content([BASIC_TEXT("my important notes")], title="i ride around on a pony"), + self._create_notebook_with_content([BASIC_TEXT("my important notes")], title="my hobby is to fish around"), ] response = self.client.get( - f"/api/projects/{self.team.id}/notebooks?s={search_text}", + f"/api/projects/{self.team.id}/notebooks?search={search_text}", ) assert response.status_code == status.HTTP_200_OK @@ -83,6 +85,32 @@ def test_filters_based_on_title(self, search_text: str, expected_match_indexes: assert len(results) == len(expected_match_indexes) assert sorted([r["id"] for r in results]) == sorted([notebook_ids[i] for i in expected_match_indexes]) + @parameterized.expand( + [ + ["pony", [0]], + ["pOnY", [0]], + ["ponies", [0]], + ["goat", [1]], + ["ride", [0, 1]], + ["neither", []], + ] + ) + def test_filters_based_on_text_content(self, search_text: str, expected_match_indexes: List[int]) -> None: + notebook_ids = [ + # will match both pony and ponies + self._create_notebook_with_content([BASIC_TEXT("you may ride a pony")], title="never 
matches"), + self._create_notebook_with_content([BASIC_TEXT("but may not ride a goat")], title="never matches"), + ] + + response = self.client.get( + f"/api/projects/{self.team.id}/notebooks?search={search_text}", + ) + assert response.status_code == status.HTTP_200_OK + + results = response.json()["results"] + assert len(results) == len(expected_match_indexes) + assert sorted([r["id"] for r in results]) == sorted([notebook_ids[i] for i in expected_match_indexes]) + def test_filters_based_on_params(self) -> None: other_user = User.objects.create_and_join(self.organization, "other@posthog.com", "password") notebook_one = Notebook.objects.create(team=self.team, created_by=self.user) diff --git a/posthog/api/test/test_feature_flag.py b/posthog/api/test/test_feature_flag.py index 9efebf97b878b..b0d6f73c87ebb 100644 --- a/posthog/api/test/test_feature_flag.py +++ b/posthog/api/test/test_feature_flag.py @@ -939,7 +939,7 @@ def test_my_flags_is_not_nplus1(self) -> None: format="json", ).json() - with self.assertNumQueries(9): + with self.assertNumQueries(10): response = self.client.get(f"/api/projects/{self.team.id}/feature_flags/my_flags") self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -950,7 +950,7 @@ def test_my_flags_is_not_nplus1(self) -> None: format="json", ).json() - with self.assertNumQueries(9): + with self.assertNumQueries(10): response = self.client.get(f"/api/projects/{self.team.id}/feature_flags/my_flags") self.assertEqual(response.status_code, status.HTTP_200_OK) diff --git a/posthog/api/test/test_survey.py b/posthog/api/test/test_survey.py index 45e13024c1a0b..f393e5cec4379 100644 --- a/posthog/api/test/test_survey.py +++ b/posthog/api/test/test_survey.py @@ -77,6 +77,80 @@ def test_can_create_survey_with_linked_flag_and_targeting(self): {"type": "open", "question": "What would you want to improve from notebooks?"} ] + def test_used_in_survey_is_populated_correctly_for_feature_flag_list(self) -> None: + self.maxDiff = None + + ff_key = "notebooks" + notebooks_flag = FeatureFlag.objects.create(team=self.team, key=ff_key, created_by=self.user) + + response = self.client.post( + f"/api/projects/{self.team.id}/surveys/", + data={ + "name": "Notebooks power users survey", + "type": "popover", + "questions": [{"type": "open", "question": "What would you want to improve from notebooks?"}], + "linked_flag_id": notebooks_flag.id, + "targeting_flag_filters": { + "groups": [ + { + "variant": None, + "rollout_percentage": None, + "properties": [ + {"key": "billing_plan", "value": ["cloud"], "operator": "exact", "type": "person"} + ], + } + ] + }, + "conditions": {"url": "https://app.posthog.com/notebooks"}, + }, + format="json", + ) + + response_data = response.json() + assert response.status_code == status.HTTP_201_CREATED, response_data + assert response_data["linked_flag"]["id"] == notebooks_flag.id + assert FeatureFlag.objects.filter(id=response_data["targeting_flag"]["id"]).exists() + + created_survey1 = response.json()["id"] + + response = self.client.post( + f"/api/projects/{self.team.id}/surveys/", + data={ + "name": "Notebooks random survey", + "type": "popover", + "questions": [{"type": "open", "question": "What would you want to improve from notebooks?"}], + "linked_flag_id": notebooks_flag.id, + "conditions": {"url": "https://app.posthog.com/notebooks"}, + }, + format="json", + ) + + response_data = response.json() + assert response.status_code == status.HTTP_201_CREATED, response_data + assert response_data["linked_flag"]["id"] == notebooks_flag.id + assert 
response_data["targeting_flag"] is None + + created_survey2 = response.json()["id"] + + # add another random feature flag + self.client.post( + f"/api/projects/{self.team.id}/feature_flags/", + data={"name": f"flag", "key": f"flag_0", "filters": {"groups": [{"rollout_percentage": 5}]}}, + format="json", + ).json() + + with self.assertNumQueries(12): + response = self.client.get(f"/api/projects/{self.team.id}/feature_flags") + self.assertEqual(response.status_code, status.HTTP_200_OK) + result = response.json() + + self.assertEqual(result["count"], 2) + + self.assertEqual( + [(res["key"], [survey["id"] for survey in res["surveys"]]) for res in result["results"]], + [("flag_0", []), (ff_key, [created_survey1, created_survey2])], + ) + def test_updating_survey_with_targeting_creates_or_updates_targeting_flag(self): survey_with_targeting = self.client.post( f"/api/projects/{self.team.id}/surveys/", diff --git a/posthog/hogql/placeholders.py b/posthog/hogql/placeholders.py index 670b98cfd45e5..bd63ce32754c0 100644 --- a/posthog/hogql/placeholders.py +++ b/posthog/hogql/placeholders.py @@ -32,7 +32,7 @@ def __init__(self, placeholders: Optional[Dict[str, ast.Expr]]): def visit_placeholder(self, node): if not self.placeholders: raise HogQLException(f"Placeholders, such as {{{node.field}}}, are not supported in this context") - if node.field in self.placeholders: + if node.field in self.placeholders and self.placeholders[node.field] is not None: new_node = self.placeholders[node.field] new_node.start = node.start new_node.end = node.end diff --git a/posthog/hogql/property.py b/posthog/hogql/property.py index 3caa10d51f8f6..81efafc225a1f 100644 --- a/posthog/hogql/property.py +++ b/posthog/hogql/property.py @@ -15,7 +15,7 @@ from posthog.models.property import PropertyGroup from posthog.models.property.util import build_selector_regex from posthog.models.property_definition import PropertyType -from posthog.schema import PropertyOperator +from posthog.schema import PropertyOperator, PropertyGroupFilter, PropertyGroupFilterValue, FilterLogicalOperator def has_aggregation(expr: AST) -> bool: @@ -59,16 +59,30 @@ def property_to_expr(property: Union[BaseModel, PropertyGroup, Property, dict, l return ast.And(exprs=properties) elif isinstance(property, Property): pass - elif isinstance(property, PropertyGroup): - if property.type != PropertyOperatorType.AND and property.type != PropertyOperatorType.OR: + elif ( + isinstance(property, PropertyGroup) + or isinstance(property, PropertyGroupFilter) + or isinstance(property, PropertyGroupFilterValue) + ): + if ( + isinstance(property, PropertyGroup) + and property.type != PropertyOperatorType.AND + and property.type != PropertyOperatorType.OR + ): raise NotImplementedException(f'PropertyGroup of unknown type "{property.type}"') + if ( + (isinstance(property, PropertyGroupFilter) or isinstance(property, PropertyGroupFilterValue)) + and property.type != FilterLogicalOperator.AND + and property.type != FilterLogicalOperator.OR + ): + raise NotImplementedException(f'PropertyGroupFilter of unknown type "{property.type}"') if len(property.values) == 0: return ast.Constant(value=True) if len(property.values) == 1: return property_to_expr(property.values[0], team) - if property.type == PropertyOperatorType.AND: + if property.type == PropertyOperatorType.AND or property.type == FilterLogicalOperator.AND: return ast.And(exprs=[property_to_expr(p, team) for p in property.values]) else: return ast.Or(exprs=[property_to_expr(p, team) for p in property.values]) diff --git 
a/posthog/hogql_queries/lifecycle_hogql_query.py b/posthog/hogql_queries/lifecycle_hogql_query.py index 2df71a976d1a9..6b73034fdfcf3 100644 --- a/posthog/hogql_queries/lifecycle_hogql_query.py +++ b/posthog/hogql_queries/lifecycle_hogql_query.py @@ -1,43 +1,27 @@ -from typing import Dict, Any +from typing import Optional from django.utils.timezone import datetime from posthog.hogql import ast from posthog.hogql.parser import parse_expr, parse_select +from posthog.hogql.property import property_to_expr, action_to_expr from posthog.hogql.query import execute_hogql_query +from posthog.hogql.timings import HogQLTimings +from posthog.models import Team, Action from posthog.hogql_queries.query_date_range import QueryDateRange -from posthog.models import Team -from posthog.schema import LifecycleQuery +from posthog.schema import LifecycleQuery, ActionsNode, EventsNode, LifecycleQueryResponse -def create_time_filter(date_range: QueryDateRange) -> ast.Expr: - # don't need timezone here, as HogQL will use the project timezone automatically - # :TRICKY: We fetch all data even for the period before the graph starts up until the end of the last period - time_filter = parse_expr( - """ - (timestamp >= dateTrunc({interval}, {date_from}) - {one_interval_period}) - AND - (timestamp < dateTrunc({interval}, {date_to}) + {one_interval_period}) - """, - placeholders={ - "date_from": date_range.date_from_as_hogql, - "date_to": date_range.date_to_as_hogql, - "one_interval_period": date_range.one_interval_period_as_hogql, - "interval": date_range.interval_period_string_as_hogql, - }, - ) - - return time_filter - - -def create_events_query(date_range: QueryDateRange, event_filter: ast.Expr): - if not event_filter: - event_filter = ast.Constant(value=True) - +def create_events_query( + query_date_range: QueryDateRange, + event_filter: Optional[ast.Expr], + timings: HogQLTimings, + sampling_factor: Optional[float] = None, +): placeholders = { - "event_filter": event_filter, - "interval": date_range.interval_period_string_as_hogql, - "one_interval_period": date_range.one_interval_period_as_hogql, + "event_filter": event_filter or ast.Constant(value=True), + "interval": query_date_range.interval_period_string_as_hogql_constant(), + "one_interval_period": query_date_range.one_interval_period(), } events_query = parse_select( @@ -61,105 +45,182 @@ def create_events_query(date_range: QueryDateRange, event_filter: ast.Expr): GROUP BY person_id """, placeholders=placeholders, + timings=timings, ) - return events_query + if sampling_factor is not None and isinstance(sampling_factor, float): + sample_expr = ast.SampleExpr(sample_value=ast.RatioExpr(left=ast.Constant(value=sampling_factor))) + events_query.select_from.sample = sample_expr -def run_lifecycle_query( - team: Team, - query: LifecycleQuery, -) -> Dict[str, Any]: - now_dt = datetime.now() + return events_query - query_date_range = QueryDateRange(date_range=query.dateRange, team=team, interval=query.interval, now=now_dt) - interval = query_date_range.interval.name - one_interval_period = query_date_range.one_interval_period_as_hogql - number_interval_period = query_date_range.interval_periods_as_hogql("number") +def run_lifecycle_query(team: Team, query: LifecycleQuery) -> LifecycleQueryResponse: + now_dt = datetime.now() + timings = HogQLTimings() + + event_filter = [] + with timings.measure("date_range"): + query_date_range = QueryDateRange(date_range=query.dateRange, team=team, interval=query.interval, now=now_dt) + event_filter.append( + parse_expr( + "timestamp 
>= dateTrunc({interval}, {date_from}) - {one_interval}", + { + "interval": query_date_range.interval_period_string_as_hogql_constant(), + "one_interval": query_date_range.one_interval_period(), + "date_from": query_date_range.date_from_as_hogql(), + }, + timings=timings, + ) + ) + event_filter.append( + parse_expr( + "timestamp < dateTrunc({interval}, {date_to}) + {one_interval}", + { + "interval": query_date_range.interval_period_string_as_hogql_constant(), + "one_interval": query_date_range.one_interval_period(), + "date_to": query_date_range.date_to_as_hogql(), + }, + timings=timings, + ) + ) - time_filter = create_time_filter(query_date_range) - event_filter = time_filter # TODO: add all other filters + with timings.measure("properties"): + if query.properties is not None and query.properties != []: + event_filter.append(property_to_expr(query.properties, team)) + + with timings.measure("series_filters"): + for serie in query.series or []: + if isinstance(serie, ActionsNode): + action = Action.objects.get(pk=int(serie.id), team=team) + event_filter.append(action_to_expr(action)) + elif isinstance(serie, EventsNode): + if serie.event is not None: + event_filter.append( + ast.CompareOperation( + op=ast.CompareOperationOp.Eq, + left=ast.Field(chain=["event"]), + right=ast.Constant(value=str(serie.event)), + ) + ) + else: + raise ValueError(f"Invalid serie kind: {serie.kind}") + if serie.properties is not None and serie.properties != []: + event_filter.append(property_to_expr(serie.properties, team)) + + with timings.measure("test_account_filters"): + if ( + query.filterTestAccounts + and isinstance(team.test_account_filters, list) + and len(team.test_account_filters) > 0 + ): + for property in team.test_account_filters: + event_filter.append(property_to_expr(property, team)) + + if len(event_filter) == 0: + event_filter = ast.Constant(value=True) + elif len(event_filter) == 1: + event_filter = event_filter[0] + else: + event_filter = ast.And(exprs=event_filter) placeholders = { - "interval": ast.Constant(value=interval), - "one_interval_period": one_interval_period, - "number_interval_period": number_interval_period, + "interval": query_date_range.interval_period_string_as_hogql_constant(), + "one_interval_period": query_date_range.one_interval_period(), + "number_interval_period": query_date_range.number_interval_periods(), "event_filter": event_filter, - "date_from": query_date_range.date_from_as_hogql, - "date_to": query_date_range.date_to_as_hogql, + "date_from": query_date_range.date_from_as_hogql(), + "date_to": query_date_range.date_to_as_hogql(), } - events_query = create_events_query(date_range=query_date_range, event_filter=event_filter) + with timings.measure("events_query"): + events_query = create_events_query( + query_date_range=query_date_range, + event_filter=event_filter, + sampling_factor=query.samplingFactor, + timings=timings, + ) - periods = parse_select( - """ - SELECT ( - dateTrunc({interval}, {date_to}) - {number_interval_period} - ) AS start_of_period - FROM numbers( - dateDiff( - {interval}, - dateTrunc({interval}, {date_from}), - dateTrunc({interval}, {date_to} + {one_interval_period}) + with timings.measure("periods_query"): + periods = parse_select( + """ + SELECT ( + dateTrunc({interval}, {date_to}) - {number_interval_period} + ) AS start_of_period + FROM numbers( + dateDiff( + {interval}, + dateTrunc({interval}, {date_from}), + dateTrunc({interval}, {date_to} + {one_interval_period}) + ) ) - ) - """, - placeholders=placeholders, - ) + """, + 
placeholders=placeholders, + timings=timings, + ) - lifecycle_sql = parse_select( - """ - SELECT groupArray(start_of_period) AS date, - groupArray(counts) AS total, - status - FROM ( - SELECT - status = 'dormant' ? negate(sum(counts)) : negate(negate(sum(counts))) as counts, - start_of_period, - status + with timings.measure("lifecycle_query"): + lifecycle_sql = parse_select( + """ + SELECT groupArray(start_of_period) AS date, + groupArray(counts) AS total, + status FROM ( SELECT - periods.start_of_period as start_of_period, - 0 AS counts, + status = 'dormant' ? negate(sum(counts)) : negate(negate(sum(counts))) as counts, + start_of_period, status - FROM {periods} as periods - CROSS JOIN ( - SELECT status - FROM (SELECT 1) - ARRAY JOIN ['new', 'returning', 'resurrecting', 'dormant'] as status - ) as sec - ORDER BY status, start_of_period - UNION ALL - SELECT - start_of_period, count(DISTINCT person_id) AS counts, status - FROM {events_query} + FROM ( + SELECT + periods.start_of_period as start_of_period, + 0 AS counts, + status + FROM {periods} as periods + CROSS JOIN ( + SELECT status + FROM (SELECT 1) + ARRAY JOIN ['new', 'returning', 'resurrecting', 'dormant'] as status + ) as sec + ORDER BY status, start_of_period + UNION ALL + SELECT + start_of_period, count(DISTINCT person_id) AS counts, status + FROM {events_query} + GROUP BY start_of_period, status + ) + WHERE start_of_period <= dateTrunc({interval}, {date_to}) + AND start_of_period >= dateTrunc({interval}, {date_from}) GROUP BY start_of_period, status + ORDER BY start_of_period ASC ) - WHERE start_of_period <= dateTrunc({interval}, {date_to}) - AND start_of_period >= dateTrunc({interval}, {date_from}) - GROUP BY start_of_period, status - ORDER BY start_of_period ASC - ) - GROUP BY status - """, - {**placeholders, "periods": periods, "events_query": events_query}, - ) + GROUP BY status + """, + {**placeholders, "periods": periods, "events_query": events_query}, + timings=timings, + ) response = execute_hogql_query( team=team, query=lifecycle_sql, query_type="LifecycleQuery", + timings=timings, ) # ensure that the items are in a deterministic order order = {"new": 1, "returning": 2, "resurrecting": 3, "dormant": 4} - results = sorted(response.results, key=lambda result: order.get(result[2], result[2])) + results = sorted(response.results, key=lambda result: order.get(result[2], 5)) res = [] for val in results: counts = val[1] - labels = [item.strftime("%-d-%b-%Y{}".format(" %H:%M" if interval == "hour" else "")) for item in val[0]] - days = [item.strftime("%Y-%m-%d{}".format(" %H:%M:%S" if interval == "hour" else "")) for item in val[0]] + labels = [ + item.strftime("%-d-%b-%Y{}".format(" %H:%M" if query_date_range.interval_name == "hour" else "")) + for item in val[0] + ] + days = [ + item.strftime("%Y-%m-%d{}".format(" %H:%M:%S" if query_date_range.interval_name == "hour" else "")) + for item in val[0] + ] label = "{} - {}".format("", val[2]) # entity.name additional_values = {"label": label, "status": val[2]} @@ -173,4 +234,4 @@ def run_lifecycle_query( } ) - return {"result": res} + return LifecycleQueryResponse(result=res, timings=response.timings) diff --git a/posthog/hogql_queries/query_date_range.py b/posthog/hogql_queries/query_date_range.py index 4d76b222deb2b..35695b37181c5 100644 --- a/posthog/hogql_queries/query_date_range.py +++ b/posthog/hogql_queries/query_date_range.py @@ -1,11 +1,12 @@ +import re +from functools import cached_property from datetime import datetime -from functools import cached_property, lru_cache 
from typing import Optional +from zoneinfo import ZoneInfo -import pytz from dateutil.relativedelta import relativedelta -from posthog.hogql.parser import parse_expr, ast +from posthog.hogql.parser import ast from posthog.models.team import Team from posthog.queries.util import get_earliest_timestamp from posthog.schema import DateRange, IntervalType @@ -19,96 +20,96 @@ class QueryDateRange: _team: Team _date_range: Optional[DateRange] _interval: Optional[IntervalType] - _now_non_timezone: datetime + _now_without_timezone: datetime def __init__( self, date_range: Optional[DateRange], team: Team, interval: Optional[IntervalType], now: datetime ) -> None: self._team = team self._date_range = date_range - self._interval = interval - self._now_non_timezone = now + self._interval = interval or IntervalType.day + self._now_without_timezone = now + + if not isinstance(self._interval, IntervalType) or re.match(r"[^a-z]", self._interval.name): + raise ValueError(f"Invalid interval: {interval}") - @cached_property def date_to(self) -> datetime: - date_to = self._now + date_to = self.now_with_timezone delta_mapping = None if self._date_range and self._date_range.date_to: date_to, delta_mapping = relative_date_parse_with_delta_mapping( - self._date_range.date_to, self._team.timezone_info, always_truncate=True, now=self._now + self._date_range.date_to, self._team.timezone_info, always_truncate=True, now=self.now_with_timezone ) is_relative = not self._date_range or not self._date_range.date_to or delta_mapping is not None - if not self.is_hourly(): + if not self.is_hourly: date_to = date_to.replace(hour=23, minute=59, second=59, microsecond=999999) elif is_relative: date_to = date_to.replace(minute=59, second=59, microsecond=999999) return date_to - def get_earliest_timestamp(self): + def get_earliest_timestamp(self) -> datetime: return get_earliest_timestamp(self._team.pk) - @cached_property def date_from(self) -> datetime: date_from: datetime if self._date_range and self._date_range.date_from == "all": date_from = self.get_earliest_timestamp() elif self._date_range and isinstance(self._date_range.date_from, str): - date_from = relative_date_parse(self._date_range.date_from, self._team.timezone_info, now=self._now) + date_from = relative_date_parse( + self._date_range.date_from, self._team.timezone_info, now=self.now_with_timezone + ) else: - date_from = self._now.replace(hour=0, minute=0, second=0, microsecond=0) - relativedelta( + date_from = self.now_with_timezone.replace(hour=0, minute=0, second=0, microsecond=0) - relativedelta( days=DEFAULT_DATE_FROM_DAYS ) - if not self.is_hourly(): + if not self.is_hourly: date_from = date_from.replace(hour=0, minute=0, second=0, microsecond=0) return date_from @cached_property - def _now(self): - return self._localize_to_team(self._now_non_timezone) - - def _localize_to_team(self, target: datetime): - return target.astimezone(pytz.timezone(self._team.timezone)) + def now_with_timezone(self) -> datetime: + return self._now_without_timezone.astimezone(ZoneInfo(self._team.timezone)) @cached_property def date_to_str(self) -> str: - return self.date_to.strftime("%Y-%m-%d %H:%M:%S") + return self.date_to().strftime("%Y-%m-%d %H:%M:%S") @cached_property def date_from_str(self) -> str: - return self.date_from.strftime("%Y-%m-%d %H:%M:%S") - - def is_hourly(self): - return self.interval.name == "hour" + return self.date_from().strftime("%Y-%m-%d %H:%M:%S") @cached_property - def date_to_as_hogql(self): - return 
parse_expr(f"assumeNotNull(toDateTime('{self.date_to_str}'))") + def is_hourly(self) -> bool: + return self.interval_name == "hour" @cached_property - def date_from_as_hogql(self): - return parse_expr(f"assumeNotNull(toDateTime('{self.date_from_str}'))") - - @cached_property - def interval(self): + def interval_type(self) -> IntervalType: return self._interval or IntervalType.day @cached_property - def one_interval_period_as_hogql(self): - return parse_expr(f"toInterval{self.interval.capitalize()}(1)") + def interval_name(self) -> str: + return self.interval_type.name - @lru_cache - def interval_periods_as_hogql(self, s: str): - return parse_expr(f"toInterval{self.interval.capitalize()}({s})") + def date_to_as_hogql(self) -> ast.Expr: + return ast.Call( + name="assumeNotNull", args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=self.date_to_str))])] + ) - @cached_property - def interval_period_string(self): - return self.interval.value + def date_from_as_hogql(self) -> ast.Expr: + return ast.Call( + name="assumeNotNull", args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=self.date_from_str))])] + ) - @cached_property - def interval_period_string_as_hogql(self): - return ast.Constant(value=self.interval.value) + def one_interval_period(self) -> ast.Expr: + return ast.Call(name=f"toInterval{self.interval_name.capitalize()}", args=[ast.Constant(value=1)]) + + def number_interval_periods(self) -> ast.Expr: + return ast.Call(name=f"toInterval{self.interval_name.capitalize()}", args=[ast.Field(chain=["number"])]) + + def interval_period_string_as_hogql_constant(self) -> ast.Expr: + return ast.Constant(value=self.interval_name) diff --git a/posthog/hogql_queries/test/test_lifecycle_hogql_query.py b/posthog/hogql_queries/test/test_lifecycle_hogql_query.py index 5cc56252b046f..fb35ace5f5baa 100644 --- a/posthog/hogql_queries/test/test_lifecycle_hogql_query.py +++ b/posthog/hogql_queries/test/test_lifecycle_hogql_query.py @@ -1,11 +1,7 @@ -from datetime import datetime - from freezegun import freeze_time -from posthog.hogql.query import execute_hogql_query from posthog.models.utils import UUIDT -from posthog.hogql_queries.lifecycle_hogql_query import create_events_query, create_time_filter, run_lifecycle_query -from posthog.hogql_queries.query_date_range import QueryDateRange +from posthog.hogql_queries.lifecycle_hogql_query import run_lifecycle_query from posthog.schema import DateRange, IntervalType, LifecycleQuery, EventsNode from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, _create_person, flush_persons_and_events @@ -67,99 +63,6 @@ def _create_test_events(self): ] ) - def _run_events_query(self, date_from, date_to, interval): - date_range = QueryDateRange( - date_range=DateRange(date_from=date_from, date_to=date_to), - team=self.team, - interval=interval, - now=datetime.strptime("2020-01-30T00:00:00Z", "%Y-%m-%dT%H:%M:%SZ"), - ) - time_filter = create_time_filter(date_range) - - # TODO probably doesn't make sense to test like this - # maybe this query should be what is returned by the function - events_query = create_events_query(event_filter=time_filter, date_range=date_range) - return execute_hogql_query( - team=self.team, - query=""" - SELECT - start_of_period, count(DISTINCT person_id) AS counts, status - FROM {events_query} - GROUP BY start_of_period, status - """, - query_type="LifecycleQuery", - placeholders={"events_query": events_query}, - ) - - def test_events_query_whole_range(self): - self._create_test_events() - - date_from = 
"2020-01-09" - date_to = "2020-01-19" - - response = self._run_events_query(date_from, date_to, IntervalType.day) - - self.assertEqual( - { - (datetime(2020, 1, 9, 0, 0), 1, "new"), # p2 - (datetime(2020, 1, 10, 0, 0), 1, "dormant"), # p2 - (datetime(2020, 1, 11, 0, 0), 1, "new"), # p1 - (datetime(2020, 1, 12, 0, 0), 1, "new"), # p3 - (datetime(2020, 1, 12, 0, 0), 1, "resurrecting"), # p2 - (datetime(2020, 1, 12, 0, 0), 1, "returning"), # p1 - (datetime(2020, 1, 13, 0, 0), 1, "returning"), # p1 - (datetime(2020, 1, 13, 0, 0), 2, "dormant"), # p2, p3 - (datetime(2020, 1, 14, 0, 0), 1, "dormant"), # p1 - (datetime(2020, 1, 15, 0, 0), 1, "resurrecting"), # p1 - (datetime(2020, 1, 15, 0, 0), 1, "new"), # p4 - (datetime(2020, 1, 16, 0, 0), 2, "dormant"), # p1, p4 - (datetime(2020, 1, 17, 0, 0), 1, "resurrecting"), # p1 - (datetime(2020, 1, 18, 0, 0), 1, "dormant"), # p1 - (datetime(2020, 1, 19, 0, 0), 1, "resurrecting"), # p1 - (datetime(2020, 1, 20, 0, 0), 1, "dormant"), # p1 - }, - set(response.results), - ) - - def test_events_query_partial_range(self): - self._create_test_events() - date_from = "2020-01-12" - date_to = "2020-01-14" - response = self._run_events_query(date_from, date_to, IntervalType.day) - - self.assertEqual( - { - (datetime(2020, 1, 11, 0, 0), 1, "new"), # p1 - (datetime(2020, 1, 12, 0, 0), 1, "new"), # p3 - (datetime(2020, 1, 12, 0, 0), 1, "resurrecting"), # p2 - (datetime(2020, 1, 12, 0, 0), 1, "returning"), # p1 - (datetime(2020, 1, 13, 0, 0), 1, "returning"), # p1 - (datetime(2020, 1, 13, 0, 0), 2, "dormant"), # p2, p3 - (datetime(2020, 1, 14, 0, 0), 1, "dormant"), # p1 - }, - set(response.results), - ) - - # def test_start_on_dormant(self): - # self.create_test_events() - # date_from = "2020-01-13" - # date_to = "2020-01-14" - # response = self.run_events_query(date_from, date_to, IntervalType.day) - # - # self.assertEqual( - # { - # (datetime(2020, 1, 12, 0, 0), 1, "new"), # p3 - # # TODO this currently fails, as it treats p1 as resurrecting. 
- # # This might just be fine, later in the query we would just throw away results before the 13th - # (datetime(2020, 1, 12, 0, 0), 1, "resurrecting"), # p2 - # (datetime(2020, 1, 12, 0, 0), 1, "returning"), # p1 - # (datetime(2020, 1, 13, 0, 0), 1, "returning"), # p1 - # (datetime(2020, 1, 13, 0, 0), 2, "dormant"), # p2, p3 - # (datetime(2020, 1, 14, 0, 0), 1, "dormant"), # p1 - # }, - # set(response.results), - # ) - def _run_lifecycle_query(self, date_from, date_to, interval): series = [EventsNode(event="$pageview")] query = LifecycleQuery( @@ -175,7 +78,7 @@ def test_lifecycle_query_whole_range(self): response = self._run_lifecycle_query(date_from, date_to, IntervalType.day) - statuses = [res["status"] for res in response["result"]] + statuses = [res["status"] for res in response.result] self.assertEqual(["new", "returning", "resurrecting", "dormant"], statuses) self.assertEqual( @@ -357,5 +260,5 @@ def test_lifecycle_query_whole_range(self): "status": "dormant", }, ], - response["result"], + response.result, ) diff --git a/posthog/hogql_queries/test/test_query_date_range.py b/posthog/hogql_queries/test/test_query_date_range.py index 82966cc5f1bff..42787912887b2 100644 --- a/posthog/hogql_queries/test/test_query_date_range.py +++ b/posthog/hogql_queries/test/test_query_date_range.py @@ -10,32 +10,17 @@ def test_parsed_date(self): now = parser.isoparse("2021-08-25T00:00:00.000Z") date_range = DateRange(date_from="-48h") query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.day, now=now) - parsed_date_from = query_date_range.date_from - parsed_date_to = query_date_range.date_to - self.assertEqual( - parsed_date_from, - parser.isoparse("2021-08-23T00:00:00Z"), - ) - self.assertEqual( - parsed_date_to, - parser.isoparse("2021-08-25T23:59:59.999999Z"), - ) + self.assertEqual(query_date_range.date_from(), parser.isoparse("2021-08-23T00:00:00Z")) + self.assertEqual(query_date_range.date_to(), parser.isoparse("2021-08-25T23:59:59.999999Z")) def test_parsed_date_hour(self): now = parser.isoparse("2021-08-25T00:00:00.000Z") date_range = DateRange(date_from="-48h") query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.hour, now=now) - parsed_date_from = query_date_range.date_from - parsed_date_to = query_date_range.date_to - + self.assertEqual(query_date_range.date_from(), parser.isoparse("2021-08-23T00:00:00Z")) self.assertEqual( - parsed_date_from, - parser.isoparse("2021-08-23T00:00:00Z"), - ) - self.assertEqual( - parsed_date_to, - parser.isoparse("2021-08-25T00:59:59.999999Z"), + query_date_range.date_to(), parser.isoparse("2021-08-25T00:59:59.999999Z") ) # ensure last hour is included def test_parsed_date_middle_of_hour(self): @@ -43,34 +28,25 @@ def test_parsed_date_middle_of_hour(self): date_range = DateRange(date_from="2021-08-23 05:00:00", date_to="2021-08-26 07:00:00") query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.hour, now=now) - parsed_date_from = query_date_range.date_from - parsed_date_to = query_date_range.date_to - - self.assertEqual(parsed_date_from, parser.isoparse("2021-08-23 05:00:00Z")) - self.assertEqual(parsed_date_to, parser.isoparse("2021-08-26 07:00:00Z")) # ensure last hour is included + self.assertEqual(query_date_range.date_from(), parser.isoparse("2021-08-23 05:00:00Z")) + self.assertEqual( + query_date_range.date_to(), parser.isoparse("2021-08-26 07:00:00Z") + ) # ensure last hour is included def test_parsed_date_week(self): now = 
parser.isoparse("2021-08-25T00:00:00.000Z") date_range = DateRange(date_from="-7d") query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.week, now=now) - parsed_date_from = query_date_range.date_from - parsed_date_to = query_date_range.date_to - self.assertEqual( - parsed_date_from, - parser.isoparse("2021-08-18 00:00:00Z"), - ) - self.assertEqual( - parsed_date_to, - parser.isoparse("2021-08-25 23:59:59.999999Z"), - ) + self.assertEqual(query_date_range.date_from(), parser.isoparse("2021-08-18 00:00:00Z")) + self.assertEqual(query_date_range.date_to(), parser.isoparse("2021-08-25 23:59:59.999999Z")) def test_is_hourly(self): now = parser.isoparse("2021-08-25T00:00:00.000Z") date_range = DateRange(date_from="-48h") query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.day, now=now) - self.assertFalse(query_date_range.is_hourly()) + self.assertFalse(query_date_range.is_hourly) query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.hour, now=now) - self.assertTrue(query_date_range.is_hourly()) + self.assertTrue(query_date_range.is_hourly) diff --git a/posthog/migrations/0350_add_notebook_text_content.py b/posthog/migrations/0350_add_notebook_text_content.py new file mode 100644 index 0000000000000..bfe4b079b9945 --- /dev/null +++ b/posthog/migrations/0350_add_notebook_text_content.py @@ -0,0 +1,18 @@ +# Generated by Django 3.2.19 on 2023-09-12 18:09 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("posthog", "0349_update_survey_query_name"), + ] + + operations = [ + migrations.AddField( + model_name="notebook", + name="text_content", + field=models.TextField(blank=True, null=True), + ), + ] diff --git a/posthog/models/activity_logging/activity_log.py b/posthog/models/activity_logging/activity_log.py index ba47b2c326ff1..f3b36e2c3dbd0 100644 --- a/posthog/models/activity_logging/activity_log.py +++ b/posthog/models/activity_logging/activity_log.py @@ -99,7 +99,7 @@ class Meta: field_exclusions: Dict[ActivityScope, List[str]] = { - "Notebook": ["id", "last_modified_at", "last_modified_by", "created_at", "created_by"], + "Notebook": ["id", "last_modified_at", "last_modified_by", "created_at", "created_by", "text_content"], "FeatureFlag": ["id", "created_at", "created_by", "is_simple_flag", "experiment", "team", "featureflagoverride"], "Person": [ "id", diff --git a/posthog/models/notebook/notebook.py b/posthog/models/notebook/notebook.py index dde92fddab944..490645909df26 100644 --- a/posthog/models/notebook/notebook.py +++ b/posthog/models/notebook/notebook.py @@ -12,6 +12,7 @@ class Notebook(UUIDModel): team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) title: models.CharField = models.CharField(max_length=256, blank=True, null=True) content: JSONField = JSONField(default=None, null=True, blank=True) + text_content: models.TextField = models.TextField(blank=True, null=True) deleted: models.BooleanField = models.BooleanField(default=False) version: models.IntegerField = models.IntegerField(default=0) created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True) diff --git a/posthog/schema.py b/posthog/schema.py index 72b581e8c863c..e5a4f6a6ab9b2 100644 --- a/posthog/schema.py +++ b/posthog/schema.py @@ -594,6 +594,14 @@ class Config: toggledLifecycles: Optional[List[LifecycleToggle]] = None +class LifecycleQueryResponse(BaseModel): + class Config: + extra = 
Extra.forbid + + result: List[Dict[str, Any]] + timings: Optional[List[QueryTiming]] = None + + class PersonPropertyFilter(BaseModel): class Config: extra = Extra.forbid @@ -1143,6 +1151,7 @@ class Config: PropertyGroupFilter, ] ] = Field(None, description="Property filters for all series") + response: Optional[LifecycleQueryResponse] = None samplingFactor: Optional[float] = Field(None, description="Sampling rate") series: List[Union[EventsNode, ActionsNode]] = Field(..., description="Events and actions to include")
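
For reference, the refactored lifecycle path above can be exercised much like the updated tests do. The following is a minimal sketch, assuming a Django `team` instance is in scope (e.g. inside a test case); the query types come from `posthog.schema` and the runner from `posthog.hogql_queries.lifecycle_hogql_query`, both as introduced in this diff:

from posthog.hogql_queries.lifecycle_hogql_query import run_lifecycle_query
from posthog.schema import DateRange, EventsNode, IntervalType, LifecycleQuery


def lifecycle_statuses(team, date_from: str, date_to: str) -> list:
    # Build the typed query object that process_query now dispatches on
    query = LifecycleQuery(
        dateRange=DateRange(date_from=date_from, date_to=date_to),
        interval=IntervalType.day,
        series=[EventsNode(event="$pageview")],
    )
    # run_lifecycle_query now returns a LifecycleQueryResponse (typed result
    # plus optional HogQL timings) rather than a plain {"result": ...} dict
    response = run_lifecycle_query(team=team, query=query)
    return [row["status"] for row in response.result]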