diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 4af802f0e5612..0000000000000 --- a/.coveragerc +++ /dev/null @@ -1,10 +0,0 @@ -[run] -source = - posthog/ - ee/ - -branch = true - -omit = - */migrations/* - manage.py diff --git a/.deepsource.toml b/.deepsource.toml deleted file mode 100644 index bdcdfe942e552..0000000000000 --- a/.deepsource.toml +++ /dev/null @@ -1,26 +0,0 @@ -version = 1 - -test_patterns = [ - "**/test_*.py", -] - -exclude_patterns = [ - "**/migrations/*.py", -] - -[[analyzers]] -name = "python" -enabled = true - - [analyzers.meta] - runtime_version = "3.x.x" - -[[analyzers]] -name = "docker" -enabled = true - - [analyzers.meta] - dockerfile_paths = [ - "preview.Dockerfile", - "production.Dockerfile", - ] diff --git a/.environment b/.environment deleted file mode 100644 index c678756de8d30..0000000000000 --- a/.environment +++ /dev/null @@ -1 +0,0 @@ -export SECRET_KEY=$PLATFORM_PROJECT_ENTROPY \ No newline at end of file diff --git a/.github/workflows/ci-e2e.yml b/.github/workflows/ci-e2e.yml index 07bd48d32654b..f00214da5d20a 100644 --- a/.github/workflows/ci-e2e.yml +++ b/.github/workflows/ci-e2e.yml @@ -67,11 +67,30 @@ jobs: id: chunk run: echo "chunks=$(ls cypress/e2e/* | jq --slurp --raw-input -c 'split("\n")[:-1] | _nwise(3) | join("\n")' | jq --slurp -c .)" >> $GITHUB_OUTPUT + container: + name: Build and cache container image + runs-on: ubuntu-latest + timeout-minutes: 60 + needs: [changes] + permissions: + contents: read + id-token: write # allow issuing OIDC tokens for this workflow run + steps: + - name: Checkout + if: needs.changes.outputs.shouldTriggerCypress == 'true' + uses: actions/checkout@v3 + - name: Get Docker image cached in Depot + if: needs.changes.outputs.shouldTriggerCypress == 'true' + # Build the container image in preparation for the E2E tests + uses: ./.github/actions/build-n-cache-image + with: + actions-id-token-request-url: ${{ env.ACTIONS_ID_TOKEN_REQUEST_URL }} + cypress: name: Cypress E2E 
tests (${{ strategy.job-index }}) runs-on: ubuntu-latest timeout-minutes: 60 - needs: [chunks, changes] + needs: [chunks, changes, container] permissions: id-token: write # allow issuing OIDC tokens for this workflow run diff --git a/.github/workflows/customer-data-pipeline.yml b/.github/workflows/customer-data-pipeline.yml index 6a179053a3030..ff60596f2193a 100644 --- a/.github/workflows/customer-data-pipeline.yml +++ b/.github/workflows/customer-data-pipeline.yml @@ -46,7 +46,7 @@ jobs: images: ghcr.io/${{ steps.lowercase.outputs.repository }}/cdp # Make the image tags used for docker cache. We use this rather than - # ${{ github.repository }} directly because the repository + # ${{ github.repository }} directly because the repository # organization name is has upper case characters, which are not # allowed in docker image names. - uses: docker/metadata-action@v4 diff --git a/.github/workflows/storybook-chromatic.yml b/.github/workflows/storybook-chromatic.yml index 0ad36ae9ebf9f..19b153aa0bfda 100644 --- a/.github/workflows/storybook-chromatic.yml +++ b/.github/workflows/storybook-chromatic.yml @@ -165,8 +165,24 @@ jobs: if [ $ADDED -gt 0 ] || [ $MODIFIED -gt 0 ]; then echo "Snapshots updated ($ADDED new, $MODIFIED changed), running OptiPNG" apt update && apt install -y optipng - git add frontend/__snapshots__/ playwright/ - pnpm lint-staged + optipng -clobber -o4 -strip all + + # we don't want to _always_ run OptiPNG + # so, we run it after checking for a diff + # but, the files we diffed might then be changed by OptiPNG + # and as a result they might no longer be different... 
+ + # we check again + git diff --name-status frontend/__snapshots__/ # For debugging + ADDED=$(git diff --name-status frontend/__snapshots__/ | grep '^A' | wc -l) + MODIFIED=$(git diff --name-status frontend/__snapshots__/ | grep '^M' | wc -l) + DELETED=$(git diff --name-status frontend/__snapshots__/ | grep '^D' | wc -l) + TOTAL=$(git diff --name-status frontend/__snapshots__/ | wc -l) + + if [ $ADDED -gt 0 ] || [ $MODIFIED -gt 0 ]; then + echo "Snapshots updated ($ADDED new, $MODIFIED changed), _even after_ running OptiPNG" + git add frontend/__snapshots__/ playwright/ + fi fi echo "${{ matrix.browser }}-${{ matrix.shard }}-added=$ADDED" >> $GITHUB_OUTPUT diff --git a/.storybook/preview.tsx b/.storybook/preview.tsx index 0cda6703cfd37..9b0a76da1d367 100644 --- a/.storybook/preview.tsx +++ b/.storybook/preview.tsx @@ -15,6 +15,17 @@ const setupMsw = () => { // Make sure the msw worker is started worker.start({ quiet: true, + onUnhandledRequest(request, print) { + // MSW warns on all unhandled requests, but we don't necessarily care + const pathAllowList = ['/images/'] + + if (pathAllowList.some((path) => request.url.pathname.startsWith(path))) { + return + } + + // Otherwise, default MSW warning behavior + print.warning() + }, }) ;(window as any).__mockServiceWorker = worker ;(window as any).POSTHOG_APP_CONTEXT = getStorybookAppContext() diff --git a/.storybook/test-runner.ts b/.storybook/test-runner.ts index 464e8d83c3e08..9327e29f625a3 100644 --- a/.storybook/test-runner.ts +++ b/.storybook/test-runner.ts @@ -57,6 +57,7 @@ const LOADER_SELECTORS = [ '.LemonTableLoader', '[aria-busy="true"]', '.SessionRecordingPlayer--buffering', + '.Lettermark--unknown', ] const customSnapshotsDir = `${process.cwd()}/frontend/__snapshots__` @@ -207,7 +208,8 @@ async function expectLocatorToMatchStorySnapshot( // Compare structural similarity instead of raw pixels - reducing false positives // See 
https://github.com/americanexpress/jest-image-snapshot#recommendations-when-using-ssim-comparison comparisonMethod: 'ssim', - failureThreshold: 0.0003, + // 0.01 would be a 1% difference + failureThreshold: 0.01, failureThresholdType: 'percent', }) } diff --git a/.test_durations b/.test_durations deleted file mode 100644 index 94306cbc497bc..0000000000000 --- a/.test_durations +++ /dev/null @@ -1 +0,0 @@ -{"ee/api/test/test_capture.py::TestCaptureAPI::test_determine_team_from_request_data_ch": 3.4122722329999533, "ee/api/test/test_capture.py::TestCaptureAPI::test_produce_to_kafka": 0.15640384599998924, "ee/api/test/test_capture.py::TestCaptureAPI::test_unable_to_fetch_team": 0.02767883699993945, "ee/api/test/test_dashboard.py::TestDashboardEnterpriseAPI::test_retrieve_dashboard_allowed_for_project_member": 0.3054151560000946, "ee/api/test/test_dashboard.py::TestDashboardEnterpriseAPI::test_retrieve_dashboard_forbidden_for_org_admin": 0.0507530999999517, "ee/api/test/test_dashboard.py::TestDashboardEnterpriseAPI::test_retrieve_dashboard_forbidden_for_project_outsider": 0.04411500799994883, "ee/api/test/test_dashboard.py::TestDashboardEnterpriseAPI::test_shared_dashboard_in_private_project": 0.07278180200000861, "ee/api/test/test_event_definition.py::TestEventDefinitionEnterpriseAPI::test_retrieve_create_event_definition": 0.3000420400001076, "ee/api/test/test_event_definition.py::TestEventDefinitionEnterpriseAPI::test_retrieve_existing_event_definition": 0.051290033000100266, "ee/api/test/test_event_definition.py::TestEventDefinitionEnterpriseAPI::test_search_event_definition": 0.12399333000018942, "ee/api/test/test_event_definition.py::TestEventDefinitionEnterpriseAPI::test_update_event_definition": 0.058352151000008234, "ee/api/test/test_event_definition.py::TestEventDefinitionEnterpriseAPI::test_update_event_without_license": 0.04512456800023301, "ee/api/test/test_event_definition.py::TestEventDefinitionEnterpriseAPI::test_with_expired_license": 
0.04844436400014729, "ee/api/test/test_hooks.py::TestHooksAPI::test_create_hook": 0.4614861830001473, "ee/api/test/test_hooks.py::TestHooksAPI::test_create_hook_with_resource_id": 0.24908502599987514, "ee/api/test/test_hooks.py::TestHooksAPI::test_delete_hook": 0.24549151300004723, "ee/api/test/test_license.py::TestLicenseAPI::test_can_create_license": 0.2613849530000607, "ee/api/test/test_license.py::TestLicenseAPI::test_can_list_and_retrieve_licenses": 0.03875799099989763, "ee/api/test/test_license.py::TestLicenseAPI::test_friendly_error_when_license_key_is_invalid": 0.03096323099998699, "ee/api/test/test_organization.py::TestOrganizationEnterpriseAPI::test_create_organization": 0.39356956799997533, "ee/api/test/test_organization.py::TestOrganizationEnterpriseAPI::test_create_two_similarly_named_organizations": 0.08080907700013995, "ee/api/test/test_organization.py::TestOrganizationEnterpriseAPI::test_delete_last_organization": 0.12931684499994844, "ee/api/test/test_organization.py::TestOrganizationEnterpriseAPI::test_delete_organization_owning": 0.10213543700001537, "ee/api/test/test_organization.py::TestOrganizationEnterpriseAPI::test_delete_second_managed_organization": 0.11091855899985603, "ee/api/test/test_organization.py::TestOrganizationEnterpriseAPI::test_feature_available_self_hosted_has_license": 0.023947615999986738, "ee/api/test/test_organization.py::TestOrganizationEnterpriseAPI::test_feature_available_self_hosted_license_expired": 0.1589728980000018, "ee/api/test/test_organization.py::TestOrganizationEnterpriseAPI::test_feature_available_self_hosted_no_license": 0.017647444000090218, "ee/api/test/test_organization.py::TestOrganizationEnterpriseAPI::test_no_delete_organization_not_belonging_to": 0.07041076400003021, "ee/api/test/test_organization.py::TestOrganizationEnterpriseAPI::test_no_delete_organization_not_owning": 0.043380664999972396, 
"ee/api/test/test_organization.py::TestOrganizationEnterpriseAPI::test_no_update_organization_not_belonging_to": 0.06928309499994612, "ee/api/test/test_organization.py::TestOrganizationEnterpriseAPI::test_update_org": 0.14897150600017994, "ee/api/test/test_property_definition.py::TestPropertyDefinitionEnterpriseAPI::test_filter_property_definitions": 0.2764710449998802, "ee/api/test/test_property_definition.py::TestPropertyDefinitionEnterpriseAPI::test_retrieve_create_property_definition": 0.05701777100000527, "ee/api/test/test_property_definition.py::TestPropertyDefinitionEnterpriseAPI::test_retrieve_existing_property_definition": 0.04555449299994052, "ee/api/test/test_property_definition.py::TestPropertyDefinitionEnterpriseAPI::test_search_property_definition": 0.11548352700003761, "ee/api/test/test_property_definition.py::TestPropertyDefinitionEnterpriseAPI::test_update_property_definition": 0.053584268000008706, "ee/api/test/test_property_definition.py::TestPropertyDefinitionEnterpriseAPI::test_update_property_without_license": 0.0414815520001639, "ee/api/test/test_property_definition.py::TestPropertyDefinitionEnterpriseAPI::test_with_expired_license": 0.0471586879999677, "ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_create_project": 0.29927609399999255, "ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_delete_open_team_as_org_member_but_project_admin_forbidden": 0.25988916499977677, "ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_delete_private_team_as_org_member_but_project_admin_allowed": 0.31280061100017065, "ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_delete_second_team_as_org_admin_allowed": 0.2947191270000076, "ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_delete_team_as_org_admin_allowed": 0.28631400100005067, "ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_delete_team_as_org_member_forbidden": 0.24558740699990267, 
"ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_disable_access_control_as_org_admin_allowed": 0.2501691469998377, "ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_disable_access_control_as_org_member_and_project_admin_forbidden": 0.24232559099993978, "ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_disable_access_control_as_org_member_forbidden": 0.2495731799999703, "ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_enable_access_control_as_org_admin_allowed": 0.24469330299996273, "ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_enable_access_control_as_org_member_and_project_admin_forbidden": 0.24642732299992076, "ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_enable_access_control_as_org_member_forbidden": 0.2338159949999863, "ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_fetch_nonexistent_team": 0.23022938799999793, "ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_fetch_private_team_as_org_member": 0.24045527900000252, "ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_fetch_private_team_as_org_member_and_project_admin": 0.23917950299994573, "ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_fetch_private_team_as_org_member_and_project_member": 0.23673856300013085, "ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_fetch_team_as_org_admin_works": 0.24385007400007908, "ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_fetch_team_as_org_member_works": 0.23670156100001805, "ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_fetch_team_as_org_outsider": 0.23722019199988154, "ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_list_teams_restricted_ones_hidden": 0.24700385400001323, "ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_no_delete_team_not_administrating_organization": 0.24993142500011345, "ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_no_delete_team_not_belonging_to_organization": 0.4943581330001052, 
"ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_non_admin_cannot_create_project": 0.23412531200006015, "ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_rename_private_project_as_org_member_and_project_member_allowed": 0.2504248530000268, "ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_rename_private_project_as_org_member_forbidden": 0.2445900170000641, "ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_rename_private_project_current_as_org_outsider_forbidden": 0.2498482190001141, "ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_rename_private_project_id_as_org_outsider_forbidden": 0.249083574999986, "ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_rename_project_as_org_member_allowed": 0.25004543100010324, "ee/api/test/test_team.py::TestProjectEnterpriseAPI::test_user_that_does_not_belong_to_an_org_cannot_create_a_project": 0.25705173500000456, "ee/api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_admin_as_org_admin_allowed": 0.2962390970000115, "ee/api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_admin_as_project_admin_allowed": 0.2696964650000382, "ee/api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_admin_as_project_member_forbidden": 0.25836881099985476, "ee/api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_member_as_org_admin_allowed": 0.26233274999992773, "ee/api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_member_as_org_admin_and_project_member_allowed": 0.2789111210000783, "ee/api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_member_as_org_member_and_project_member_forbidden": 0.24943651699993552, "ee/api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_member_as_org_member_but_project_admin_allowed": 0.2690339479999011, "ee/api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_member_as_org_member_forbidden": 0.25450550900006874, 
"ee/api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_member_as_org_owner_allowed": 0.26489671000001636, "ee/api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_member_to_non_current_project_allowed": 0.2612459989999252, "ee/api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_member_to_non_private_project_forbidden": 0.26404286000001775, "ee/api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_member_to_nonexistent_project_forbidden": 0.24467664100018283, "ee/api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_member_to_project_in_outside_organization_forbidden": 0.27798216600001524, "ee/api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_member_to_project_that_is_not_organization_member_forbidden": 0.2871032939998486, "ee/api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_yourself_as_org_admin_forbidden": 0.25712405999991006, "ee/api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_yourself_as_org_member_forbidden": 0.2535015509999994, "ee/api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_demote_yourself_as_org_member_and_project_admin_forbidden": 0.27120667400004095, "ee/api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_leave_project_as_admin_allowed": 0.24760580999998183, "ee/api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_leave_project_as_admin_member": 0.2570391560000189, "ee/api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_leave_project_as_organization_outsider": 0.2384900839999773, "ee/api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_leave_project_as_project_outsider": 0.25019495799995184, "ee/api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_remove_member_as_org_admin_allowed": 0.2622965599999816, "ee/api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_remove_member_as_org_member_allowed": 0.24929880699994555, 
"ee/api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_remove_member_as_org_member_but_project_admin_allowed": 0.25404128200011655, "ee/api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_set_level_of_member_to_admin_as_org_member_but_project_admin_allowed": 0.2635854330000029, "ee/api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_set_level_of_member_to_admin_as_org_member_forbidden": 0.25542446300005395, "ee/api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_set_level_of_member_to_admin_as_org_owner_allowed": 0.28908850899995286, "ee/clickhouse/materialized_columns/test/test_analyze.py::TestMaterializedColumnsAnalyze::test_query_class": 0.2780182679999825, "ee/clickhouse/materialized_columns/test/test_analyze.py::TestMaterializedColumnsAnalyze::test_query_class_edge_cases": 0.2532993400000123, "ee/clickhouse/materialized_columns/test/test_columns.py::TestMaterializedColumns::test_backfilling_data": 2.2649386519999553, "ee/clickhouse/materialized_columns/test/test_columns.py::TestMaterializedColumns::test_caching_and_materializing": 2.04398621200005, "ee/clickhouse/materialized_columns/test/test_columns.py::TestMaterializedColumns::test_column_types": 1.8644900170000938, "ee/clickhouse/materialized_columns/test/test_columns.py::TestMaterializedColumns::test_get_columns_default": 1.2085611839999046, "ee/clickhouse/materialized_columns/test/test_columns.py::TestMaterializedColumns::test_materialized_column_naming": 1.9727072100000669, "ee/clickhouse/models/test/test_action.py::TestActions::test_attributes": 0.305633364000073, "ee/clickhouse/models/test/test_action.py::TestActions::test_empty_selector_same_as_null": 0.28807818400002816, "ee/clickhouse/models/test/test_action.py::TestActions::test_filter_events_by_url": 0.4100594940001656, "ee/clickhouse/models/test/test_action.py::TestActions::test_filter_with_selector_direct_decendant_ordering": 0.3475152020001815, 
"ee/clickhouse/models/test/test_action.py::TestActions::test_filter_with_selector_id": 0.3337548340000467, "ee/clickhouse/models/test/test_action.py::TestActions::test_filter_with_selector_nested": 0.3481742389999454, "ee/clickhouse/models/test/test_action.py::TestActions::test_filter_with_selector_nth_child": 0.32776429999989887, "ee/clickhouse/models/test/test_action.py::TestActions::test_filter_with_selector_star": 0.3322757509998837, "ee/clickhouse/models/test/test_action.py::TestActions::test_no_person_leakage_from_other_teams": 0.2969962819998955, "ee/clickhouse/models/test/test_action.py::TestActions::test_no_steps": 0.2747047359999897, "ee/clickhouse/models/test/test_action.py::TestActions::test_person_property": 0.363022767999837, "ee/clickhouse/models/test/test_action.py::TestActions::test_person_with_different_distinct_id": 0.2866569039999831, "ee/clickhouse/models/test/test_action.py::TestActions::test_with_class": 0.27128364600002897, "ee/clickhouse/models/test/test_action.py::TestActions::test_with_class_with_escaped_slashes": 0.2590381620001381, "ee/clickhouse/models/test/test_action.py::TestActions::test_with_class_with_escaped_symbols": 0.2545901150000418, "ee/clickhouse/models/test/test_action.py::TestActions::test_with_normal_filters": 0.3334807189999083, "ee/clickhouse/models/test/test_action.py::TestActionFormat::test_double": 0.30025336300002436, "ee/clickhouse/models/test/test_action.py::TestActionFormat::test_filter_event_contains_url": 0.2780357220000269, "ee/clickhouse/models/test/test_action.py::TestActionFormat::test_filter_event_exact_url": 0.28150251600004594, "ee/clickhouse/models/test/test_action.py::TestActionFormat::test_filter_event_regex_url": 0.27450472600003195, "ee/clickhouse/models/test/test_cohort.py::TestCohort::test_clickhouse_empty_query": 0.3938014860001431, "ee/clickhouse/models/test/test_cohort.py::TestCohort::test_cohort_change": 1.2360476100000142, 
"ee/clickhouse/models/test/test_cohort.py::TestCohort::test_cohort_get_person_ids_by_cohort_id": 0.42909032500006106, "ee/clickhouse/models/test/test_cohort.py::TestCohort::test_cohortpeople_action_basic": 1.0718043900001248, "ee/clickhouse/models/test/test_cohort.py::TestCohort::test_cohortpeople_action_count": 1.7655990750000683, "ee/clickhouse/models/test/test_cohort.py::TestCohort::test_cohortpeople_basic": 0.7087321549998933, "ee/clickhouse/models/test/test_cohort.py::TestCohort::test_cohortpeople_deleted_person": 1.3573256099999753, "ee/clickhouse/models/test/test_cohort.py::TestCohort::test_cohortpeople_prop_changed": 1.2782052049999493, "ee/clickhouse/models/test/test_cohort.py::TestCohort::test_cohortpeople_timestamp": 0.6951178630000641, "ee/clickhouse/models/test/test_cohort.py::TestCohort::test_cohortpeople_with_cyclic_cohort_filter": 0.6537181539999892, "ee/clickhouse/models/test/test_cohort.py::TestCohort::test_cohortpeople_with_nonexistent_other_cohort_filter": 0.6918364830000883, "ee/clickhouse/models/test/test_cohort.py::TestCohort::test_cohortpeople_with_valid_other_cohort_filter": 2.225866351000036, "ee/clickhouse/models/test/test_cohort.py::TestCohort::test_insert_by_distinct_id_or_email": 0.6286306940000941, "ee/clickhouse/models/test/test_cohort.py::TestCohort::test_prop_cohort_basic": 0.3631944469999553, "ee/clickhouse/models/test/test_cohort.py::TestCohort::test_prop_cohort_basic_action": 0.3576625179999837, "ee/clickhouse/models/test/test_cohort.py::TestCohort::test_prop_cohort_basic_action_days": 0.4586738120000291, "ee/clickhouse/models/test/test_cohort.py::TestCohort::test_prop_cohort_basic_event_days": 0.4226943759998676, "ee/clickhouse/models/test/test_cohort.py::TestCohort::test_prop_cohort_multiple_groups": 0.3471757960001014, "ee/clickhouse/models/test/test_cohort.py::TestCohort::test_prop_cohort_with_negation": 0.32518419199993787, "ee/clickhouse/models/test/test_cohort.py::TestCohort::test_static_cohort_precalculated": 
0.6061559610000131, "ee/clickhouse/models/test/test_dead_letter_queue.py::TestDeadLetterQueue::test_direct_table_insert": 0.2981627890000027, "ee/clickhouse/models/test/test_dead_letter_queue.py::TestDeadLetterQueue::test_kafka_insert": 2.218743123999957, "ee/clickhouse/models/test/test_element.py::TestClickhouseElement::test_broken_class_names": 0.23260135600003196, "ee/clickhouse/models/test/test_element.py::TestClickhouseElement::test_elements_to_string": 0.22320260800006508, "ee/clickhouse/models/test/test_filters.py::TestFilters::test_old_style_properties": 0.21912436899992827, "ee/clickhouse/models/test/test_filters.py::TestFilters::test_recursive_cohort": 0.01232651899999837, "ee/clickhouse/models/test/test_filters.py::TestFilters::test_simplify_cohorts": 0.4339205880000918, "ee/clickhouse/models/test/test_filters.py::TestFilters::test_simplify_entities": 0.008252580999965176, "ee/clickhouse/models/test/test_filters.py::TestFilters::test_simplify_hasdone_cohort": 0.007858159000079468, "ee/clickhouse/models/test/test_filters.py::TestFilters::test_simplify_multi_group_cohort": 0.008058771000037268, "ee/clickhouse/models/test/test_filters.py::TestFilters::test_simplify_no_such_cohort": 0.006307767000066633, "ee/clickhouse/models/test/test_filters.py::TestFilters::test_simplify_not_ee": 0.005643428000212225, "ee/clickhouse/models/test/test_filters.py::TestFilters::test_simplify_static_cohort": 0.0074344329999576075, "ee/clickhouse/models/test/test_filters.py::TestFilters::test_simplify_test_accounts": 0.006698390000110521, "ee/clickhouse/models/test/test_filters.py::TestFilters::test_to_dict": 0.0066974900000786874, "ee/clickhouse/models/test/test_filters.py::TestFiltering::test_boolean_filters": 0.2694861039999523, "ee/clickhouse/models/test/test_filters.py::TestFiltering::test_contains": 0.2643305039999859, "ee/clickhouse/models/test/test_filters.py::TestFiltering::test_does_not_contain": 0.278087206999885, 
"ee/clickhouse/models/test/test_filters.py::TestFiltering::test_element_filter": 0.29178920400011066, "ee/clickhouse/models/test/test_filters.py::TestFiltering::test_element_selectors": 0.25283833499997854, "ee/clickhouse/models/test/test_filters.py::TestFiltering::test_filter_out_team_members": 0.3447479909999629, "ee/clickhouse/models/test/test_filters.py::TestFiltering::test_incomplete_data": 0.20135673399988718, "ee/clickhouse/models/test/test_filters.py::TestFiltering::test_invalid_regex": 0.26393098100015777, "ee/clickhouse/models/test/test_filters.py::TestFiltering::test_is_not": 0.2742751830000998, "ee/clickhouse/models/test/test_filters.py::TestFiltering::test_is_not_set_and_is_set": 0.2779347969999435, "ee/clickhouse/models/test/test_filters.py::TestFiltering::test_is_not_true_false": 0.25105193100012, "ee/clickhouse/models/test/test_filters.py::TestFiltering::test_json_object": 0.3020324010000195, "ee/clickhouse/models/test/test_filters.py::TestFiltering::test_multiple": 0.2585057640000059, "ee/clickhouse/models/test/test_filters.py::TestFiltering::test_multiple_equality": 0.2842406639999808, "ee/clickhouse/models/test/test_filters.py::TestFiltering::test_numerical": 0.31012587399993663, "ee/clickhouse/models/test/test_filters.py::TestFiltering::test_person_cohort_properties": 0.3752441679999947, "ee/clickhouse/models/test/test_filters.py::TestFiltering::test_regex": 0.26722437300009005, "ee/clickhouse/models/test/test_filters.py::TestFiltering::test_simple": 0.2769401400000788, "ee/clickhouse/models/test/test_filters.py::TestFiltering::test_true_false": 0.2753736470001513, "ee/clickhouse/models/test/test_filters.py::TestFiltering::test_user_properties": 0.4389398800000208, "ee/clickhouse/models/test/test_filters.py::TestFiltering::test_user_properties_numerical": 0.3932783100000279, "ee/clickhouse/models/test/test_filters.py::PGTestFilters::test_old_style_properties": 0.2369076289999157, 
"ee/clickhouse/models/test/test_filters.py::PGTestFilters::test_simplify_test_accounts": 0.007483224999873528, "ee/clickhouse/models/test/test_filters.py::PGTestFilters::test_to_dict": 0.0066589779999048915, "ee/clickhouse/models/test/test_plugin_log_entry.py::TestEvent::test_log_limit_works": 0.25374748299987004, "ee/clickhouse/models/test/test_plugin_log_entry.py::TestEvent::test_log_search_works": 0.039133418000005804, "ee/clickhouse/models/test/test_plugin_log_entry.py::TestEvent::test_simple_log_is_fetched": 0.02878853100003198, "ee/clickhouse/models/test/test_property.py::TestPropFormat::test_prop_decimals": 0.38779598199994325, "ee/clickhouse/models/test/test_property.py::TestPropFormat::test_prop_element": 0.5515377639999315, "ee/clickhouse/models/test/test_property.py::TestPropFormat::test_prop_event": 0.3317341040000201, "ee/clickhouse/models/test/test_property.py::TestPropFormat::test_prop_ints_saved_as_strings": 0.4109128919999421, "ee/clickhouse/models/test/test_property.py::TestPropFormat::test_prop_person": 0.3209162919999926, "ee/clickhouse/models/test/test_property.py::TestPropDenormalized::test_prop_event_denormalized": 0.8594906179999953, "ee/clickhouse/models/test/test_property.py::TestPropDenormalized::test_prop_event_denormalized_ints": 0.3324483449999889, "ee/clickhouse/models/test/test_property.py::TestPropDenormalized::test_prop_person_denormalized": 0.4328515360000438, "ee/clickhouse/models/test/test_property.py::test_prop_filter_json_extract[property0-expected_event_indexes0]": 0.2957624640000631, "ee/clickhouse/models/test/test_property.py::test_prop_filter_json_extract[property1-expected_event_indexes1]": 0.3074024239999744, "ee/clickhouse/models/test/test_property.py::test_prop_filter_json_extract[property2-expected_event_indexes2]": 0.3323187369999232, "ee/clickhouse/models/test/test_property.py::test_prop_filter_json_extract[property3-expected_event_indexes3]": 0.2961423860000423, 
"ee/clickhouse/models/test/test_property.py::test_prop_filter_json_extract[property4-expected_event_indexes4]": 0.290731378999908, "ee/clickhouse/models/test/test_property.py::test_prop_filter_json_extract[property5-expected_event_indexes5]": 0.29532083900005546, "ee/clickhouse/models/test/test_property.py::test_prop_filter_json_extract[property6-expected_event_indexes6]": 0.28774381200003063, "ee/clickhouse/models/test/test_property.py::test_prop_filter_json_extract[property7-expected_event_indexes7]": 0.2889992819999634, "ee/clickhouse/models/test/test_property.py::test_prop_filter_json_extract[property8-expected_event_indexes8]": 0.2845401279998896, "ee/clickhouse/models/test/test_property.py::test_prop_filter_json_extract[property9-expected_event_indexes9]": 0.28303444099992703, "ee/clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[property0-expected_event_indexes0]": 0.7811942019999378, "ee/clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[property1-expected_event_indexes1]": 0.3160298430000239, "ee/clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[property2-expected_event_indexes2]": 0.3157902299997204, "ee/clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[property3-expected_event_indexes3]": 0.3040580560000308, "ee/clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[property4-expected_event_indexes4]": 0.30667290600001706, "ee/clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[property5-expected_event_indexes5]": 0.3067553119999502, "ee/clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[property6-expected_event_indexes6]": 0.30191623200005324, "ee/clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[property7-expected_event_indexes7]": 0.3393605830000297, 
"ee/clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[property8-expected_event_indexes8]": 0.321160137999982, "ee/clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[property9-expected_event_indexes9]": 0.31001489799996307, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_basic_funnel_default_funnel_days_breakdown_action": 1.078508217000035, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_basic_funnel_default_funnel_days_breakdown_action_materialized": 1.783673001000011, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_basic_funnel_default_funnel_days_breakdown_event": 1.012095904000148, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_cohort_breakdown": 4.043667342999925, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_cohort_breakdown_materialized": 5.420725929000014, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_event": 1.989234081999939, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_event_materialized": 3.1571343780000234, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_event_no_type": 2.013538778000111, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_event_no_type_materialized": 2.988358690000041, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_event_single_person_events_with_multiple_properties": 1.43275230200004, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_event_single_person_multiple_breakdowns": 1.2715992219999634, 
"ee/clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_event_single_person_multiple_breakdowns_materialized": 1.7754772360001425, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_event_with_other": 2.006685664000088, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_event_with_other_materialized": 3.015030266999929, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_limit": 3.2913461809999944, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_limit_materialized": 5.136119527999881, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_person": 2.735624440000038, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_person_materialized": 3.868653906000077, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_custom_breakdown_limit_with_nulls": 1.491645050000102, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_custom_breakdown_limit_with_nulls_included": 3.162512784000114, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_custom_breakdown_limit_with_nulls_included_materialized": 4.832301138000048, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_custom_breakdown_limit_with_nulls_materialized": 2.3501407210000025, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestFunnelConversionTime::test_funnel_step_conversion_times": 0.6657537549998551, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestFunnelConversionTime::test_funnel_times_with_different_conversion_windows": 1.4371063159999267, 
"ee/clickhouse/queries/funnels/test/test_funnel.py::TestFunnelConversionTime::test_funnel_with_multiple_incomplete_tries": 0.8414155469999969, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_advanced_funnel_exclusions_between_steps": 6.19118383, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_advanced_funnel_multiple_exclusions_between_steps": 5.235706039999968, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_advanced_funnel_with_repeat_steps": 3.5177383219999, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_advanced_funnel_with_repeat_steps_out_of_order_events": 3.6297619970000596, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_basic_funnel_default_funnel_days": 0.4311247259998936, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_basic_funnel_with_derivative_steps": 0.9098471130000689, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_basic_funnel_with_derivative_steps_materialized": 1.4736193389999244, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_basic_funnel_with_repeat_step_updated_param": 1.1744274700000688, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_basic_funnel_with_repeat_steps": 0.8256870169998365, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_conversion_window": 1.941807888999847, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_default": 0.457138329999907, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_events": 1.497510004999981, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_exclusion_no_end_event": 1.0378132280000045, 
"ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_exclusions_full_window": 0.9682012870000563, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_exclusions_invalid_params": 0.21746392000000014, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_exclusions_with_actions": 1.0386791759999596, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_exclusions_with_actions_materialized": 1.463662830999965, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_filter_by_action_with_person_properties": 0.8201529910002137, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_filter_by_action_with_person_properties_materialized": 1.0440497829998776, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_filter_test_accounts": 0.547256755999797, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_filter_test_accounts_materialized": 0.6288645909999104, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_multiple_actions": 0.6093829610000512, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_multiple_actions_materialized": 0.8101937129999897, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_no_events": 0.22958222099998693, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_person_prop": 0.6995953959999497, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_person_prop_materialized": 0.9251103810000814, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_prop_filters": 0.6936022520000051, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_prop_filters_materialized": 
0.9841340140000057, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_prop_filters_per_entity": 0.7868983670000489, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_prop_filters_per_entity_materialized": 1.0498301260000744, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_skipped_step": 0.5648429799999803, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_with_actions": 1.0159573589999127, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_with_actions_and_events": 0.0022172279999495004, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_with_actions_and_events_materialized": 0.22825194799997917, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_with_actions_materialized": 1.4004982760000075, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_with_denormalised_properties": 0.5107160390001582, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_with_elements_chain": 1.4146405200000345, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_with_entity_person_property_filters": 0.5841914230001066, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_with_entity_person_property_filters_materialized": 0.6993902919999755, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_with_matching_properties": 3.7941916690000426, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_with_matching_properties_materialized": 5.831684114000041, "ee/clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_with_single_step": 0.43438579700000446, 
"ee/clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_basic_funnel_correlation_with_events": 4.385965505000058, "ee/clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_basic_funnel_correlation_with_properties": 1.8310647380001228, "ee/clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_basic_funnel_correlation_with_properties_materialized": 2.4829929260000654, "ee/clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_correlation_with_multiple_properties": 3.0872295520000534, "ee/clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_correlation_with_multiple_properties_materialized": 4.024911208000162, "ee/clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_correlation_with_properties_raises_validation_error": 0.2877717890000895, "ee/clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_discarding_insignificant_events": 1.6137499870001193, "ee/clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_events_within_conversion_window_for_correlation": 0.7259028970000827, "ee/clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_funnel_correlation_with_event_properties": 3.0256051159999515, "ee/clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_funnel_correlation_with_event_properties_autocapture": 2.5083253620000505, "ee/clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_funnel_correlation_with_event_properties_autocapture_materialized": 4.304017837999822, 
"ee/clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_funnel_correlation_with_event_properties_exclusions": 1.6548500520000289, "ee/clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_funnel_correlation_with_event_properties_materialized": 4.7832929839999, "ee/clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_no_divide_by_zero_errors": 0.8458349899999575, "ee/clickhouse/queries/funnels/test/test_funnel_correlation_persons.py::TestClickhouseFunnelCorrelationPersons::test_basic_funnel_correlation_with_events": 3.3022785139999087, "ee/clickhouse/queries/funnels/test/test_funnel_correlation_persons.py::TestClickhouseFunnelCorrelationPersons::test_people_arent_returned_multiple_times": 0.6194627189997846, "ee/clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_basic_offset": 7.4679280629998175, "ee/clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_first_step": 2.0018184030000157, "ee/clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_first_step_breakdown_person": 2.707419863000041, "ee/clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_first_step_breakdown_person_materialized": 3.9608897980003803, "ee/clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_first_step_breakdowns": 1.7009337240001514, "ee/clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_first_step_breakdowns_materialized": 2.5235194669999146, "ee/clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_funnel_cohort_breakdown_persons": 0.7514521130001413, "ee/clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_funnel_cohort_breakdown_persons_materialized": 0.9805009030003475, 
"ee/clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_last_step": 2.0504414980000547, "ee/clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_last_step_dropoff": 1.9935798980000072, "ee/clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_second_step_dropoff": 1.9569046689998686, "ee/clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_steps_with_custom_steps_parameter_are_equivalent_to_funnel_step": 4.226075908999974, "ee/clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_steps_with_custom_steps_parameter_overrides_funnel_step": 1.9434825630000887, "ee/clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_steps_with_custom_steps_parameter_where_funnel_step_equivalence_isnt_possible": 2.7207068549998894, "ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_basic_funnel_default_funnel_days_breakdown_action": 1.1236643930001264, "ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_basic_funnel_default_funnel_days_breakdown_action_materialized": 1.5644911669999146, "ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_basic_funnel_default_funnel_days_breakdown_event": 1.076353946999916, "ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_cohort_breakdown": 4.847956962999888, "ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_cohort_breakdown_materialized": 6.175481664000245, "ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_event": 1.7884084490001442, "ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_event_materialized": 2.4616166979999434, 
"ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_event_no_type": 1.669936473000007, "ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_event_no_type_materialized": 2.418227381000179, "ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_event_single_person_events_with_multiple_properties": 1.4016006029999062, "ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_event_single_person_multiple_breakdowns": 1.4593288470000516, "ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_event_single_person_multiple_breakdowns_materialized": 2.0258372070002224, "ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_event_with_other": 1.74509642299995, "ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_event_with_other_materialized": 2.5626580629998443, "ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_limit": 3.385648183000285, "ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_limit_materialized": 4.914983846999803, "ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_person": 2.2608127130001776, "ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_person_materialized": 3.3384852640001554, "ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_custom_breakdown_limit_with_nulls": 1.5820777789999738, 
"ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_custom_breakdown_limit_with_nulls_included": 2.7351081209999393, "ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_custom_breakdown_limit_with_nulls_included_materialized": 4.016051214999834, "ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_custom_breakdown_limit_with_nulls_materialized": 2.176098355000022, "ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_strict_breakdown_events_with_multiple_properties": 1.471916070999896, "ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsConversionTime::test_funnel_step_conversion_times": 0.6682651129999613, "ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsConversionTime::test_funnel_times_with_different_conversion_windows": 1.4293016009999064, "ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsConversionTime::test_funnel_with_multiple_incomplete_tries": 0.7461131239999759, "ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictSteps::test_advanced_strict_funnel": 2.4217684980001195, "ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictSteps::test_basic_strict_funnel": 1.5853855069999554, "ee/clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictSteps::test_basic_strict_funnel_conversion_times": 1.216331930000024, "ee/clickhouse/queries/funnels/test/test_funnel_strict_persons.py::TestFunnelStrictStepsPersons::test_first_step": 2.2074801200001275, "ee/clickhouse/queries/funnels/test/test_funnel_strict_persons.py::TestFunnelStrictStepsPersons::test_second_step": 2.0549394879999454, "ee/clickhouse/queries/funnels/test/test_funnel_strict_persons.py::TestFunnelStrictStepsPersons::test_second_step_dropoff": 
2.1008856820001256, "ee/clickhouse/queries/funnels/test/test_funnel_strict_persons.py::TestFunnelStrictStepsPersons::test_third_step": 2.021816102999992, "ee/clickhouse/queries/funnels/test/test_funnel_time_to_convert.py::TestFunnelTrends::test_auto_bin_count_single_step": 2.7636818409998796, "ee/clickhouse/queries/funnels/test/test_funnel_time_to_convert.py::TestFunnelTrends::test_auto_bin_count_single_step_duplicate_events": 0.0019620119999217422, "ee/clickhouse/queries/funnels/test/test_funnel_time_to_convert.py::TestFunnelTrends::test_auto_bin_count_total": 4.720440585999995, "ee/clickhouse/queries/funnels/test/test_funnel_time_to_convert.py::TestFunnelTrends::test_basic_strict": 2.534608437000088, "ee/clickhouse/queries/funnels/test/test_funnel_time_to_convert.py::TestFunnelTrends::test_basic_unordered": 6.098391111999945, "ee/clickhouse/queries/funnels/test/test_funnel_time_to_convert.py::TestFunnelTrends::test_custom_bin_count_single_step": 2.1733683489999294, "ee/clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_all_date_range": 0.9503746010000214, "ee/clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_all_results_for_day_interval": 0.8864150009999321, "ee/clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_day_interval": 0.8417590869999003, "ee/clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_from_second_step": 0.70795024500012, "ee/clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_funnel_step_breakdown_event": 0.7512618979997114, "ee/clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_funnel_step_breakdown_person": 0.8645953449997705, "ee/clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_funnel_trend_cohort_breakdown": 1.0014512499999455, "ee/clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_hour_interval": 0.5218155439997645, 
"ee/clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_month_interval": 0.8407360670000799, "ee/clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_no_event_in_period": 0.5214756240002316, "ee/clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_one_person_in_multiple_periods_and_windows": 1.2615565769997374, "ee/clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_one_person_in_multiple_periods_and_windows_in_strict_funnel": 0.6489340870000433, "ee/clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_one_person_in_multiple_periods_and_windows_in_unordered_funnel": 2.179022748000307, "ee/clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_only_one_user_reached_one_step": 1.3171744910000598, "ee/clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_period_not_final": 0.5554782740000519, "ee/clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_steps_performed_in_period_but_in_reverse": 0.5603675560000738, "ee/clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_to_second_step": 0.6995264180002323, "ee/clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_two_runs_by_single_user_in_one_period": 0.5826955500001532, "ee/clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_week_interval": 0.8130320910001956, "ee/clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_window_size_one_day": 0.8851428680000026, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_basic_funnel_default_funnel_days_breakdown_action": 1.106617993999862, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_basic_funnel_default_funnel_days_breakdown_action_materialized": 1.6001500820000274, 
"ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_basic_funnel_default_funnel_days_breakdown_event": 1.0399460520000048, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_cohort_breakdown": 7.9801272100000915, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_cohort_breakdown_materialized": 10.960380491000024, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_event": 3.9825215820001176, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_event_materialized": 6.1375313139999434, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_event_no_type": 4.114238796999871, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_event_no_type_materialized": 5.858587421000038, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_event_single_person_events_with_multiple_properties": 2.4665159210001093, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_event_single_person_multiple_breakdowns": 1.4768252750002375, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_event_single_person_multiple_breakdowns_materialized": 1.979720714999985, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_event_with_other": 4.212692176000019, 
"ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_event_with_other_materialized": 6.207084612000244, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_limit": 3.7916767340000206, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_limit_materialized": 5.7921931400001085, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_person": 6.225573549000046, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_person_materialized": 9.011092076000068, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_custom_breakdown_limit_with_nulls": 2.5194896080004128, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_custom_breakdown_limit_with_nulls_included": 6.134760683999957, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_custom_breakdown_limit_with_nulls_included_materialized": 8.977437395000152, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_custom_breakdown_limit_with_nulls_materialized": 3.4509319530000084, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsConversionTime::test_funnel_step_conversion_times": 0.9826531189996786, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsConversionTime::test_funnel_times_with_different_conversion_windows": 2.3925724220000575, 
"ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsConversionTime::test_funnel_with_multiple_incomplete_tries": 1.4762856330000886, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedSteps::test_advanced_funnel_multiple_exclusions_between_steps": 9.308819315000164, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedSteps::test_basic_unordered_funnel": 4.205654740999762, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedSteps::test_basic_unordered_funnel_conversion_times": 2.7140712579998763, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedSteps::test_big_multi_step_unordered_funnel": 5.020607273000223, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedSteps::test_funnel_exclusions_full_window": 1.501802815000019, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedSteps::test_funnel_exclusions_invalid_params": 0.22715396200010218, "ee/clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedSteps::test_single_event_unordered_funnel": 0.512372287000062, "ee/clickhouse/queries/funnels/test/test_funnel_unordered_persons.py::TestFunnelUnorderedStepsPersons::test_first_step": 2.5514231310000923, "ee/clickhouse/queries/funnels/test/test_funnel_unordered_persons.py::TestFunnelUnorderedStepsPersons::test_invalid_steps": 0.3232778290000624, "ee/clickhouse/queries/funnels/test/test_funnel_unordered_persons.py::TestFunnelUnorderedStepsPersons::test_last_step": 2.351788750999958, "ee/clickhouse/queries/funnels/test/test_funnel_unordered_persons.py::TestFunnelUnorderedStepsPersons::test_last_step_dropoff": 2.3722616219999964, "ee/clickhouse/queries/funnels/test/test_funnel_unordered_persons.py::TestFunnelUnorderedStepsPersons::test_second_step_dropoff": 2.3780847600000925, 
"ee/clickhouse/queries/session_recordings/test/test_clickhouse_session_recording.py::TestClickhouseSessionRecording::test_get_chunked_snapshots": 5.355217191000293, "ee/clickhouse/queries/session_recordings/test/test_clickhouse_session_recording.py::TestClickhouseSessionRecording::test_get_chunked_snapshots_with_specific_limit_and_offset": 3.079139939999777, "ee/clickhouse/queries/session_recordings/test/test_clickhouse_session_recording.py::TestClickhouseSessionRecording::test_get_metadata": 0.29487378700014233, "ee/clickhouse/queries/session_recordings/test/test_clickhouse_session_recording.py::TestClickhouseSessionRecording::test_get_metadata_does_not_leak_teams": 0.2997519719999673, "ee/clickhouse/queries/session_recordings/test/test_clickhouse_session_recording.py::TestClickhouseSessionRecording::test_get_metadata_for_chunked_snapshots": 3.3678246719998697, "ee/clickhouse/queries/session_recordings/test/test_clickhouse_session_recording.py::TestClickhouseSessionRecording::test_get_metadata_for_non_existant_session_id": 0.24665759399999843, "ee/clickhouse/queries/session_recordings/test/test_clickhouse_session_recording.py::TestClickhouseSessionRecording::test_get_snapshots": 0.2789712670000881, "ee/clickhouse/queries/session_recordings/test/test_clickhouse_session_recording.py::TestClickhouseSessionRecording::test_get_snapshots_does_not_leak_teams": 0.26176136999993105, "ee/clickhouse/queries/session_recordings/test/test_clickhouse_session_recording.py::TestClickhouseSessionRecording::test_get_snapshots_with_no_such_session": 0.21774591899975348, "ee/clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_action_filter": 0.6400951939999686, "ee/clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_all_filters_at_once": 0.42132641600005627, 
"ee/clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_all_sessions_recording_object_keys": 0.27893696599994655, "ee/clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_all_sessions_recording_object_keys_with_entity_filter": 0.34368251799969585, "ee/clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_basic_query": 0.2925046510001721, "ee/clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_date_from_filter": 0.3438917290000063, "ee/clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_date_to_filter": 0.32966690500006735, "ee/clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_duration_filter": 0.34795046499993987, "ee/clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_event_filter": 0.45357068500015885, "ee/clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_event_filter_with_properties": 0.47338933400033056, "ee/clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_multiple_event_filters": 0.4976994430001014, "ee/clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_pagination": 0.43227315100011765, "ee/clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_person_id_filter": 0.3301338090000172, 
"ee/clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_recording_that_spans_time_bounds": 0.26656822999984797, "ee/clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_recording_without_fullsnapshot_dont_appear": 0.2763639970000895, "ee/clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_recordings_dont_leak_data_between_teams": 0.2823512440002105, "ee/clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_teams_dont_leak_event_filter": 0.34781993300020986, "ee/clickhouse/queries/test/test_breakdown_props.py::TestBreakdownProps::test_breakdown_person_props": 0.4240229439997165, "ee/clickhouse/queries/test/test_breakdown_props.py::TestBreakdownProps::test_breakdown_person_props_materialized": 0.9909760599998663, "ee/clickhouse/queries/test/test_breakdown_props.py::TestBreakdownProps::test_breakdown_person_props_with_entity_filter": 1.0490752230000453, "ee/clickhouse/queries/test/test_column_optimizer.py::TestColumnOptimizer::test_materialized_columns_checks": 0.5671407290001298, "ee/clickhouse/queries/test/test_column_optimizer.py::TestColumnOptimizer::test_properties_used_in_filter": 0.2245541970000886, "ee/clickhouse/queries/test/test_column_optimizer.py::TestColumnOptimizer::test_properties_used_in_filter_with_actions": 0.22966599400001542, "ee/clickhouse/queries/test/test_column_optimizer.py::TestColumnOptimizer::test_should_query_element_chain_column": 0.22840512099992338, "ee/clickhouse/queries/test/test_column_optimizer.py::TestColumnOptimizer::test_should_query_element_chain_column_with_actions": 0.2599629470000764, "ee/clickhouse/queries/test/test_event_query.py::TestEventQuery::test_account_filters": 0.5829281419999006, 
"ee/clickhouse/queries/test/test_event_query.py::TestEventQuery::test_action_with_person_property_filter": 0.4718200100001013, "ee/clickhouse/queries/test/test_event_query.py::TestEventQuery::test_basic_event_filter": 0.31612289800000326, "ee/clickhouse/queries/test/test_event_query.py::TestEventQuery::test_cohort_filter": 0.39950662499995815, "ee/clickhouse/queries/test/test_event_query.py::TestEventQuery::test_denormalised_props": 0.4242033549999178, "ee/clickhouse/queries/test/test_event_query.py::TestEventQuery::test_element": 0.3523264969999218, "ee/clickhouse/queries/test/test_event_query.py::TestEventQuery::test_entity_filtered_by_cohort": 0.48358562899966273, "ee/clickhouse/queries/test/test_event_query.py::TestEventQuery::test_event_properties_filter": 0.3767018349999489, "ee/clickhouse/queries/test/test_event_query.py::TestEventQuery::test_person_properties_filter": 0.463810181999861, "ee/clickhouse/queries/test/test_event_query.py::TestEventQuery::test_static_cohort_filter": 0.3803838469998482, "ee/clickhouse/queries/test/test_lifecycle.py::TestClickhouseLifecycle::test_filter_test_accounts": 2.412463325999852, "ee/clickhouse/queries/test/test_lifecycle.py::TestClickhouseLifecycle::test_lifecycle_trend": 0.9208094709997567, "ee/clickhouse/queries/test/test_lifecycle.py::TestClickhouseLifecycle::test_lifecycle_trend_action": 0.9175159799997346, "ee/clickhouse/queries/test/test_lifecycle.py::TestClickhouseLifecycle::test_lifecycle_trend_all_time": 0.9897915679998732, "ee/clickhouse/queries/test/test_lifecycle.py::TestClickhouseLifecycle::test_lifecycle_trend_months": 0.9456509099998129, "ee/clickhouse/queries/test/test_lifecycle.py::TestClickhouseLifecycle::test_lifecycle_trend_people": 1.606526815999814, "ee/clickhouse/queries/test/test_lifecycle.py::TestClickhouseLifecycle::test_lifecycle_trend_people_paginated": 6.768739425000149, "ee/clickhouse/queries/test/test_lifecycle.py::TestClickhouseLifecycle::test_lifecycle_trend_prop_filtering": 
1.0175167859999874, "ee/clickhouse/queries/test/test_lifecycle.py::TestClickhouseLifecycle::test_lifecycle_trend_weeks": 0.9423590289998174, "ee/clickhouse/queries/test/test_lifecycle.py::TestClickhouseLifecycle::test_lifecycle_trends_distinct_id_repeat": 0.8061594349999268, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_current_url_paths_and_logic": 3.6759169970000585, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_custom_event_paths": 0.763400655000396, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_denormalized_properties": 3.735884242999873, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_event_exclusion_filters_with_wildcards": 1.0389361179998104, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_event_inclusion_exclusion_filters": 2.402655961999926, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_event_inclusion_exclusion_filters_across_single_person": 1.5601282509999237, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_by_funnel_after_dropoff": 8.76244784299979, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_by_funnel_after_step": 4.328247638000221, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_by_funnel_after_step_limit": 14.897552395000275, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_by_funnel_after_step_respects_conversion_window": 7.797880035000162, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_by_funnel_before_dropoff": 4.393292299999985, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_by_funnel_before_step": 4.380504735999921, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_by_funnel_between_step": 9.845762665000166, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_by_grouping": 3.494847931000095, 
"ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_by_grouping_replacement": 1.0172335239999484, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_by_grouping_replacement_multiple": 1.2942971729999044, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_event_ordering": 4.323235831000147, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_grouping_across_people": 0.7331012700001338, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_grouping_with_evil_input": 0.6439357049996488, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_min_edge_weight": 4.122319311999945, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_removes_duplicates": 0.6541583969999465, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_respect_session_limits": 0.6215915100001439, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_paths_end": 0.758378329000152, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_paths_end_materialized": 1.0690749079999478, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_paths_in_window": 0.7189631339999778, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_paths_person_dropoffs": 5.0039126689996465, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_paths_properties_filter": 0.784160909999855, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_paths_start": 1.464056790000086, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_paths_start_and_end": 2.967617168000288, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_paths_start_and_end_materialized": 3.84066483100014, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_paths_start_dropping_orphaned_edges": 1.353246177000301, 
"ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_properties_queried_using_path_filter": 0.35585270899991883, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_screen_paths": 0.8873554930003138, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_step_conversion_times": 0.6841376240001864, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_step_limit": 3.160860169999978, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePathsEdgeValidation::test_basic_forest": 0.010203489999867088, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePathsEdgeValidation::test_basic_forest_with_dangling_and_cross_edges": 0.007467431999884866, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePathsEdgeValidation::test_basic_forest_with_dangling_edges": 0.005494517999977688, "ee/clickhouse/queries/test/test_paths.py::TestClickhousePathsEdgeValidation::test_no_start_point": 0.00558942400016349, "ee/clickhouse/queries/test/test_person_query.py::test_person_query": 0.4368775010000263, "ee/clickhouse/queries/test/test_person_query.py::test_person_query_with_extra_requested_fields": 0.4136416559999816, "ee/clickhouse/queries/test/test_person_query.py::test_person_query_with_entity_filters": 0.40228060000026744, "ee/clickhouse/queries/test/test_person_query.py::test_person_query_with_extra_fields": 0.354152213000134, "ee/clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_day_interval": 0.7009385969997766, "ee/clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_filter_test_accounts": 0.7092255790000763, "ee/clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_first_time_retention": 0.9082447040000261, "ee/clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_hour_interval": 0.6466767550000441, "ee/clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_interval_rounding": 0.6302381039997726, 
"ee/clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_minute_interval": 0.22697304600023926, "ee/clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_month_interval": 0.6517703499998788, "ee/clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_retention_action_start_point": 0.6284906020000562, "ee/clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_retention_default": 0.5861015469999984, "ee/clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_retention_event_action": 0.5865528729996186, "ee/clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_retention_graph": 0.6064644260000023, "ee/clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_retention_invalid_properties": 0.2471246139998584, "ee/clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_retention_multiple_events": 0.6558271859998968, "ee/clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_retention_people": 0.5245372839999618, "ee/clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_retention_people_first_time": 0.8992322880001211, "ee/clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_retention_people_in_period": 0.6224482549998811, "ee/clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_retention_people_in_period_first_time": 1.0508572710000408, "ee/clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_retention_people_paginated": 13.451846693000107, "ee/clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_retention_with_properties": 0.6330848729999161, "ee/clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_retention_with_user_properties": 0.709399391000261, "ee/clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_week_interval": 0.6508730020000257, 
"ee/clickhouse/queries/test/test_session_recording.py::TestClickhouseSessionRecording::test_filter_sessions_by_recording_duration_gt": 0.40078271500010487, "ee/clickhouse/queries/test/test_session_recording.py::TestClickhouseSessionRecording::test_filter_sessions_by_recording_duration_lt": 0.37407546899999033, "ee/clickhouse/queries/test/test_session_recording.py::TestClickhouseSessionRecording::test_filter_sessions_by_unseen_recording": 0.3732756229999268, "ee/clickhouse/queries/test/test_session_recording.py::TestClickhouseSessionRecording::test_join_with_session_recordings": 0.442958157999783, "ee/clickhouse/queries/test/test_session_recording.py::TestClickhouseSessionRecording::test_query_run": 0.3324381559998528, "ee/clickhouse/queries/test/test_session_recording.py::TestClickhouseSessionRecording::test_query_run_queries_with_specific_limit_and_offset": 1.956364721, "ee/clickhouse/queries/test/test_session_recording.py::TestClickhouseSessionRecording::test_query_run_sequential_next_urls": 1.975238716000149, "ee/clickhouse/queries/test/test_session_recording.py::TestClickhouseSessionRecording::test_query_run_session_with_chunks_with_partial_snapshots": 0.30553319799992096, "ee/clickhouse/queries/test/test_session_recording.py::TestClickhouseSessionRecording::test_query_run_with_no_sessions": 0.208793194999771, "ee/clickhouse/queries/test/test_session_recording.py::TestClickhouseSessionRecording::test_query_run_with_no_such_session": 0.2219356569996762, "ee/clickhouse/queries/test/test_sessions.py::TestClickhouseSessions::test_compare": 0.6779186700000537, "ee/clickhouse/queries/test/test_sessions.py::TestClickhouseSessions::test_filter_sessions_precalculated_cohort": 1.2336193620001268, "ee/clickhouse/queries/test/test_sessions.py::TestClickhouseSessions::test_filter_test_accounts": 0.8481924339996567, "ee/clickhouse/queries/test/test_sessions.py::TestClickhouseSessions::test_no_events": 0.31159995000007257, 
"ee/clickhouse/queries/test/test_sessions.py::TestClickhouseSessions::test_sessions_avg_length": 0.7265896900000826, "ee/clickhouse/queries/test/test_sessions.py::TestClickhouseSessions::test_sessions_avg_length_interval": 0.9591475619999983, "ee/clickhouse/queries/test/test_sessions.py::TestClickhouseSessions::test_sessions_count_buckets": 1.4210503179999705, "ee/clickhouse/queries/test/test_sessions.py::TestClickhouseSessions::test_sessions_count_buckets_default": 0.3002597940001124, "ee/clickhouse/queries/test/test_stickiness.py::TestClickhouseStickiness::test_compare": 0.5753936309999972, "ee/clickhouse/queries/test/test_stickiness.py::TestClickhouseStickiness::test_filter_test_accounts": 0.6387358009999389, "ee/clickhouse/queries/test/test_stickiness.py::TestClickhouseStickiness::test_stickiness": 0.6765714929997557, "ee/clickhouse/queries/test/test_stickiness.py::TestClickhouseStickiness::test_stickiness_action": 0.5529603320001115, "ee/clickhouse/queries/test/test_stickiness.py::TestClickhouseStickiness::test_stickiness_all_time": 0.5447607580001659, "ee/clickhouse/queries/test/test_stickiness.py::TestClickhouseStickiness::test_stickiness_entity_filter": 0.5887713059998987, "ee/clickhouse/queries/test/test_stickiness.py::TestClickhouseStickiness::test_stickiness_hours": 0.5313776819998566, "ee/clickhouse/queries/test/test_stickiness.py::TestClickhouseStickiness::test_stickiness_minutes": 0.5078811199998654, "ee/clickhouse/queries/test/test_stickiness.py::TestClickhouseStickiness::test_stickiness_months": 0.5075017989997832, "ee/clickhouse/queries/test/test_stickiness.py::TestClickhouseStickiness::test_stickiness_people_endpoint": 0.5774776530001873, "ee/clickhouse/queries/test/test_stickiness.py::TestClickhouseStickiness::test_stickiness_people_paginated": 6.452203972000007, "ee/clickhouse/queries/test/test_stickiness.py::TestClickhouseStickiness::test_stickiness_people_with_entity_filter": 0.6119255020000764, 
"ee/clickhouse/queries/test/test_stickiness.py::TestClickhouseStickiness::test_stickiness_prop_filter": 0.5259782159998849, "ee/clickhouse/queries/test/test_stickiness.py::TestClickhouseStickiness::test_stickiness_weeks": 0.5320777679999082, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_action_filtering": 0.6119244049998542, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_action_with_prop": 0.46008807999987766, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_action_with_prop_materialized": 0.5614032770001813, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_active_user_math": 0.5424876760000643, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_active_user_math_action": 0.5361227050002526, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_against_clashing_entity_and_property_filter_naming": 0.616831904000037, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_against_clashing_entity_and_property_filter_naming_materialized": 0.831069374000208, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_all_time_timerange": 0.6304438960003154, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_avg_filtering": 0.7272097269999449, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_avg_filtering_materialized": 1.1833845789999486, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_avg_filtering_non_number_resiliency": 0.7260391600000276, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_avg_filtering_non_number_resiliency_materialized": 0.9277226990000145, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_bar_chart_by_value": 0.5648162120003235, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_active_user_math": 0.5580836759997965, 
"ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_active_user_math_materialized": 0.762654940999937, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_by_cohort": 1.1284282610004084, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_by_cohort_materialized": 1.6302445720000378, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_by_empty_cohort": 0.40686760299990965, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_by_person_property": 1.0271462860002885, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_by_person_property_pie": 0.6633727819998967, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_by_person_property_pie_materialized": 0.870781414000021, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_by_property_pie": 0.7684049740000773, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_filter_by_precalculated_cohort": 1.074485413000275, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_filtering": 0.8340111669999715, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_filtering_bar_chart_by_value": 0.5817506379996757, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_filtering_bar_chart_by_value_materialized": 1.0058639319997837, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_filtering_limit": 0.8280052180002713, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_filtering_limit_materialized": 1.0742279970002073, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_filtering_materialized": 1.0190874990000793, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_filtering_persons": 
0.6013488749999851, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_filtering_persons_materialized": 0.7752128560000529, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_filtering_persons_with_action_props": 0.6212501020002037, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_filtering_persons_with_action_props_materialized": 0.8715044020000278, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_filtering_with_properties": 0.5724082710000857, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_filtering_with_properties_materialized": 0.9543476030000875, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_label": 0.2195561229998475, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_multiple_cohorts": 1.7884468370000377, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_multiple_cohorts_materialized": 2.3748965209999824, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_single_cohort": 1.0713520359997801, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_single_cohort_materialized": 1.5705844460001117, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_user_props_with_filter": 0.6439859859999615, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_user_props_with_filter_materialized": 0.858218523000005, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_with_filter": 0.46799335100013195, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_with_filter_materialized": 0.5790036440000677, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_with_person_property_filter": 1.0415610800002923, 
"ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_with_person_property_filter_materialized": 1.6696368500001881, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_combine_all_cohort_and_icontains": 0.7134029829996962, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_combine_all_cohort_and_icontains_materialized": 0.9024537650000184, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_custom_range_timerange": 0.56093799700011, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_dau_filtering": 0.8163742950000596, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_dau_with_breakdown_filtering": 0.8898883530000603, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_dau_with_breakdown_filtering_materialized": 1.131941875999928, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_dau_with_breakdown_filtering_with_prop_filter": 0.9319430889997875, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_dau_with_breakdown_filtering_with_prop_filter_materialized": 1.2244472360002874, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_day_interval": 0.48737353499973324, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_entity_person_property_filtering": 0.5969085809999797, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_entity_person_property_filtering_materialized": 0.7802345590002915, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_filter_by_precalculated_cohort": 0.9309820860000855, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_filter_events_by_cohort": 0.4568634940001175, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_filter_events_by_cohort_materialized": 0.5870807179999247, 
"ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_filter_test_accounts": 0.7691646369999035, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_filter_test_accounts_cohorts": 0.472497697000108, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_filter_test_accounts_cohorts_materialized": 0.5818420159998823, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_filter_test_accounts_materialized": 1.0807327369998347, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_filtering_with_action_props": 0.3790506990001177, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_filtering_with_action_props_materialized": 0.464236020999806, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_hour_interval": 0.4573377229999096, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_interval_filtering": 1.0507267039999988, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_interval_filtering_breakdown": 1.5440586730001087, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_interval_filtering_breakdown_materialized": 2.089905372999965, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_interval_rounding": 0.48139874299999974, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_interval_rounding_monthly": 0.4086538349999955, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_last14days_timerange": 0.6118394870002248, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_last24hours_timerange": 0.5267804679997425, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_last30days_timerange": 0.6432717049995063, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_last48hours_timerange": 0.5191334240003016, 
"ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_last7days_timerange": 0.5162335569998504, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_last90days_timerange": 0.8535810679998121, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_max_filtering": 0.7589904149999711, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_max_filtering_materialized": 0.9590031010004623, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_median_filtering": 2.070088905000148, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_median_filtering_materialized": 3.055939957999726, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_min_filtering": 0.7582557840003119, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_min_filtering_materialized": 0.9464221140001428, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_minute_interval": 0.43531238700006725, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_month_interval": 0.464955492999934, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_no_props": 0.34244773499972325, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_p90_filtering": 2.114751366000064, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_p90_filtering_materialized": 3.0940250989997367, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_p95_filtering": 2.177623440999696, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_p95_filtering_materialized": 3.0076593049998337, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_p99_filtering": 2.0972056709999833, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_p99_filtering_materialized": 3.0341006749999906, 
"ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_per_entity_filtering": 0.6560443570001553, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_per_entity_filtering_materialized": 0.8189616840002145, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_person_property_filtering": 0.6127937549999842, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_person_property_filtering_materialized": 0.7766661350001414, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_previous_month_timerange": 0.5889164729999266, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_property_filtering": 0.5993743779999932, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_property_filtering_materialized": 0.6753276719998667, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_response_empty_if_no_events": 0.48878588000025047, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_sum_filtering": 0.7307318769999256, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_sum_filtering_materialized": 0.9692971799997849, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_this_month_timerange": 0.6139141390001441, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_today_timerange": 0.46324101899995185, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_breakdown_single_aggregate": 0.5484230519996345, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_breakdown_single_aggregate_cohorts": 0.7019290380001166, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_breakdown_single_aggregate_cohorts_materialized": 0.9196179389998633, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_breakdown_single_aggregate_math": 0.6495090020000589, 
"ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_breakdown_with_math_func": 1.5941470940001636, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_breakdown_with_math_func_materialized": 2.3929146379998656, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_compare": 0.7365247440002349, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_for_non_existing_action": 0.3446777579997615, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_math_without_math_property": 0.2193382999998903, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_per_day": 0.6313969580000958, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_per_day_48hours": 0.649507506999953, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_per_day_cumulative": 0.5690370470001653, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_regression_filtering_by_action_with_person_properties": 0.8107669440000791, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_regression_filtering_by_action_with_person_properties_materialized": 1.0680490419999842, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_single_aggregate_dau": 0.7057902659998945, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_single_aggregate_math": 0.5900429639998492, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_single_aggregate_math_materialized": 0.8434130340001502, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_week_interval": 0.4929843439997512, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_year_to_date_timerange": 0.6174393409999084, "ee/clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_yesterday_timerange": 0.4836200270001427, 
"ee/clickhouse/queries/test/test_util.py::test_get_earliest_timestamp": 0.29817101799994816, "ee/clickhouse/queries/test/test_util.py::test_get_earliest_timestamp_with_no_events": 0.2513645149999775, "ee/clickhouse/queries/test/test_util.py::test_parse_breakdown_cohort_query": 0.3039989549999973, "ee/clickhouse/queries/trends/test/test_formula.py::TestFormula::test_breakdown": 0.841712705999953, "ee/clickhouse/queries/trends/test/test_formula.py::TestFormula::test_breakdown_cohort": 0.9317270039998675, "ee/clickhouse/queries/trends/test/test_formula.py::TestFormula::test_breakdown_counts_of_different_events_one_without_events": 0.7110214580000047, "ee/clickhouse/queries/trends/test/test_formula.py::TestFormula::test_breakdown_mismatching_sizes": 0.8119365850002396, "ee/clickhouse/queries/trends/test/test_formula.py::TestFormula::test_compare": 0.8700475400003143, "ee/clickhouse/queries/trends/test/test_formula.py::TestFormula::test_cumulative": 0.6687702180001907, "ee/clickhouse/queries/trends/test/test_formula.py::TestFormula::test_day_interval": 0.6479343139999401, "ee/clickhouse/queries/trends/test/test_formula.py::TestFormula::test_event_properties": 0.670512018999716, "ee/clickhouse/queries/trends/test/test_formula.py::TestFormula::test_formula": 1.9149967200000901, "ee/clickhouse/queries/trends/test/test_formula.py::TestFormula::test_global_properties": 0.6656928499996866, "ee/clickhouse/queries/trends/test/test_formula.py::TestFormula::test_hour_interval": 0.6610860820001108, "ee/clickhouse/queries/trends/test/test_formula.py::TestFormula::test_interval_rounding": 0.4810090539999692, "ee/clickhouse/queries/trends/test/test_formula.py::TestFormula::test_minute_interval": 0.6461675179998565, "ee/clickhouse/queries/trends/test/test_formula.py::TestFormula::test_month_interval": 0.6366708680000102, "ee/clickhouse/queries/trends/test/test_formula.py::TestFormula::test_multiple_events": 0.7242845410000882, 
"ee/clickhouse/queries/trends/test/test_formula.py::TestFormula::test_pie": 0.6068751430000248, "ee/clickhouse/queries/trends/test/test_formula.py::TestFormula::test_properties_with_escape_params": 0.6762701610000477, "ee/clickhouse/queries/trends/test/test_formula.py::TestFormula::test_week_interval": 0.6385763780001525, "ee/clickhouse/test/test_calculate_event_property_usage.py::CalculateEventPropertyUsage::test_calculate_usage": 0.48941504099957456, "ee/clickhouse/test/test_calculate_event_property_usage.py::CalculateEventPropertyUsage::test_updating_event_properties_or_related_updates_property_definitions": 17.938282477999792, "ee/clickhouse/test/test_calculate_event_property_usage.py::CalculateEventPropertyUsage::test_updating_team_events_or_related_updates_event_definitions": 17.887773481000067, "ee/clickhouse/test/test_client.py::ClickhouseClientTestCase::test_cache_eviction": 0.10384061600007044, "ee/clickhouse/test/test_client.py::ClickhouseClientTestCase::test_caching_client": 0.012063598999702663, "ee/clickhouse/test/test_client.py::ClickhouseClientTestCase::test_client_strips_comments_from_request": 0.014801757999748588, "ee/clickhouse/test/test_middleware.py::TestQueryMiddleware::test_query": 0.919627182999875, "ee/clickhouse/test/test_system_status.py::test_system_status": 0.06140775799963194, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionApi::test_cant_create_action_with_the_same_name": 0.280579556000248, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionApi::test_create_action": 0.3841221569998652, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionApi::test_create_action_event_with_space": 0.2676781090001441, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionApi::test_create_from_other_domain": 0.4174442859998635, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionApi::test_get_event_count": 0.3358408580002106, 
"ee/clickhouse/views/test/test_clickhouse_action.py::TestActionApi::test_http_to_https": 0.2756131700000424, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionApi::test_update_action": 0.3628241219996653, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionApi::test_update_action_remove_all_steps": 0.27422708800008877, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionPeople::test_active_user_weekly_people": 0.49669747899997674, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionPeople::test_breakdown_by_cohort_people_endpoint": 0.8055679569999938, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionPeople::test_breakdown_by_event_property_none_people_endpoint": 0.6171466370001326, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionPeople::test_breakdown_by_event_property_people_endpoint": 0.6151465210000424, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionPeople::test_breakdown_by_person_property_nones_people_endpoint": 0.6349734699997498, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionPeople::test_breakdown_by_person_property_people_endpoint": 0.6189495419996547, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionPeople::test_day_interval": 0.8621487249999973, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionPeople::test_day_interval_cumulative": 0.8869568599998274, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionPeople::test_filtering_by_person_properties": 0.5980624329999955, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionPeople::test_hour_interval": 1.2998724709998442, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionPeople::test_interval_day": 0.5164292049998949, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionPeople::test_interval_hour": 0.5088375650000216, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionPeople::test_interval_minute": 0.540603310999586, 
"ee/clickhouse/views/test/test_clickhouse_action.py::TestActionPeople::test_interval_month": 0.5334952069997598, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionPeople::test_interval_rounding": 0.2244158010000774, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionPeople::test_interval_week": 0.5511784299997089, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionPeople::test_is_calculating_always_false": 0.41297912399977577, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionPeople::test_minute_interval": 1.0859966129999066, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionPeople::test_month_interval": 1.1027875849999873, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionPeople::test_people_csv": 0.6013007340000058, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionPeople::test_people_cumulative": 3.879905554000061, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionPeople::test_people_endpoint_paginated": 6.516160944999683, "ee/clickhouse/views/test/test_clickhouse_action.py::TestActionPeople::test_week_interval": 1.0997417829998994, "ee/clickhouse/views/test/test_clickhouse_element.py::TestElement::test_element_automatic_order": 0.24567542499994488, "ee/clickhouse/views/test/test_clickhouse_element.py::TestElement::test_element_stats": 0.38568333099988195, "ee/clickhouse/views/test/test_clickhouse_element.py::TestElement::test_event_property_values": 0.33916893700006767, "ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_action_no_steps": 0.2704922610000722, "ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_ascending_order_timestamp": 0.43596054199997525, "ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_before_and_after": 0.6830658480002967, "ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_custom_event_values": 0.3116981460000261, 
"ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_default_descending_order_timestamp": 0.43765403900010824, "ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_event_property_values": 0.5719874160001837, "ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_event_sessions_by_id": 0.7569591270000728, "ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_events_csv_export_default_limit": 0.47848220300011235, "ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_events_csv_export_maximum_limit": 0.4956198950003454, "ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_events_csv_export_over_maximum_limit": 0.4748813940002492, "ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_events_csv_export_with_param_limit": 0.47210443300036786, "ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_events_in_future": 0.38801796599977934, "ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_events_nonexistent_cohort_handling": 0.5705475319998641, "ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_events_sessions_basic": 2.2291892099997312, "ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_filter_by_nonexisting_person": 0.28956448899998577, "ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_filter_by_person": 0.3730495299998893, "ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_filter_events": 0.37916348499993546, "ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_filter_events_by_event_name": 0.3557926289997795, "ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_filter_events_by_precalculated_cohort": 1.1151705590000347, 
"ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_filter_events_by_properties": 0.3800769369997852, "ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_get_event_by_id": 0.28024704900008146, "ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_get_events_with_specified_token": 0.41898639400005777, "ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_get_single_action": 0.2994937649998519, "ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_limit": 0.4069234950000009, "ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_live_action_events": 0.21391980299972602, "ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_optimize_query": 0.5271046619998287, "ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_pagination": 4.829561386000023, "ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_pagination_bounded_date_range": 0.8839354929998535, "ee/clickhouse/views/test/test_clickhouse_event.py::ClickhouseTestEventApi::test_session_events": 0.5555353739998736, "ee/clickhouse/views/test/test_clickhouse_funnel_correlation.py::FunnelCorrelationTest::test_correlation_endpoint_request_with_no_steps_doesnt_fail": 0.28954336800029523, "ee/clickhouse/views/test/test_clickhouse_funnel_correlation.py::FunnelCorrelationTest::test_correlation_endpoint_with_properties": 1.866001870999753, "ee/clickhouse/views/test/test_clickhouse_funnel_correlation.py::FunnelCorrelationTest::test_event_correlation_endpoint_does_not_include_funnel_steps": 0.9747470580000481, "ee/clickhouse/views/test/test_clickhouse_funnel_correlation.py::FunnelCorrelationTest::test_event_correlation_endpoint_does_not_include_historical_events": 0.8163641040000584, 
"ee/clickhouse/views/test/test_clickhouse_funnel_correlation.py::FunnelCorrelationTest::test_event_correlation_endpoint_picks_up_events_for_odds_ratios": 0.8725269580002077, "ee/clickhouse/views/test/test_clickhouse_funnel_correlation.py::FunnelCorrelationTest::test_event_correlation_is_partitioned_by_team": 1.3908437919999415, "ee/clickhouse/views/test/test_clickhouse_funnel_correlation.py::FunnelCorrelationTest::test_funnel_correlation_with_event_properties_autocapture": 1.1011329049999858, "ee/clickhouse/views/test/test_clickhouse_funnel_correlation.py::FunnelCorrelationTest::test_requires_authn": 0.22374276599998666, "ee/clickhouse/views/test/test_clickhouse_funnel_person.py::TestFunnelPerson::test_basic_format": 0.9198618000000351, "ee/clickhouse/views/test/test_clickhouse_funnel_person.py::TestFunnelPerson::test_basic_pagination": 9.032663469999989, "ee/clickhouse/views/test/test_clickhouse_funnel_person.py::TestFunnelPerson::test_basic_pagination_with_deleted": 10.221104253000021, "ee/clickhouse/views/test/test_clickhouse_funnel_person.py::TestFunnelPerson::test_breakdown_basic_pagination": 9.409283933999859, "ee/clickhouse/views/test/test_clickhouse_funnel_person.py::TestFunnelPerson::test_breakdowns": 1.1256946669998342, "ee/clickhouse/views/test/test_clickhouse_funnel_person.py::TestFunnelCorrelationPersons::test_pagination": 2.089670500999773, "ee/clickhouse/views/test/test_clickhouse_funnel_trends_person.py::TestFunnelTrendsPerson::test_basic_format": 1.2636994459996913, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestInsights::test_basic_results": 0.2775504460000775, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestInsights::test_cohort_without_match_group_works": 0.4303297689998544, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestInsights::test_create_insight_items": 0.259308904999898, 
"ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestInsights::test_get_favorited_insight_items": 0.268817546000264, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestInsights::test_get_insight_by_short_id": 0.26308806699989873, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestInsights::test_get_insight_items": 0.2720978520001154, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestInsights::test_get_saved_insight_items": 0.27116900499981966, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestInsights::test_insight_funnels_basic_get": 0.47879737599987493, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestInsights::test_insight_funnels_basic_post": 0.49228406199995334, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestInsights::test_insight_paths_basic": 0.7083831629997803, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestInsights::test_insight_refreshing": 0.6070565049997185, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestInsights::test_insight_retention_basic": 0.49653408000017407, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestInsights::test_insight_trends_allowed_if_project_open_and_org_member": 0.32894644599991807, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestInsights::test_insight_trends_allowed_if_project_private_and_org_member_and_project_member": 0.32450302000029296, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestInsights::test_insight_trends_basic": 0.41227438999976584, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestInsights::test_insight_trends_breakdown_pagination": 0.9765281149998373, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestInsights::test_insight_trends_forbidden_if_project_private_and_org_member": 0.24129158499999903, 
"ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestInsights::test_insight_with_specified_token": 0.48289658399971813, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestInsights::test_nonexistent_cohort_is_handled": 0.4262346999998954, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestInsights::test_precalculated_cohort_works": 1.166152067999974, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestInsights::test_save_new_funnel": 0.27401211699975647, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestInsights::test_update_insight": 0.25158690700027364, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestInsights::test_update_insight_filters": 0.005339832999879945, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestFunnelTypes::test_funnel_basic_exclusions": 0.5926136939999651, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestFunnelTypes::test_funnel_invalid_action_handled": 0.24875102399983007, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestFunnelTypes::test_funnel_invalid_exclusions": 0.7764534989998992, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestFunnelTypes::test_funnel_strict_basic_post": 0.5560231540000586, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestFunnelTypes::test_funnel_time_to_convert_auto_bins": 3.030403734999936, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestFunnelTypes::test_funnel_time_to_convert_auto_bins_strict": 2.56589407499996, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestFunnelTypes::test_funnel_time_to_convert_auto_bins_unordered": 6.546081921999985, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestFunnelTypes::test_funnel_trends_basic_post": 0.6726990639997439, 
"ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestFunnelTypes::test_funnel_trends_basic_post_backwards_compatibility": 0.6701449209997463, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestFunnelTypes::test_funnel_trends_strict_basic_post": 0.7131270020001921, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestFunnelTypes::test_funnel_trends_unordered_basic_post": 1.0597191010001552, "ee/clickhouse/views/test/test_clickhouse_insights.py::ClickhouseTestFunnelTypes::test_funnel_unordered_basic_post": 0.7487043639998774, "ee/clickhouse/views/test/test_clickhouse_path_person.py::TestPathPerson::test_basic_format": 0.8736085649998131, "ee/clickhouse/views/test/test_clickhouse_path_person.py::TestPathPerson::test_basic_format_with_funnel_path_get": 1.9474917909999476, "ee/clickhouse/views/test/test_clickhouse_path_person.py::TestPathPerson::test_basic_format_with_funnel_path_post": 1.9541469329999472, "ee/clickhouse/views/test/test_clickhouse_path_person.py::TestPathPerson::test_basic_format_with_path_start_key_constraints": 0.8670138430002226, "ee/clickhouse/views/test/test_clickhouse_path_person.py::TestPathPerson::test_basic_format_with_start_point_constraints": 1.036400834999995, "ee/clickhouse/views/test/test_clickhouse_path_person.py::TestPathPerson::test_basic_pagination": 2.3215846449998025, "ee/clickhouse/views/test/test_clickhouse_path_person.py::TestPathPerson::test_basic_pagination_with_deleted": 9.344018170000027, "ee/clickhouse/views/test/test_clickhouse_paths.py::TestClickhousePaths::test_backwards_compatible_path_types": 1.308208096000044, "ee/clickhouse/views/test/test_clickhouse_paths.py::TestClickhousePaths::test_backwards_compatible_start_point": 1.2732035899998664, "ee/clickhouse/views/test/test_clickhouse_paths.py::TestClickhousePaths::test_funnel_path_post": 2.038496304000091, "ee/clickhouse/views/test/test_clickhouse_paths.py::TestClickhousePaths::test_insight_paths_basic": 
0.6002012160001868, "ee/clickhouse/views/test/test_clickhouse_paths.py::TestClickhousePaths::test_insight_paths_basic_exclusions": 0.6405020909999166, "ee/clickhouse/views/test/test_clickhouse_paths.py::TestClickhousePaths::test_path_groupings": 1.209239086000025, "ee/clickhouse/views/test/test_clickhouse_person.py::ClickhouseTestPersonApi::test_cant_see_another_organization_pii_with_filters": 0.4052275730000474, "ee/clickhouse/views/test/test_clickhouse_person.py::ClickhouseTestPersonApi::test_delete_person": 0.6129477719998704, "ee/clickhouse/views/test/test_clickhouse_person.py::ClickhouseTestPersonApi::test_filter_by_cohort": 0.488000435999993, "ee/clickhouse/views/test/test_clickhouse_person.py::ClickhouseTestPersonApi::test_filter_id_or_uuid": 0.3056462429999556, "ee/clickhouse/views/test/test_clickhouse_person.py::ClickhouseTestPersonApi::test_filter_is_identified": 0.33953578899991044, "ee/clickhouse/views/test/test_clickhouse_person.py::ClickhouseTestPersonApi::test_filter_person_list": 0.42098698299992066, "ee/clickhouse/views/test/test_clickhouse_person.py::ClickhouseTestPersonApi::test_merge_people": 0.3687332920001154, "ee/clickhouse/views/test/test_clickhouse_person.py::ClickhouseTestPersonApi::test_person_cohorts": 0.7607554850001179, "ee/clickhouse/views/test/test_clickhouse_person.py::ClickhouseTestPersonApi::test_person_property_names": 0.3039750569998887, "ee/clickhouse/views/test/test_clickhouse_person.py::ClickhouseTestPersonApi::test_person_property_values": 0.3549387669997941, "ee/clickhouse/views/test/test_clickhouse_person.py::ClickhouseTestPersonApi::test_properties": 0.3662029980002899, "ee/clickhouse/views/test/test_clickhouse_person.py::ClickhouseTestPersonApi::test_return_non_anonymous_name": 0.3222630370000843, "ee/clickhouse/views/test/test_clickhouse_person.py::ClickhouseTestPersonApi::test_search": 0.3988302009997824, "ee/clickhouse/views/test/test_clickhouse_person.py::ClickhouseTestPersonApi::test_split_people_delete_props": 
0.419703428000048, "ee/clickhouse/views/test/test_clickhouse_person.py::ClickhouseTestPersonApi::test_split_people_keep_props": 0.3861962799996945, "ee/clickhouse/views/test/test_clickhouse_session_recordings.py::ClickhouseTestSessionRecordingsAPI::test_get_default_limit_of_chunks": 0.49980106299994986, "ee/clickhouse/views/test/test_clickhouse_session_recordings.py::ClickhouseTestSessionRecordingsAPI::test_get_metadata_for_chunked_session_recording": 12.241319018000013, "ee/clickhouse/views/test/test_clickhouse_session_recordings.py::ClickhouseTestSessionRecordingsAPI::test_get_session_recordings": 0.36672320100001343, "ee/clickhouse/views/test/test_clickhouse_session_recordings.py::ClickhouseTestSessionRecordingsAPI::test_get_single_session_recording_metadata": 0.3308036099999754, "ee/clickhouse/views/test/test_clickhouse_session_recordings.py::ClickhouseTestSessionRecordingsAPI::test_get_snapshots_for_chunked_session_recording": 13.256453979999833, "ee/clickhouse/views/test/test_clickhouse_session_recordings.py::ClickhouseTestSessionRecordingsAPI::test_request_to_another_teams_endpoint_returns_401": 0.2726631450000241, "ee/clickhouse/views/test/test_clickhouse_session_recordings.py::ClickhouseTestSessionRecordingsAPI::test_session_recording_doesnt_exist": 0.28683948700017936, "ee/clickhouse/views/test/test_clickhouse_session_recordings.py::ClickhouseTestSessionRecordingsAPI::test_session_recording_for_user_with_multiple_distinct_ids": 0.36879256900033397, "ee/clickhouse/views/test/test_clickhouse_session_recordings.py::ClickhouseTestSessionRecordingsAPI::test_session_recording_with_no_person": 0.3091373529998691, "ee/clickhouse/views/test/test_clickhouse_session_recordings.py::ClickhouseTestSessionRecordingsAPI::test_session_recordings_dont_leak_teams": 0.3165090379998219, "ee/clickhouse/views/test/test_clickhouse_session_recordings.py::ClickhouseTestSessionRecordingsAPI::test_setting_viewed_state_of_session_recording": 0.6345090530001016, 
"ee/clickhouse/views/test/test_clickhouse_session_recordings.py::ClickhouseTestSessionRecordingsAPI::test_single_session_recording_doesnt_leak_teams": 0.3086119250001502, "ee/clickhouse/views/test/test_clickhouse_session_recordings.py::ClickhouseTestSessionRecordingsAPI::test_viewed_state_of_session_recording": 0.32080836199997975, "ee/kafka_client/test/test_client.py::KafkaClientTestCase::test_kafka_interface": 0.008750657000064166, "ee/kafka_client/test/test_client.py::KafkaClientTestCase::test_kafka_produce": 0.39514470500012067, "ee/kafka_client/test/test_client.py::KafkaClientTestCase::test_kafka_produce_and_consume": 0.34149586099965745, "ee/tasks/test/test_calculate_cohort.py::TestClickhouseCalculateCohort::test_create_stickiness_cohort": 0.5154557079999904, "ee/tasks/test/test_calculate_cohort.py::TestClickhouseCalculateCohort::test_create_trends_cohort": 0.022596962000079657, "ee/tasks/test/test_org_usage_report.py::TestOrganizationUsageReport::test_event_counts": 1.1234756750000088, "ee/tasks/test/test_org_usage_report.py::TestOrganizationUsageReport::test_org_usage_report": 0.07461113699991984, "ee/tasks/test/test_send_license_usage.py::SendLicenseUsageTest::test_send_license_error": 4.880100866000021, "ee/tasks/test/test_send_license_usage.py::SendLicenseUsageTest::test_send_license_usage": 1.126898724000057, "ee/tasks/test/test_send_license_usage.py::SendLicenseUsageNoLicenseTest::test_no_license": 0.36430595899992113, "ee/tasks/test/test_status_report.py::TestStatusReport::test_instance_status_report_event_counts": 2.0849482789999456, "ee/tasks/test/test_status_report.py::TestStatusReport::test_status_report": 0.1445598039995275, "ee/tasks/test/test_status_report.py::TestStatusReport::test_status_report_duplicate_distinct_ids": 0.18582451699990088, "ee/tasks/test/test_status_report.py::TestStatusReport::test_status_report_multiple_ids_per_person": 0.20238136500029213, "ee/tasks/test/test_status_report.py::TestStatusReport::test_status_report_plugins": 
0.15370917099994585, "ee/clickhouse/models/test/test_property.py::test_parse_prop_clauses_defaults": 0.03579843300030916, "ee/clickhouse/queries/funnels/test/test_funnel.py::also_test_with_materialized_columns": 0.001733087999809868, "ee/clickhouse/queries/funnels/test/test_funnel_correlation.py::TestCorrelationFunctions::test_are_results_insignificant": 0.002764541999795256, "ee/clickhouse/queries/funnels/test/test_funnel_correlation.py::also_test_with_materialized_columns": 0.0018324939999274648, "ee/clickhouse/queries/funnels/test/test_funnel_correlation_persons.py::also_test_with_materialized_columns": 0.0014835750000656844, "ee/clickhouse/queries/funnels/test/test_funnel_persons.py::also_test_with_materialized_columns": 0.0018048920001092483, "ee/clickhouse/queries/test/test_breakdown_props.py::also_test_with_materialized_columns": 0.0016624840000076802, "ee/clickhouse/queries/test/test_paths.py::also_test_with_materialized_columns": 0.0028525469999749475, "ee/clickhouse/queries/test/test_trends.py::also_test_with_materialized_columns": 0.0017761909998625924, "ee/clickhouse/test/test_error.py::test_wrap_query_error[error0-AttributeError-Foobar-None]": 0.002195511999843802, "ee/clickhouse/test/test_error.py::test_wrap_query_error[error1-EstimatedQueryExecutionTimeTooLong-Estimated query execution time (34.5 seconds) is too long.-None]": 0.002550129999917772, "ee/clickhouse/test/test_error.py::test_wrap_query_error[error2-CHQueryErrorSyntaxError-Code: 62.\\nSyntax error-62]": 0.0020832059999520425, "ee/clickhouse/test/test_error.py::test_wrap_query_error[error3-CHQueryErrorUnknown-Code: 9999.\\nSyntax error-9999]": 0.17104966100009733, "api/test/test_action.py::TestActionApi::test_actions_does_not_nplus1": 1.5385470399999122, "api/test/test_action.py::TestActionApi::test_create_action_update_delete_tags": 0.19979837799996858, "api/test/test_action.py::TestActionApi::test_create_action_with_tags": 0.13023913199981507, 
"api/test/test_capture.py::TestCaptureAPI::test_capture_event_with_uuid_in_payload": 0.27399585199998455, "api/test/test_capture.py::TestCaptureAPI::test_determine_team_from_request_data_ch": 0.01760381199994754, "api/test/test_capture.py::TestCaptureAPI::test_kafka_connection_error": 0.027186072999938915, "api/test/test_capture.py::TestCaptureAPI::test_produce_to_kafka": 0.025731164000035278, "api/test/test_capture.py::TestCaptureAPI::test_unable_to_fetch_team": 0.027623076999930163, "api/test/test_dashboard.py::TestDashboardEnterpriseAPI::test_can_edit_restricted_dashboard_as_creator_who_is_project_member": 0.3191895410000143, "api/test/test_dashboard.py::TestDashboardEnterpriseAPI::test_can_edit_restricted_dashboard_as_other_user_who_is_project_admin": 0.07537858199998482, "api/test/test_dashboard.py::TestDashboardEnterpriseAPI::test_can_set_dashboard_to_restrict_editing_as_creator_who_is_project_admin": 0.06313130299997738, "api/test/test_dashboard.py::TestDashboardEnterpriseAPI::test_can_set_dashboard_to_restrict_editing_as_creator_who_is_project_member": 0.06172709499992379, "api/test/test_dashboard.py::TestDashboardEnterpriseAPI::test_can_set_dashboard_to_restrict_editing_as_other_user_who_is_project_admin": 0.08137712000007014, "api/test/test_dashboard.py::TestDashboardEnterpriseAPI::test_cannot_delete_restricted_dashboard_as_other_user_who_is_project_member": 0.07264446400006364, "api/test/test_dashboard.py::TestDashboardEnterpriseAPI::test_cannot_edit_restricted_dashboard_as_other_user_who_is_project_member": 0.0638485080000919, "api/test/test_dashboard.py::TestDashboardEnterpriseAPI::test_cannot_set_dashboard_to_restrict_editing_as_other_user_who_is_project_member": 0.06018178400006491, "api/test/test_dashboard.py::TestDashboardEnterpriseAPI::test_dashboard_restrictions_have_no_effect_without_license": 0.054030046000093535, "api/test/test_dashboard.py::TestDashboardEnterpriseAPI::test_retrieve_dashboard_allowed_for_project_member": 0.0719415599999138, 
"api/test/test_dashboard.py::TestDashboardEnterpriseAPI::test_retrieve_dashboard_forbidden_for_org_admin": 0.058855876999928114, "api/test/test_dashboard.py::TestDashboardEnterpriseAPI::test_retrieve_dashboard_forbidden_for_project_outsider": 0.03844774500009862, "api/test/test_dashboard.py::TestDashboardEnterpriseAPI::test_shared_dashboard_in_private_project": 0.07711219300006178, "api/test/test_dashboard_collaborators.py::TestDashboardCollaboratorsAPI::test_can_add_collaborator_to_edit_restricted_dashboard_as_creator": 0.319832045000112, "api/test/test_dashboard_collaborators.py::TestDashboardCollaboratorsAPI::test_can_remove_collaborator_from_restricted_dashboard_as_creator": 0.06164459400008582, "api/test/test_dashboard_collaborators.py::TestDashboardCollaboratorsAPI::test_cannot_add_collaborator_from_other_org_to_edit_restricted_dashboard_as_creator": 0.09515990900001725, "api/test/test_dashboard_collaborators.py::TestDashboardCollaboratorsAPI::test_cannot_add_collaborator_to_edit_restricted_dashboard_as_other_user": 0.056904363999933594, "api/test/test_dashboard_collaborators.py::TestDashboardCollaboratorsAPI::test_cannot_add_collaborator_to_other_org_to_edit_restricted_dashboard_as_creator": 0.08776536200002738, "api/test/test_dashboard_collaborators.py::TestDashboardCollaboratorsAPI::test_cannot_add_collaborator_to_unrestricted_dashboard_as_creator": 0.05438244900005884, "api/test/test_dashboard_collaborators.py::TestDashboardCollaboratorsAPI::test_cannot_add_yourself_to_restricted_dashboard_as_creator": 0.055766857000094205, "api/test/test_dashboard_collaborators.py::TestDashboardCollaboratorsAPI::test_cannot_remove_collaborator_from_restricted_dashboard_as_other_user": 0.055957756999987396, "api/test/test_dashboard_collaborators.py::TestDashboardCollaboratorsAPI::test_cannot_remove_collaborator_from_unrestricted_dashboard_as_creator": 0.05737026600002082, 
"api/test/test_dashboard_collaborators.py::TestDashboardCollaboratorsAPI::test_cannot_update_existing_collaborator": 0.1412758029998713, "api/test/test_dashboard_collaborators.py::TestDashboardCollaboratorsAPI::test_list_collaborators_as_person_without_edit_access": 0.0737501709999151, "api/test/test_event_definition.py::TestEventDefinitionEnterpriseAPI::test_can_get_event_verification_data": 0.46998760500002845, "api/test/test_event_definition.py::TestEventDefinitionEnterpriseAPI::test_cannot_update_verified_meta_properties_directly": 0.15155626899991148, "api/test/test_event_definition.py::TestEventDefinitionEnterpriseAPI::test_retrieve_create_event_definition": 0.059219778999931805, "api/test/test_event_definition.py::TestEventDefinitionEnterpriseAPI::test_retrieve_existing_event_definition": 0.05858937400000741, "api/test/test_event_definition.py::TestEventDefinitionEnterpriseAPI::test_search_event_definition": 0.14544373000001087, "api/test/test_event_definition.py::TestEventDefinitionEnterpriseAPI::test_update_event_definition": 0.08862286700002642, "api/test/test_event_definition.py::TestEventDefinitionEnterpriseAPI::test_update_event_without_license": 0.04458428599991748, "api/test/test_event_definition.py::TestEventDefinitionEnterpriseAPI::test_verify_then_unverify": 0.2303894730000593, "api/test/test_event_definition.py::TestEventDefinitionEnterpriseAPI::test_verify_then_verify_again_no_change": 0.2604264650001369, "api/test/test_event_definition.py::TestEventDefinitionEnterpriseAPI::test_with_expired_license": 0.059642080999879, "api/test/test_hooks.py::TestHooksAPI::test_create_hook": 0.4153403550001258, "api/test/test_hooks.py::TestHooksAPI::test_create_hook_with_resource_id": 0.24747178199993414, "api/test/test_hooks.py::TestHooksAPI::test_delete_hook": 0.25289331700003004, "api/test/test_insight.py::TestInsightEnterpriseAPI::test_cannot_delete_restricted_insight_as_other_user_who_is_project_member": 0.3030126390000305, 
"api/test/test_insight.py::TestInsightEnterpriseAPI::test_cannot_update_restricted_insight_as_other_user_who_is_project_member": 0.0702460490000476, "api/test/test_instance_settings.py::TestInstanceSettings::test_update_recordings_ttl_setting": 0.3128081999998358, "api/test/test_license.py::TestLicenseAPI::test_can_create_license": 0.27782517699995424, "api/test/test_license.py::TestLicenseAPI::test_can_list_and_retrieve_licenses": 0.03738143900011437, "api/test/test_license.py::TestLicenseAPI::test_friendly_error_when_license_key_is_invalid": 0.03446832000008726, "api/test/test_organization.py::TestOrganizationEnterpriseAPI::test_create_organization": 0.291186062999941, "api/test/test_organization.py::TestOrganizationEnterpriseAPI::test_create_two_similarly_named_organizations": 0.11064870799998516, "api/test/test_organization.py::TestOrganizationEnterpriseAPI::test_delete_last_organization": 1.0135875830000032, "api/test/test_organization.py::TestOrganizationEnterpriseAPI::test_delete_organization_owning": 0.9539744009999822, "api/test/test_organization.py::TestOrganizationEnterpriseAPI::test_delete_second_managed_organization": 0.9664851800000633, "api/test/test_organization.py::TestOrganizationEnterpriseAPI::test_feature_available_self_hosted_has_license": 0.025380661999975018, "api/test/test_organization.py::TestOrganizationEnterpriseAPI::test_feature_available_self_hosted_license_expired": 0.056076458000006824, "api/test/test_organization.py::TestOrganizationEnterpriseAPI::test_feature_available_self_hosted_no_license": 0.017713213000092765, "api/test/test_organization.py::TestOrganizationEnterpriseAPI::test_no_delete_organization_not_belonging_to": 0.07732569400002376, "api/test/test_organization.py::TestOrganizationEnterpriseAPI::test_no_delete_organization_not_owning": 0.045771792999971694, "api/test/test_organization.py::TestOrganizationEnterpriseAPI::test_no_update_organization_not_belonging_to": 0.08863866599995163, 
"api/test/test_organization.py::TestOrganizationEnterpriseAPI::test_update_org": 0.17792233899990606, "api/test/test_property_definition.py::TestPropertyDefinitionEnterpriseAPI::test_can_set_and_query_property_type_and_format": 0.297793805000083, "api/test/test_property_definition.py::TestPropertyDefinitionEnterpriseAPI::test_errors_on_invalid_property_type": 0.019766226000001552, "api/test/test_property_definition.py::TestPropertyDefinitionEnterpriseAPI::test_filter_property_definitions": 0.07105585399983738, "api/test/test_property_definition.py::TestPropertyDefinitionEnterpriseAPI::test_retrieve_create_property_definition": 0.06638952500009054, "api/test/test_property_definition.py::TestPropertyDefinitionEnterpriseAPI::test_retrieve_existing_property_definition": 0.06487501499998416, "api/test/test_property_definition.py::TestPropertyDefinitionEnterpriseAPI::test_search_property_definition": 0.2106401469999355, "api/test/test_property_definition.py::TestPropertyDefinitionEnterpriseAPI::test_update_property_definition": 0.0785092020000775, "api/test/test_property_definition.py::TestPropertyDefinitionEnterpriseAPI::test_update_property_without_license": 0.04914871400001175, "api/test/test_property_definition.py::TestPropertyDefinitionEnterpriseAPI::test_with_expired_license": 0.049958920000108265, "api/test/test_tagged_item.py::TestEnterpriseTaggedItemSerializerMixin::test_create_and_update_object_with_tags": 0.36589083999990635, "api/test/test_tagged_item.py::TestEnterpriseTaggedItemSerializerMixin::test_create_with_tags": 0.08318813200003206, "api/test/test_tagged_item.py::TestEnterpriseTaggedItemSerializerMixin::test_get_tags": 0.06868003900012809, "api/test/test_tagged_item.py::TestEnterpriseTaggedItemSerializerMixin::test_resolve_overlapping_tags_on_update": 0.13269314799981657, "api/test/test_team.py::TestProjectEnterpriseAPI::test_can_update_and_retrieve_person_property_names_excluded_from_correlation": 0.28547822500001985, 
"api/test/test_team.py::TestProjectEnterpriseAPI::test_create_project": 0.2976090040000372, "api/test/test_team.py::TestProjectEnterpriseAPI::test_delete_open_team_as_org_member_but_project_admin_forbidden": 0.25120040699994206, "api/test/test_team.py::TestProjectEnterpriseAPI::test_delete_private_team_as_org_member_but_project_admin_allowed": 1.1634538400002157, "api/test/test_team.py::TestProjectEnterpriseAPI::test_delete_second_team_as_org_admin_allowed": 1.139140483999995, "api/test/test_team.py::TestProjectEnterpriseAPI::test_delete_team_as_org_admin_allowed": 1.1330490459999965, "api/test/test_team.py::TestProjectEnterpriseAPI::test_delete_team_as_org_member_forbidden": 0.2597258609999926, "api/test/test_team.py::TestProjectEnterpriseAPI::test_disable_access_control_as_org_admin_allowed": 0.2670117080000409, "api/test/test_team.py::TestProjectEnterpriseAPI::test_disable_access_control_as_org_member_and_project_admin_forbidden": 0.2484690880000926, "api/test/test_team.py::TestProjectEnterpriseAPI::test_disable_access_control_as_org_member_forbidden": 0.23870212499991794, "api/test/test_team.py::TestProjectEnterpriseAPI::test_enable_access_control_as_org_admin_allowed": 0.2587440529999867, "api/test/test_team.py::TestProjectEnterpriseAPI::test_enable_access_control_as_org_member_and_project_admin_forbidden": 0.24982839599999807, "api/test/test_team.py::TestProjectEnterpriseAPI::test_enable_access_control_as_org_member_forbidden": 0.2531332169999132, "api/test/test_team.py::TestProjectEnterpriseAPI::test_fetch_nonexistent_team": 0.24415415999999368, "api/test/test_team.py::TestProjectEnterpriseAPI::test_fetch_private_team_as_org_member": 0.25203671099984604, "api/test/test_team.py::TestProjectEnterpriseAPI::test_fetch_private_team_as_org_member_and_project_admin": 0.2721469390000948, "api/test/test_team.py::TestProjectEnterpriseAPI::test_fetch_private_team_as_org_member_and_project_member": 0.26472059100012757, 
"api/test/test_team.py::TestProjectEnterpriseAPI::test_fetch_team_as_org_admin_works": 0.24244704899990666, "api/test/test_team.py::TestProjectEnterpriseAPI::test_fetch_team_as_org_member_works": 0.25422212499995567, "api/test/test_team.py::TestProjectEnterpriseAPI::test_fetch_team_as_org_outsider": 0.2528684159999557, "api/test/test_team.py::TestProjectEnterpriseAPI::test_list_teams_restricted_ones_hidden": 0.2949890850001111, "api/test/test_team.py::TestProjectEnterpriseAPI::test_no_delete_team_not_administrating_organization": 0.26110486900006435, "api/test/test_team.py::TestProjectEnterpriseAPI::test_no_delete_team_not_belonging_to_organization": 0.5357178230000272, "api/test/test_team.py::TestProjectEnterpriseAPI::test_non_admin_cannot_create_project": 0.24182944500000758, "api/test/test_team.py::TestProjectEnterpriseAPI::test_rename_private_project_as_org_member_and_project_member_allowed": 0.276011363000066, "api/test/test_team.py::TestProjectEnterpriseAPI::test_rename_private_project_as_org_member_forbidden": 0.24782418300003428, "api/test/test_team.py::TestProjectEnterpriseAPI::test_rename_private_project_current_as_org_outsider_forbidden": 0.25099710400002095, "api/test/test_team.py::TestProjectEnterpriseAPI::test_rename_private_project_id_as_org_outsider_forbidden": 0.24668497600009687, "api/test/test_team.py::TestProjectEnterpriseAPI::test_rename_project_as_org_member_allowed": 0.2717315359999475, "api/test/test_team.py::TestProjectEnterpriseAPI::test_user_that_does_not_belong_to_an_org_cannot_create_a_project": 0.24225644800014834, "api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_admin_as_org_admin_allowed": 0.2793525839999802, "api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_admin_as_project_admin_allowed": 0.2600340619999315, "api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_admin_as_project_member_forbidden": 0.2463045739999643, 
"api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_member_as_org_admin_allowed": 0.26004586199996993, "api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_member_as_org_admin_and_project_member_allowed": 0.2666466030000265, "api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_member_as_org_member_and_project_member_forbidden": 0.25819924999996147, "api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_member_as_org_member_but_project_admin_allowed": 0.2616272720000552, "api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_member_as_org_member_forbidden": 0.2527792150000323, "api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_member_as_org_owner_allowed": 0.27260894000016833, "api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_member_to_non_current_project_allowed": 0.26557099499996184, "api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_member_to_non_private_project_forbidden": 0.2503179979999004, "api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_member_to_nonexistent_project_forbidden": 0.24157024199985244, "api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_member_to_project_in_outside_organization_forbidden": 0.27633936399990944, "api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_member_to_project_that_is_not_organization_member_forbidden": 0.2887145429999691, "api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_yourself_as_org_admin_forbidden": 0.2496074939999744, "api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_add_yourself_as_org_member_forbidden": 0.25026989799994226, "api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_demote_yourself_as_org_member_and_project_admin_forbidden": 0.26032516199984457, "api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_leave_project_as_admin_allowed": 0.23874632400008977, 
"api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_leave_project_as_admin_member": 0.24584666900011598, "api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_leave_project_as_organization_outsider": 0.24233914700005244, "api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_leave_project_as_project_outsider": 0.24035623499992198, "api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_remove_member_as_org_admin_allowed": 0.2596780570000874, "api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_remove_member_as_org_member_allowed": 0.25389352100012275, "api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_remove_member_as_org_member_but_project_admin_allowed": 0.2645233890001464, "api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_set_current_project_no_access": 0.3648177290000376, "api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_set_level_of_member_to_admin_as_org_member_but_project_admin_allowed": 0.2809614939999392, "api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_set_level_of_member_to_admin_as_org_member_forbidden": 0.2503241979999302, "api/test/test_team_memberships.py::TestTeamMembershipsAPI::test_set_level_of_member_to_admin_as_org_owner_allowed": 0.26769050900009006, "clickhouse/materialized_columns/test/test_analyze.py::TestMaterializedColumnsAnalyze::test_query_class": 0.2929962700000033, "clickhouse/materialized_columns/test/test_analyze.py::TestMaterializedColumnsAnalyze::test_query_class_edge_cases": 0.27079402800006847, "clickhouse/materialized_columns/test/test_columns.py::TestMaterializedColumns::test_backfilling_data": 4.971888362999948, "clickhouse/materialized_columns/test/test_columns.py::TestMaterializedColumns::test_caching_and_materializing": 3.095315684999946, "clickhouse/materialized_columns/test/test_columns.py::TestMaterializedColumns::test_column_types": 2.9015278380001064, 
"clickhouse/materialized_columns/test/test_columns.py::TestMaterializedColumns::test_get_columns_default": 2.193842987000153, "clickhouse/materialized_columns/test/test_columns.py::TestMaterializedColumns::test_materialized_column_naming": 2.9913270799999054, "clickhouse/models/test/test_action.py::TestActions::test_attributes": 0.2988137449999613, "clickhouse/models/test/test_action.py::TestActions::test_empty_selector_same_as_null": 0.29273630599993794, "clickhouse/models/test/test_action.py::TestActions::test_filter_events_by_url": 0.41622951100009686, "clickhouse/models/test/test_action.py::TestActions::test_filter_with_selector_direct_decendant_ordering": 0.3384301040000537, "clickhouse/models/test/test_action.py::TestActions::test_filter_with_selector_id": 0.3254719199999272, "clickhouse/models/test/test_action.py::TestActions::test_filter_with_selector_nested": 0.33171846000016103, "clickhouse/models/test/test_action.py::TestActions::test_filter_with_selector_nth_child": 0.3160601490001227, "clickhouse/models/test/test_action.py::TestActions::test_filter_with_selector_star": 0.3236635309999656, "clickhouse/models/test/test_action.py::TestActions::test_no_person_leakage_from_other_teams": 0.34063595300006, "clickhouse/models/test/test_action.py::TestActions::test_no_steps": 0.2719697590000578, "clickhouse/models/test/test_action.py::TestActions::test_person_property": 0.36889805700013767, "clickhouse/models/test/test_action.py::TestActions::test_person_with_different_distinct_id": 0.3076948140000013, "clickhouse/models/test/test_action.py::TestActions::test_with_class": 0.30125937000002523, "clickhouse/models/test/test_action.py::TestActions::test_with_class_with_escaped_slashes": 0.2782861049998928, "clickhouse/models/test/test_action.py::TestActions::test_with_class_with_escaped_symbols": 0.2756813859999738, "clickhouse/models/test/test_action.py::TestActions::test_with_normal_filters": 0.35708307100003367, 
"clickhouse/models/test/test_action.py::TestActionFormat::test_double": 0.3136324599998943, "clickhouse/models/test/test_action.py::TestActionFormat::test_filter_event_contains_url": 0.2865217639999855, "clickhouse/models/test/test_action.py::TestActionFormat::test_filter_event_exact_url": 0.2793797120000363, "clickhouse/models/test/test_action.py::TestActionFormat::test_filter_event_regex_url": 0.2767224939999551, "clickhouse/models/test/test_cohort.py::TestCohort::test_clickhouse_empty_query": 0.5543252930000335, "clickhouse/models/test/test_cohort.py::TestCohort::test_cohort_change": 0.9782817470000964, "clickhouse/models/test/test_cohort.py::TestCohort::test_cohort_get_person_ids_by_cohort_id": 0.3894971050001459, "clickhouse/models/test/test_cohort.py::TestCohort::test_cohortpeople_action_basic": 0.7807200240000611, "clickhouse/models/test/test_cohort.py::TestCohort::test_cohortpeople_action_count": 2.2720838269999604, "clickhouse/models/test/test_cohort.py::TestCohort::test_cohortpeople_basic": 0.6589882369999032, "clickhouse/models/test/test_cohort.py::TestCohort::test_cohortpeople_basic_paginating": 41.78915190299995, "clickhouse/models/test/test_cohort.py::TestCohort::test_cohortpeople_deleted_person": 0.9414504290000423, "clickhouse/models/test/test_cohort.py::TestCohort::test_cohortpeople_prop_changed": 1.0530305590000353, "clickhouse/models/test/test_cohort.py::TestCohort::test_cohortpeople_timestamp": 0.5659470409998448, "clickhouse/models/test/test_cohort.py::TestCohort::test_cohortpeople_with_cyclic_cohort_filter": 0.5761093540002094, "clickhouse/models/test/test_cohort.py::TestCohort::test_cohortpeople_with_nonexistent_other_cohort_filter": 0.5895291050001106, "clickhouse/models/test/test_cohort.py::TestCohort::test_cohortpeople_with_valid_other_cohort_filter": 1.4936825730001146, "clickhouse/models/test/test_cohort.py::TestCohort::test_insert_by_distinct_id_or_email": 0.8271157049998692, 
"clickhouse/models/test/test_cohort.py::TestCohort::test_prop_cohort_basic": 0.4282670170000529, "clickhouse/models/test/test_cohort.py::TestCohort::test_prop_cohort_basic_action": 0.382926007000151, "clickhouse/models/test/test_cohort.py::TestCohort::test_prop_cohort_basic_action_days": 0.43042934300001434, "clickhouse/models/test/test_cohort.py::TestCohort::test_prop_cohort_basic_event_days": 0.41711029200018856, "clickhouse/models/test/test_cohort.py::TestCohort::test_prop_cohort_multiple_groups": 0.3619348719998925, "clickhouse/models/test/test_cohort.py::TestCohort::test_prop_cohort_with_negation": 0.35164425499965546, "clickhouse/models/test/test_cohort.py::TestCohort::test_static_cohort_precalculated": 0.6329738210001778, "clickhouse/models/test/test_dead_letter_queue.py::TestDeadLetterQueue::test_direct_table_insert": 0.3093282789998284, "clickhouse/models/test/test_dead_letter_queue.py::TestDeadLetterQueue::test_kafka_insert": 2.705604937000089, "clickhouse/models/test/test_element.py::TestClickhouseElement::test_broken_class_names": 0.22932417999982135, "clickhouse/models/test/test_element.py::TestClickhouseElement::test_elements_to_string": 0.21458290800001123, "clickhouse/models/test/test_filters.py::TestFilters::test_old_style_properties": 0.2284083089998603, "clickhouse/models/test/test_filters.py::TestFilters::test_recursive_cohort": 0.012707051000006686, "clickhouse/models/test/test_filters.py::TestFilters::test_simplify_cohorts": 0.4420695439998781, "clickhouse/models/test/test_filters.py::TestFilters::test_simplify_entities": 0.008221897000112222, "clickhouse/models/test/test_filters.py::TestFilters::test_simplify_entities_with_group_math": 0.00597537099974943, "clickhouse/models/test/test_filters.py::TestFilters::test_simplify_funnel_entities_when_aggregating_by_group": 0.0047904560001370555, "clickhouse/models/test/test_filters.py::TestFilters::test_simplify_hasdone_cohort": 0.00756629000011344, 
"clickhouse/models/test/test_filters.py::TestFilters::test_simplify_multi_group_cohort": 0.007232887000100163, "clickhouse/models/test/test_filters.py::TestFilters::test_simplify_no_such_cohort": 0.005863469999894733, "clickhouse/models/test/test_filters.py::TestFilters::test_simplify_static_cohort": 0.007087383999987651, "clickhouse/models/test/test_filters.py::TestFilters::test_simplify_test_accounts": 0.006433877999825199, "clickhouse/models/test/test_filters.py::TestFilters::test_simplify_when_aggregating_by_group": 0.004580554000085613, "clickhouse/models/test/test_filters.py::TestFilters::test_to_dict": 0.006405177000033291, "clickhouse/models/test/test_filters.py::TestFiltering::test_boolean_filters": 0.27473465899993244, "clickhouse/models/test/test_filters.py::TestFiltering::test_boolean_filters_persons": 0.3048028149999027, "clickhouse/models/test/test_filters.py::TestFiltering::test_contains": 0.26397233100010453, "clickhouse/models/test/test_filters.py::TestFiltering::test_contains_persons": 0.3044517120004002, "clickhouse/models/test/test_filters.py::TestFiltering::test_does_not_contain": 0.2898376369998914, "clickhouse/models/test/test_filters.py::TestFiltering::test_does_not_contain_persons": 0.376624166000056, "clickhouse/models/test/test_filters.py::TestFiltering::test_element_filter": 0.2844423739998092, "clickhouse/models/test/test_filters.py::TestFiltering::test_element_selectors": 0.2531651020001391, "clickhouse/models/test/test_filters.py::TestFiltering::test_filter_out_team_members": 0.36418391899997005, "clickhouse/models/test/test_filters.py::TestFiltering::test_filter_out_team_members_persons": 0.3151888370000506, "clickhouse/models/test/test_filters.py::TestFiltering::test_incomplete_data": 0.2091799820000233, "clickhouse/models/test/test_filters.py::TestFiltering::test_invalid_regex": 0.27116781600011564, "clickhouse/models/test/test_filters.py::TestFiltering::test_invalid_regex_persons": 0.3187159810001958, 
"clickhouse/models/test/test_filters.py::TestFiltering::test_is_not": 0.288168818000031, "clickhouse/models/test/test_filters.py::TestFiltering::test_is_not_persons": 0.30139687400014736, "clickhouse/models/test/test_filters.py::TestFiltering::test_is_not_set_and_is_set": 0.27984402000015507, "clickhouse/models/test/test_filters.py::TestFiltering::test_is_not_set_and_is_set_persons": 0.3057095260001006, "clickhouse/models/test/test_filters.py::TestFiltering::test_is_not_true_false": 0.27038610800013885, "clickhouse/models/test/test_filters.py::TestFiltering::test_is_not_true_false_persons": 0.2919430629999624, "clickhouse/models/test/test_filters.py::TestFiltering::test_json_object": 0.3149137350001183, "clickhouse/models/test/test_filters.py::TestFiltering::test_multiple": 0.2803240239998104, "clickhouse/models/test/test_filters.py::TestFiltering::test_multiple_equality": 0.30460111299976234, "clickhouse/models/test/test_filters.py::TestFiltering::test_multiple_equality_persons": 0.4215305000002445, "clickhouse/models/test/test_filters.py::TestFiltering::test_multiple_persons": 0.3162789799998791, "clickhouse/models/test/test_filters.py::TestFiltering::test_numerical": 0.32342752399972596, "clickhouse/models/test/test_filters.py::TestFiltering::test_numerical_person_properties": 0.41265487500004383, "clickhouse/models/test/test_filters.py::TestFiltering::test_person_cohort_properties": 0.38604161300008855, "clickhouse/models/test/test_filters.py::TestFiltering::test_regex": 0.28057177800019417, "clickhouse/models/test/test_filters.py::TestFiltering::test_regex_persons": 0.335300031000088, "clickhouse/models/test/test_filters.py::TestFiltering::test_simple": 0.29235738399984257, "clickhouse/models/test/test_filters.py::TestFiltering::test_simple_persons": 0.35399515600011, "clickhouse/models/test/test_filters.py::TestFiltering::test_true_false": 0.27782483100008903, "clickhouse/models/test/test_filters.py::TestFiltering::test_user_properties": 0.46952276399974835, 
"clickhouse/models/test/test_filters.py::TestFiltering::test_user_properties_numerical": 0.4053115249998882, "clickhouse/models/test/test_filters.py::PGTestFilters::test_old_style_properties": 0.2377099709999584, "clickhouse/models/test/test_filters.py::PGTestFilters::test_simplify_test_accounts": 0.00769589000015003, "clickhouse/models/test/test_filters.py::PGTestFilters::test_to_dict": 0.0074164850000215665, "clickhouse/models/test/test_plugin_log_entry.py::TestEvent::test_log_limit_works": 0.2611102429998482, "clickhouse/models/test/test_plugin_log_entry.py::TestEvent::test_log_search_works": 0.033543089999739095, "clickhouse/models/test/test_plugin_log_entry.py::TestEvent::test_log_type_filter_works": 0.04158598499998334, "clickhouse/models/test/test_plugin_log_entry.py::TestEvent::test_simple_log_is_fetched": 0.030906861000175923, "clickhouse/models/test/test_property.py::TestPropFormat::test_parse_groups": 0.39470870000013747, "clickhouse/models/test/test_property.py::TestPropFormat::test_parse_groups_invalid_type": 0.22066217000019606, "clickhouse/models/test/test_property.py::TestPropFormat::test_parse_groups_persons": 0.6529506100000617, "clickhouse/models/test/test_property.py::TestPropFormat::test_prop_decimals": 0.38424777800014454, "clickhouse/models/test/test_property.py::TestPropFormat::test_prop_element": 0.4768091559997174, "clickhouse/models/test/test_property.py::TestPropFormat::test_prop_element_with_space": 0.2467280739999751, "clickhouse/models/test/test_property.py::TestPropFormat::test_prop_event": 0.3307820540001103, "clickhouse/models/test/test_property.py::TestPropFormat::test_prop_ints_saved_as_strings": 0.41509643799986407, "clickhouse/models/test/test_property.py::TestPropFormat::test_prop_person": 0.372027435000291, "clickhouse/models/test/test_property.py::TestPropDenormalized::test_get_property_string_expr": 0.558604196000033, "clickhouse/models/test/test_property.py::TestPropDenormalized::test_prop_event_denormalized": 
0.8733003930003633, "clickhouse/models/test/test_property.py::TestPropDenormalized::test_prop_event_denormalized_ints": 0.3213774989999365, "clickhouse/models/test/test_property.py::TestPropDenormalized::test_prop_person_denormalized": 0.44944057299971973, "clickhouse/models/test/test_property.py::test_parse_prop_clauses_defaults": 0.011038910999786822, "clickhouse/models/test/test_property.py::test_parse_groups_persons_edge_case_with_single_filter": 0.0219223190001685, "clickhouse/models/test/test_property.py::test_breakdown_query_expression[$browser-events-prop-trim(BOTH '\"' FROM JSONExtractRaw(properties, '$browser')) AS prop]": 0.018359583999654205, "clickhouse/models/test/test_property.py::test_breakdown_query_expression[breakdown1-events-value-array(trim(BOTH '\"' FROM JSONExtractRaw(properties, '$browser'))) AS value]": 0.018461683000168705, "clickhouse/models/test/test_property.py::test_breakdown_query_expression[breakdown2-events-prop-array(trim(BOTH '\"' FROM JSONExtractRaw(properties, '$browser')),trim(BOTH '\"' FROM JSONExtractRaw(properties, '$browser_version'))) AS prop]": 0.033035830000017086, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[property0-expected_event_indexes0]": 0.6079174520000379, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[property1-expected_event_indexes1]": 0.6169437419998758, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[property2-expected_event_indexes2]": 0.6309393799997451, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[matching a number only matches event index 4 from test_events]": 0.6085936580002453, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[matching on email is not a value matches all but the first event from test_events]": 0.6035813080000025, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[matching on email is not a value matches all but the first two events from 
test_events]": 0.6022713939998994, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[property6-expected_event_indexes6]": 0.634538116000158, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[property7-expected_event_indexes7]": 0.6228927910001403, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[property8-expected_event_indexes8]": 0.6266716119998819, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[matching for email property not being set matches all but the first two events from test_events]": 0.6364778209999713, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[matching before a unix timestamp only querying by date]": 0.6187848100000792, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[matching after a unix timestamp only querying by date]": 0.6239442730000064, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[matching before a unix timestamp querying by date and time]": 0.6478049569998348, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[matching after a unix timestamp querying by date and time]": 0.634974005000231, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[property14-expected_event_indexes14]": 0.6089837939998688, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[match after date only value against date and time formatted property]": 0.6479464600001847, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[property16-expected_event_indexes16]": 0.6338227900002948, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[property17-expected_event_indexes17]": 0.6584334850001596, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[property18-expected_event_indexes18]": 0.6399794660001135, 
"clickhouse/models/test/test_property.py::test_prop_filter_json_extract[property19-expected_event_indexes19]": 0.6415282810000917, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[property20-expected_event_indexes20]": 0.6605258090000916, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[property21-expected_event_indexes21]": 0.6297912429999997, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[matching a unix timestamp in seconds with fractional seconds after the decimal point]": 0.6040659119998963, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[matching unix timestamp in milliseconds after a given date (which ClickHouse doesn't support)]": 0.6100420219997886, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[matching unix timestamp in milliseconds before a given date (which ClickHouse doesn't support)]": 0.6248679820000689, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[matching rfc 822 format date with timeszone offset before a given date]": 0.614944443000013, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[matching rfc 822 format date takes into account timeszone offset after a given date]": 0.6109687260000101, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[matching rfc 822 format date after a given date]": 0.6219304710000415, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[matching ISO 8601 format date before a given date]": 0.61399289100018, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[matching ISO 8601 format date after a given date]": 0.6115532620001431, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[matching full format date with date parts n increasing order before a given date]": 0.6090735329999006, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[matching full format date 
with date parts in increasing order after a given date]": 0.6043441799999982, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[matching full format date with date parts separated by slashes before a given date]": 0.6590524119999372, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[matching full format date with date parts separated by slashes after a given date]": 0.6034888700003194, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[matching full format date with date parts increasing in size and separated by slashes before a given date]": 0.6161884159998863, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[matching full format date with date parts increasing in size and separated by slashes after a given date]": 0.5963952780000454, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[can match dates exactly]": 0.6595950709995577, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[can match dates exactly against datetimes and unix timestamps]": 0.6245846419999452, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[can match date times exactly against datetimes with milliseconds]": 0.6236234319999312, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[can match date only filter after against datetime with milliseconds]": 0.6123217929998646, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[can match after date only values]": 0.6241406379999717, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract[can match before date only values]": 0.6243780400000105, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[property0-expected_event_indexes0]": 1.1464682299999822, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[property1-expected_event_indexes1]": 0.6845689780000157, 
"clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[property2-expected_event_indexes2]": 0.6881973219999509, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[matching a number only matches event index 4 from test_events]": 0.6706528070001241, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[matching on email is not a value matches all but the first event from test_events]": 0.67468018999989, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[matching on email is not a value matches all but the first two events from test_events]": 0.681788294999933, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[property6-expected_event_indexes6]": 0.7114959349996752, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[property7-expected_event_indexes7]": 0.6559901010002704, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[property8-expected_event_indexes8]": 0.6601816489999237, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[matching for email property not being set matches all but the first two events from test_events]": 0.650861242000019, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[matching before a unix timestamp only querying by date]": 0.953866405999861, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[matching after a unix timestamp only querying by date]": 0.673482500000091, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[matching before a unix timestamp querying by date and time]": 0.6583991289999176, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[matching after a unix timestamp querying by date and time]": 0.6956765529998847, 
"clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[property14-expected_event_indexes14]": 0.9202850209999269, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[match after date only value against date and time formatted property]": 0.6765595920001033, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[property16-expected_event_indexes16]": 0.682421387000204, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[property17-expected_event_indexes17]": 0.6825384879996363, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[property18-expected_event_indexes18]": 0.9289840090000325, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[property19-expected_event_indexes19]": 0.6717013590000533, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[property20-expected_event_indexes20]": 0.6676615110000057, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[property21-expected_event_indexes21]": 0.666431096999986, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[matching a unix timestamp in seconds with fractional seconds after the decimal point]": 0.922171728000194, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[matching unix timestamp in milliseconds after a given date (which ClickHouse doesn't support)]": 0.9374720090002029, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[matching unix timestamp in milliseconds before a given date (which ClickHouse doesn't support)]": 0.6901432790002673, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[matching rfc 822 format date with timeszone offset before a given date]": 0.9332216579998658, 
"clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[matching rfc 822 format date takes into account timeszone offset after a given date]": 0.7222106659999099, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[matching rfc 822 format date after a given date]": 0.6790922369998498, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[matching ISO 8601 format date before a given date]": 0.9468420860000606, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[matching ISO 8601 format date after a given date]": 0.686128017000101, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[matching full format date with date parts n increasing order before a given date]": 0.9375744810001834, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[matching full format date with date parts in increasing order after a given date]": 0.7494236400000318, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[matching full format date with date parts separated by slashes before a given date]": 0.9613799530000051, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[matching full format date with date parts separated by slashes after a given date]": 0.6890976509998836, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[matching full format date with date parts increasing in size and separated by slashes before a given date]": 0.9825866950000091, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[matching full format date with date parts increasing in size and separated by slashes after a given date]": 0.7222404290000668, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[can match dates exactly]": 0.9492255809998369, 
"clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[can match dates exactly against datetimes and unix timestamps]": 0.9698335149998911, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[can match date times exactly against datetimes with milliseconds]": 1.018083261999891, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[can match date only filter after against datetime with milliseconds]": 0.7090631539999777, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[can match after date only values]": 0.7670715120000295, "clickhouse/models/test/test_property.py::test_prop_filter_json_extract_materialized[can match before date only values]": 0.7214689940001335, "clickhouse/models/test/test_team.py::TestDeleteEvents::test_delete_cohorts": 3.725890371999867, "clickhouse/models/test/test_team.py::TestDeleteEvents::test_delete_events": 3.047647718000235, "clickhouse/models/test/test_team.py::TestDeleteEvents::test_delete_groups": 3.0242900459998054, "clickhouse/models/test/test_team.py::TestDeleteEvents::test_delete_persons": 3.060555454999985, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_basic_funnel_default_funnel_days_breakdown_action": 0.7822757929998261, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_basic_funnel_default_funnel_days_breakdown_action_materialized": 1.4305942460000551, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_basic_funnel_default_funnel_days_breakdown_event": 0.687951208999948, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_aggregate_by_groups_breakdown_group": 1.6083053880001899, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_breakdown_group": 5.5452675590001945, 
"clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_cohort_breakdown": 3.9979042740001205, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_cohort_breakdown_materialized": 5.507798778999813, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_event": 2.0199801610001487, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_event_materialized": 3.374047238000003, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_event_no_type": 2.005498108999973, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_event_no_type_materialized": 3.272468886000297, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_event_single_person_events_with_multiple_properties": 1.5397863310001867, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_event_single_person_multiple_breakdowns": 1.2960788390000744, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_event_single_person_multiple_breakdowns_materialized": 2.11584652800002, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_event_with_other": 2.1195055689997844, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_event_with_other_materialized": 3.3391058800000337, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_event_with_string_only_breakdown": 2.0670073260000663, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_event_with_string_only_breakdown_materialized": 3.2149403460000485, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_limit": 
2.9630569439998453, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_limit_materialized": 4.756527496999979, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_person": 2.7164273569999295, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_breakdown_person_materialized": 3.8988054079998165, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_custom_breakdown_limit_with_nulls": 1.5111044989996572, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_custom_breakdown_limit_with_nulls_included": 3.2056975379998676, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_custom_breakdown_limit_with_nulls_included_materialized": 4.933803905000104, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_custom_breakdown_limit_with_nulls_materialized": 2.4455181629998606, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_multi_property_breakdown_event": 2.836137966999786, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelBreakdown::test_funnel_step_multi_property_breakdown_event_materialized": 4.727276307000238, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelConversionTime::test_funnel_step_conversion_times": 0.6599493610001446, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelConversionTime::test_funnel_times_with_different_conversion_windows": 1.5845424330000242, "clickhouse/queries/funnels/test/test_funnel.py::TestFunnelConversionTime::test_funnel_with_multiple_incomplete_tries": 0.8317731799998, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_advanced_funnel_exclusions_between_steps": 5.9301728099997035, 
"clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_advanced_funnel_multiple_exclusions_between_steps": 5.190497372999971, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_advanced_funnel_with_repeat_steps": 3.575092067999776, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_advanced_funnel_with_repeat_steps_out_of_order_events": 3.661283586000309, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_basic_funnel_default_funnel_days": 0.4623547660000895, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_basic_funnel_with_derivative_steps": 0.9353196410002056, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_basic_funnel_with_derivative_steps_materialized": 1.5653550729998642, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_basic_funnel_with_repeat_step_updated_param": 1.1833059870000397, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_basic_funnel_with_repeat_steps": 0.8670526570003858, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_breakdown_values_is_set_on_the_query_with_fewer_than_two_entities": 0.4343659869998646, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_conversion_window": 2.237267688999964, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_default": 0.4969645080002465, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_events": 0.8837360699999408, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_exclusion_no_end_event": 1.079551181999932, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_exclusions_full_window": 1.026446567999983, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_exclusions_invalid_params": 
0.23215898500006915, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_exclusions_with_actions": 1.144367431000319, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_exclusions_with_actions_materialized": 1.8673620879999362, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_filter_by_action_with_person_properties": 0.8271193620003032, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_filter_by_action_with_person_properties_materialized": 1.0735064119996878, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_filter_test_accounts": 0.5804153130000032, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_filter_test_accounts_materialized": 0.7500228820001666, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_multiple_actions": 0.6200668779999887, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_multiple_actions_materialized": 1.093523360000063, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_no_events": 0.2254575089998525, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_person_prop": 0.7718295340000623, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_person_prop_materialized": 0.9973361449997356, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_prop_filters": 0.744536620999952, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_prop_filters_materialized": 1.2912510480000492, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_prop_filters_per_entity": 0.8439071689999764, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_prop_filters_per_entity_materialized": 1.4319459939997614, 
"clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_skipped_step": 0.6106779130000177, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_with_actions": 1.0563525309999022, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_with_actions_and_events": 0.002528428999994503, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_with_actions_and_events_materialized": 0.5409396130000914, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_with_actions_materialized": 1.7551921570002378, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_with_cohorts_step_filter": 1.3161867170001642, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_with_denormalised_properties": 0.7510695259998101, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_with_elements_chain": 1.3938436150003781, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_with_entity_person_property_filters": 0.5852809369996521, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_with_entity_person_property_filters_materialized": 0.8397840669997549, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_with_matching_properties": 4.106245068000135, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_with_matching_properties_materialized": 6.036297158999787, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_with_precalculated_cohort_step_filter": 1.502701049000052, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_with_single_step": 0.4561229360001562, "clickhouse/queries/funnels/test/test_funnel.py::TestClickhouseFunnel::test_funnel_with_static_cohort_step_filter": 0.9764667089998511, 
"clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_action_events_are_excluded_from_correlations": 1.6163770519997342, "clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_basic_funnel_correlation_with_events": 4.600307396999597, "clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_basic_funnel_correlation_with_properties": 5.546551295999734, "clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_basic_funnel_correlation_with_properties_materialized": 6.885209079000106, "clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_correlation_with_multiple_properties": 3.547724621000043, "clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_correlation_with_multiple_properties_materialized": 5.119495246000042, "clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_correlation_with_properties_raises_validation_error": 0.29834234200006904, "clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_discarding_insignificant_events": 1.8388061219998235, "clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_events_within_conversion_window_for_correlation": 0.7433576779999385, "clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_funnel_correlation_with_event_properties": 3.220979678000049, "clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_funnel_correlation_with_event_properties_and_groups": 3.421383526000227, "clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_funnel_correlation_with_event_properties_and_groups_materialized": 5.1097203729998455, 
"clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_funnel_correlation_with_event_properties_autocapture": 2.6696261290001075, "clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_funnel_correlation_with_event_properties_autocapture_materialized": 4.361194937000164, "clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_funnel_correlation_with_event_properties_exclusions": 1.6586491949997253, "clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_funnel_correlation_with_event_properties_materialized": 5.348084824999887, "clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_funnel_correlation_with_events_and_groups": 7.864483298999858, "clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_funnel_correlation_with_properties_and_groups": 6.620825958000296, "clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_funnel_correlation_with_properties_and_groups_materialized": 8.214822424999966, "clickhouse/queries/funnels/test/test_funnel_correlation.py::TestClickhouseFunnelCorrelation::test_no_divide_by_zero_errors": 0.8341786480000337, "clickhouse/queries/funnels/test/test_funnel_correlation_persons.py::TestClickhouseFunnelCorrelationActors::test_basic_funnel_correlation_with_events": 3.838996248000285, "clickhouse/queries/funnels/test/test_funnel_correlation_persons.py::TestClickhouseFunnelCorrelationActors::test_create_funnel_correlation_cohort": 2.3665549949998876, "clickhouse/queries/funnels/test/test_funnel_correlation_persons.py::TestClickhouseFunnelCorrelationActors::test_funnel_correlation_on_event_with_recordings": 3.5324025419997724, 
"clickhouse/queries/funnels/test/test_funnel_correlation_persons.py::TestClickhouseFunnelCorrelationActors::test_funnel_correlation_on_properties_with_recordings": 1.4507248160000472, "clickhouse/queries/funnels/test/test_funnel_correlation_persons.py::TestClickhouseFunnelCorrelationActors::test_people_arent_returned_multiple_times": 0.6297126120000485, "clickhouse/queries/funnels/test/test_funnel_correlation_persons.py::TestClickhouseFunnelCorrelationActors::test_strict_funnel_correlation_with_recordings": 2.559967122999751, "clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_basic_offset": 9.108401072999868, "clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_first_step": 2.4926462749997427, "clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_first_step_breakdown_person": 2.914188961000036, "clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_first_step_breakdown_person_materialized": 4.276264803000004, "clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_first_step_breakdowns": 1.4289740989997881, "clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_first_step_breakdowns_materialized": 2.3658417350000036, "clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_first_step_breakdowns_with_multi_property_breakdown": 1.533098984000162, "clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_funnel_cohort_breakdown_persons": 0.7060965979997036, "clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_funnel_cohort_breakdown_persons_materialized": 1.219194183999889, "clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_funnel_person_recordings": 4.5400050700000065, "clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_last_step": 2.2818017430001873, 
"clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_last_step_dropoff": 2.4398935609999626, "clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_second_step_dropoff": 2.4470345830002316, "clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_steps_with_custom_steps_parameter_are_equivalent_to_funnel_step": 5.46171497499995, "clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_steps_with_custom_steps_parameter_overrides_funnel_step": 2.3755130880001616, "clickhouse/queries/funnels/test/test_funnel_persons.py::TestFunnelPersons::test_steps_with_custom_steps_parameter_where_funnel_step_equivalence_isnt_possible": 3.627552442000251, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_basic_funnel_default_funnel_days_breakdown_action": 0.7495378410001194, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_basic_funnel_default_funnel_days_breakdown_action_materialized": 1.3626300290002291, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_basic_funnel_default_funnel_days_breakdown_event": 0.7160528720000912, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_aggregate_by_groups_breakdown_group": 1.5931643850001365, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_breakdown_group": 4.54438227300011, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_cohort_breakdown": 4.349313850000044, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_cohort_breakdown_materialized": 5.126916831999779, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_event": 1.7950467050000043, 
"clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_event_materialized": 2.822080638999978, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_event_no_type": 1.8133615169999757, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_event_no_type_materialized": 2.879918100000168, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_event_single_person_events_with_multiple_properties": 1.512550580000152, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_event_single_person_multiple_breakdowns": 1.4361708080000426, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_event_single_person_multiple_breakdowns_materialized": 2.304778130999921, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_event_with_other": 1.8328721320001478, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_event_with_other_materialized": 3.004101483999875, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_event_with_string_only_breakdown": 1.763694328999918, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_event_with_string_only_breakdown_materialized": 2.979596810999965, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_limit": 2.8132434560002366, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_limit_materialized": 4.645683301999952, 
"clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_person": 2.3058552509999117, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_breakdown_person_materialized": 3.4646833909998804, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_custom_breakdown_limit_with_nulls": 1.379202059000363, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_custom_breakdown_limit_with_nulls_included": 2.7685065130001476, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_custom_breakdown_limit_with_nulls_included_materialized": 4.684304588000032, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_custom_breakdown_limit_with_nulls_materialized": 2.3344074489998548, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_multi_property_breakdown_event": 2.4793493049999142, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_funnel_step_multi_property_breakdown_event_materialized": 4.207911139999851, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsBreakdown::test_strict_breakdown_events_with_multiple_properties": 1.5766085950001525, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsConversionTime::test_funnel_step_conversion_times": 0.6149737180003285, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsConversionTime::test_funnel_times_with_different_conversion_windows": 1.4428837680002289, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictStepsConversionTime::test_funnel_with_multiple_incomplete_tries": 0.7225563459999194, 
"clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictSteps::test_advanced_strict_funnel": 2.6134847269997863, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictSteps::test_basic_strict_funnel": 1.6607829540000694, "clickhouse/queries/funnels/test/test_funnel_strict.py::TestFunnelStrictSteps::test_basic_strict_funnel_conversion_times": 1.2292291329997624, "clickhouse/queries/funnels/test/test_funnel_strict_persons.py::TestFunnelStrictStepsPersons::test_first_step": 2.296423017000052, "clickhouse/queries/funnels/test/test_funnel_strict_persons.py::TestFunnelStrictStepsPersons::test_second_step": 2.1474556160001157, "clickhouse/queries/funnels/test/test_funnel_strict_persons.py::TestFunnelStrictStepsPersons::test_second_step_dropoff": 2.1625305879999814, "clickhouse/queries/funnels/test/test_funnel_strict_persons.py::TestFunnelStrictStepsPersons::test_strict_funnel_person_recordings": 3.5188653759998942, "clickhouse/queries/funnels/test/test_funnel_strict_persons.py::TestFunnelStrictStepsPersons::test_third_step": 2.0835885869998947, "clickhouse/queries/funnels/test/test_funnel_time_to_convert.py::TestFunnelTrends::test_auto_bin_count_single_step": 2.5772637250001935, "clickhouse/queries/funnels/test/test_funnel_time_to_convert.py::TestFunnelTrends::test_auto_bin_count_single_step_duplicate_events": 0.002426427000273179, "clickhouse/queries/funnels/test/test_funnel_time_to_convert.py::TestFunnelTrends::test_auto_bin_count_total": 4.31286035800008, "clickhouse/queries/funnels/test/test_funnel_time_to_convert.py::TestFunnelTrends::test_basic_strict": 2.3168602619998637, "clickhouse/queries/funnels/test/test_funnel_time_to_convert.py::TestFunnelTrends::test_basic_unordered": 5.60864835700022, "clickhouse/queries/funnels/test/test_funnel_time_to_convert.py::TestFunnelTrends::test_custom_bin_count_single_step": 1.9253001889999268, "clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_all_date_range": 
1.0679053960000147, "clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_all_results_for_day_interval": 0.8982599549999577, "clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_day_interval": 0.8069012949999887, "clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_from_second_step": 0.7320546420000937, "clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_funnel_step_breakdown_event": 0.7298650160000761, "clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_funnel_step_breakdown_person": 0.9575054110000565, "clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_funnel_trend_cohort_breakdown": 0.9841029130002426, "clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_hour_interval": 0.5078924869999355, "clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_month_interval": 0.8344737100001112, "clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_no_event_in_period": 0.5546642199999496, "clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_one_person_in_multiple_periods_and_windows": 1.1495319990001462, "clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_one_person_in_multiple_periods_and_windows_in_strict_funnel": 0.5916420429998652, "clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_one_person_in_multiple_periods_and_windows_in_unordered_funnel": 2.1021423269999104, "clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_only_one_user_reached_one_step": 1.298592218000067, "clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_period_not_final": 0.5681713399999353, "clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_steps_performed_in_period_but_in_reverse": 0.5510568449999482, 
"clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_to_second_step": 0.6902017239999623, "clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_two_runs_by_single_user_in_one_period": 0.5667538240002159, "clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_week_interval": 0.8349170629999207, "clickhouse/queries/funnels/test/test_funnel_trends.py::TestFunnelTrends::test_window_size_one_day": 0.901621619000025, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_basic_funnel_default_funnel_days_breakdown_action": 0.7634423929998775, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_basic_funnel_default_funnel_days_breakdown_action_materialized": 1.3647311220001939, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_basic_funnel_default_funnel_days_breakdown_event": 0.6912151000001359, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_aggregate_by_groups_breakdown_group": 3.4105473849999726, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_breakdown_group": 14.462301069999967, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_cohort_breakdown": 7.627433540000311, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_cohort_breakdown_materialized": 10.432332738000241, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_event": 4.080008904000124, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_event_materialized": 6.435513755999864, 
"clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_event_no_type": 4.349213960000043, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_event_no_type_materialized": 6.414773887999672, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_event_single_person_events_with_multiple_properties": 2.543372255999884, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_event_single_person_multiple_breakdowns": 1.509714688000031, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_event_single_person_multiple_breakdowns_materialized": 2.318202071000087, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_event_with_other": 4.332144545999881, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_event_with_other_materialized": 6.480769372000168, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_event_with_string_only_breakdown": 4.08813578399986, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_event_with_string_only_breakdown_materialized": 6.616847513000039, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_limit": 3.3800425200001882, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_limit_materialized": 5.39041776199997, 
"clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_person": 6.131225319999885, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_breakdown_person_materialized": 8.627335468000183, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_custom_breakdown_limit_with_nulls": 2.300822634000042, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_custom_breakdown_limit_with_nulls_included": 6.440125032999958, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_custom_breakdown_limit_with_nulls_included_materialized": 9.456629758999952, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_custom_breakdown_limit_with_nulls_materialized": 3.6358321180002804, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_multi_property_breakdown_event": 6.256030123999835, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsBreakdown::test_funnel_step_multi_property_breakdown_event_materialized": 9.48903565899991, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsConversionTime::test_funnel_step_conversion_times": 0.9614129660001254, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsConversionTime::test_funnel_times_with_different_conversion_windows": 2.551640565000298, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedStepsConversionTime::test_funnel_with_multiple_incomplete_tries": 1.3951392479998503, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedSteps::test_advanced_funnel_multiple_exclusions_between_steps": 
9.112437646999979, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedSteps::test_basic_unordered_funnel": 4.173162166999873, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedSteps::test_basic_unordered_funnel_conversion_times": 2.6952539869998873, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedSteps::test_big_multi_step_unordered_funnel": 5.000847975999704, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedSteps::test_funnel_exclusions_full_window": 1.5395832749998135, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedSteps::test_funnel_exclusions_invalid_params": 0.23409100200001376, "clickhouse/queries/funnels/test/test_funnel_unordered.py::TestFunnelUnorderedSteps::test_single_event_unordered_funnel": 0.5164445130001241, "clickhouse/queries/funnels/test/test_funnel_unordered_persons.py::TestFunnelUnorderedStepsPersons::test_first_step": 2.764960432999942, "clickhouse/queries/funnels/test/test_funnel_unordered_persons.py::TestFunnelUnorderedStepsPersons::test_invalid_steps": 0.34054918400011047, "clickhouse/queries/funnels/test/test_funnel_unordered_persons.py::TestFunnelUnorderedStepsPersons::test_last_step": 2.449346109000089, "clickhouse/queries/funnels/test/test_funnel_unordered_persons.py::TestFunnelUnorderedStepsPersons::test_last_step_dropoff": 2.4558980499998597, "clickhouse/queries/funnels/test/test_funnel_unordered_persons.py::TestFunnelUnorderedStepsPersons::test_second_step_dropoff": 2.5277033079999, "clickhouse/queries/funnels/test/test_funnel_unordered_persons.py::TestFunnelUnorderedStepsPersons::test_unordered_funnel_does_not_return_recordings": 3.426326071999938, "clickhouse/queries/funnels/test/test_utils.py::TestGetFunnelOrderClass::test_filter_missing_order": 0.2188882050002121, "clickhouse/queries/funnels/test/test_utils.py::TestGetFunnelOrderClass::test_ordered": 0.004179027999953178, 
"clickhouse/queries/funnels/test/test_utils.py::TestGetFunnelOrderClass::test_strict": 0.005254933999822242, "clickhouse/queries/funnels/test/test_utils.py::TestGetFunnelOrderClass::test_unordered": 0.005853037000179029, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording.py::TestClickhouseSessionRecording::test_get_chunked_snapshots": 8.100585236000143, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording.py::TestClickhouseSessionRecording::test_get_chunked_snapshots_with_specific_limit_and_offset": 4.386596007999742, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording.py::TestClickhouseSessionRecording::test_get_metadata": 3.2898731759996735, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording.py::TestClickhouseSessionRecording::test_get_metadata_does_not_leak_teams": 0.3159621249999418, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording.py::TestClickhouseSessionRecording::test_get_metadata_for_non_existant_session_id": 0.25849885699994957, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording.py::TestClickhouseSessionRecording::test_get_snapshots": 0.29906351599993286, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording.py::TestClickhouseSessionRecording::test_get_snapshots_does_not_leak_teams": 0.29122916599976634, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording.py::TestClickhouseSessionRecording::test_get_snapshots_with_no_such_session": 0.22190202200022213, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_action_filter": 0.883022057999824, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_action_filter_materialized": 1.8651401509998777, 
"clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_all_filters_at_once": 0.5978954299998804, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_all_sessions_recording_object_keys": 0.4102441289999206, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_all_sessions_recording_object_keys_with_entity_filter": 0.47025691299973005, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_basic_query": 0.4093991229999574, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_basic_query_materialized": 0.8523190619998786, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_date_from_filter": 0.4867546189998393, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_date_to_filter": 0.5144772959999955, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_duration_filter": 0.5255066670001725, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_event_filter": 0.6503184659998169, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_event_filter_matching_with_no_session_id": 1.1075125950001166, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_event_filter_matching_with_no_session_id_materialized": 1.1731051129997923, 
"clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_event_filter_with_cohort_properties": 1.3852593729998262, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_event_filter_with_cohort_properties_materialized": 1.523237257000119, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_event_filter_with_matching_on_session_id": 1.1149445409998862, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_event_filter_with_matching_on_session_id_materialized": 1.2101223509998817, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_event_filter_with_person_properties": 0.7940943860000971, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_event_filter_with_person_properties_materialized": 0.9941151639998225, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_event_filter_with_properties": 0.6748329199999716, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_event_filter_with_properties_materialized": 1.6434011219998865, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_multiple_event_filters": 0.6327269520002119, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_pagination": 0.6969983629999206, 
"clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_person_id_filter": 0.4529786000000513, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_recording_that_spans_time_bounds": 0.3868296759999339, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_recording_without_fullsnapshot_dont_appear": 0.3984844509998311, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_recordings_dont_leak_data_between_teams": 0.4271707359996526, "clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::TestClickhouseSessionRecordingsList::test_teams_dont_leak_event_filter": 0.4586310360002699, "clickhouse/queries/test/test_breakdown_props.py::TestBreakdownProps::test_breakdown_group_props": 0.7437108610001815, "clickhouse/queries/test/test_breakdown_props.py::TestBreakdownProps::test_breakdown_person_props": 0.5387107490003018, "clickhouse/queries/test/test_breakdown_props.py::TestBreakdownProps::test_breakdown_person_props_materialized": 1.4488532880000093, "clickhouse/queries/test/test_breakdown_props.py::TestBreakdownProps::test_breakdown_person_props_with_entity_filter": 0.8219678710001972, "clickhouse/queries/test/test_column_optimizer.py::TestColumnOptimizer::test_group_types_to_query": 0.24315415899991422, "clickhouse/queries/test/test_column_optimizer.py::TestColumnOptimizer::test_materialized_columns_checks": 0.5975584319999143, "clickhouse/queries/test/test_column_optimizer.py::TestColumnOptimizer::test_properties_used_in_filter": 0.2454650730001049, "clickhouse/queries/test/test_column_optimizer.py::TestColumnOptimizer::test_properties_used_in_filter_with_actions": 0.23375229800035413, 
"clickhouse/queries/test/test_column_optimizer.py::TestColumnOptimizer::test_should_query_element_chain_column": 0.23788502500019604, "clickhouse/queries/test/test_column_optimizer.py::TestColumnOptimizer::test_should_query_element_chain_column_with_actions": 0.24747188600008485, "clickhouse/queries/test/test_event_query.py::TestEventQuery::test_account_filters": 1.074630889999753, "clickhouse/queries/test/test_event_query.py::TestEventQuery::test_action_with_person_property_filter": 0.5016966169998796, "clickhouse/queries/test/test_event_query.py::TestEventQuery::test_basic_event_filter": 0.3339851410000847, "clickhouse/queries/test/test_event_query.py::TestEventQuery::test_cohort_filter": 0.5515465369996946, "clickhouse/queries/test/test_event_query.py::TestEventQuery::test_denormalised_props": 0.56255590700016, "clickhouse/queries/test/test_event_query.py::TestEventQuery::test_element": 0.5044206340000983, "clickhouse/queries/test/test_event_query.py::TestEventQuery::test_entity_filtered_by_cohort": 0.7179379000001518, "clickhouse/queries/test/test_event_query.py::TestEventQuery::test_event_properties_filter": 0.46707668399972135, "clickhouse/queries/test/test_event_query.py::TestEventQuery::test_groups_filters": 0.7906452509998871, "clickhouse/queries/test/test_event_query.py::TestEventQuery::test_groups_filters_mixed": 0.8496953309997934, "clickhouse/queries/test/test_event_query.py::TestEventQuery::test_person_properties_filter": 0.48780971699989095, "clickhouse/queries/test/test_event_query.py::TestEventQuery::test_static_cohort_filter": 0.4750795359998392, "clickhouse/queries/test/test_lifecycle.py::TestClickhouseLifecycle::test_filter_test_accounts": 1.1058018660000926, "clickhouse/queries/test/test_lifecycle.py::TestClickhouseLifecycle::test_interval_dates_days": 1.1543453759998101, "clickhouse/queries/test/test_lifecycle.py::TestClickhouseLifecycle::test_interval_dates_months": 1.8981988289999663, 
"clickhouse/queries/test/test_lifecycle.py::TestClickhouseLifecycle::test_interval_dates_weeks": 1.4133225490002133, "clickhouse/queries/test/test_lifecycle.py::TestClickhouseLifecycle::test_lifecycle_edge_cases": 1.127395229000058, "clickhouse/queries/test/test_lifecycle.py::TestClickhouseLifecycle::test_lifecycle_trend": 0.8701538789998722, "clickhouse/queries/test/test_lifecycle.py::TestClickhouseLifecycle::test_lifecycle_trend_action": 0.9258625370000573, "clickhouse/queries/test/test_lifecycle.py::TestClickhouseLifecycle::test_lifecycle_trend_all_time": 0.936211503000095, "clickhouse/queries/test/test_lifecycle.py::TestClickhouseLifecycle::test_lifecycle_trend_months": 0.9079583220000131, "clickhouse/queries/test/test_lifecycle.py::TestClickhouseLifecycle::test_lifecycle_trend_people": 1.173527224000054, "clickhouse/queries/test/test_lifecycle.py::TestClickhouseLifecycle::test_lifecycle_trend_people_paginated": 7.949091882000175, "clickhouse/queries/test/test_lifecycle.py::TestClickhouseLifecycle::test_lifecycle_trend_prop_filtering": 0.7907189719999224, "clickhouse/queries/test/test_lifecycle.py::TestClickhouseLifecycle::test_lifecycle_trend_weeks": 0.9017556829999194, "clickhouse/queries/test/test_lifecycle.py::TestClickhouseLifecycle::test_lifecycle_trends_distinct_id_repeat": 0.6327626570000575, "clickhouse/queries/test/test_lifecycle.py::TestClickhouseLifecycle::test_test_account_filters_with_groups": 1.48633083000027, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_current_url_paths_and_logic": 3.3881451219997416, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_custom_event_paths": 0.7395652419997987, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_denormalized_properties": 3.294957727999872, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_denormalized_properties_materialized": 5.367241918000218, 
"clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_event_exclusion_filters_with_wildcards": 1.0880908659999022, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_event_inclusion_exclusion_filters": 2.522247999999763, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_event_inclusion_exclusion_filters_across_single_person": 1.6449643999999353, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_by_funnel_after_dropoff": 7.4565931380000166, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_by_funnel_after_dropoff_with_group_filter": 16.79978533999997, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_by_funnel_after_step": 3.040937324000197, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_by_funnel_after_step_limit": 8.707202808000147, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_by_funnel_after_step_respects_conversion_window": 6.788085211999942, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_by_funnel_before_dropoff": 3.039069284000334, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_by_funnel_before_step": 3.0428915160000543, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_by_funnel_between_step": 8.489838148000217, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_by_grouping": 2.2948687670000254, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_by_grouping_replacement": 1.0123477069998899, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_by_grouping_replacement_multiple": 1.3208543910000117, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_event_ordering": 2.798366987999998, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_grouping_across_people": 0.6912756439999157, 
"clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_grouping_with_evil_input": 0.6231273050000254, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_groups_filtering": 3.6725187910001296, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_min_edge_weight": 3.1614029789998312, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_recording": 0.8922667089998413, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_recording_for_dropoff": 3.256045671000038, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_recording_with_no_window_or_session_id": 1.79887409700018, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_recording_with_start_and_end": 3.5140249469998253, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_removes_duplicates": 0.6666982730000655, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_path_respect_session_limits": 0.6278188260000661, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_paths_end": 0.7218085800000154, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_paths_end_materialized": 1.7910657639999954, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_paths_in_window": 0.6285672640001394, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_paths_person_dropoffs": 5.015035291999766, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_paths_properties_filter": 0.7640942919999816, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_paths_start": 1.5359508349997668, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_paths_start_and_end": 2.95206028600046, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_paths_start_and_end_materialized": 4.6532668120000835, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_paths_start_dropping_orphaned_edges": 
0.9941630549999445, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_properties_queried_using_path_filter": 0.3207866830000512, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_screen_paths": 0.7595411319996401, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_step_conversion_times": 0.6413550639999812, "clickhouse/queries/test/test_paths.py::TestClickhousePaths::test_step_limit": 3.2503886500001045, "clickhouse/queries/test/test_paths.py::TestClickhousePathsEdgeValidation::test_basic_forest": 0.011457969999810302, "clickhouse/queries/test/test_paths.py::TestClickhousePathsEdgeValidation::test_basic_forest_with_dangling_and_cross_edges": 0.005490134000183389, "clickhouse/queries/test/test_paths.py::TestClickhousePathsEdgeValidation::test_basic_forest_with_dangling_edges": 0.004861230999722466, "clickhouse/queries/test/test_paths.py::TestClickhousePathsEdgeValidation::test_no_start_point": 0.006591542000251138, "clickhouse/queries/test/test_person_distinct_id_query.py::test_person_distinct_id_query": 0.014788990999932139, "clickhouse/queries/test/test_person_query.py::test_person_query": 0.45500310099987473, "clickhouse/queries/test/test_person_query.py::test_person_query_with_extra_requested_fields": 0.4221900990000904, "clickhouse/queries/test/test_person_query.py::test_person_query_with_entity_filters": 0.41167083399977855, "clickhouse/queries/test/test_person_query.py::test_person_query_with_extra_fields": 0.3937601220000033, "clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_day_interval": 0.6646724899997025, "clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_filter_test_accounts": 0.7632908980001503, "clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_first_time_retention": 0.9653550419998282, "clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_groups_aggregating": 2.042867272999956, 
"clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_groups_filtering": 2.2413548679999167, "clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_groups_in_period": 1.1467386009996972, "clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_hour_interval": 0.6959896920000119, "clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_interval_rounding": 0.693231373999879, "clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_month_interval": 0.7234894700000041, "clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_retention_action_start_point": 0.7137843070001963, "clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_retention_default": 0.6554696299999705, "clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_retention_event_action": 0.6607448639999802, "clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_retention_invalid_properties": 0.24224496500005444, "clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_retention_multiple_events": 0.7805070379999961, "clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_retention_people_basic": 0.6939605789998495, "clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_retention_people_first_time": 1.2412672119999115, "clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_retention_people_in_perieod_first_time": 1.008047906000229, "clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_retention_people_in_period": 0.7215799569999035, "clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_retention_people_paginated": 14.853945221999993, "clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_retention_with_properties": 0.7289960739999515, 
"clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_retention_with_user_properties": 0.7912504559999434, "clickhouse/queries/test/test_retention.py::TestClickhouseRetention::test_week_interval": 0.7061474360000375, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_action_filtering": 0.7046399240000483, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_action_with_prop": 0.5277108380000755, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_action_with_prop_materialized": 1.096348828000373, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_active_user_math": 0.546558055000105, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_active_user_math_action": 0.5769726329997411, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_against_clashing_entity_and_property_filter_naming": 0.6698282149998249, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_against_clashing_entity_and_property_filter_naming_materialized": 1.368367717000183, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_all_time_timerange": 0.6478839739997966, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_avg_filtering": 0.7984168450002471, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_avg_filtering_materialized": 1.5399874229999568, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_avg_filtering_non_number_resiliency": 0.7952805250001802, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_avg_filtering_non_number_resiliency_materialized": 1.6412210749999758, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_bar_chart_by_value": 0.623295115999781, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_active_user_math": 0.6061812049999844, 
"clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_active_user_math_materialized": 1.225129749999951, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_by_cohort": 1.244574845000443, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_by_cohort_materialized": 1.7165280110000367, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_by_empty_cohort": 0.44373827500021434, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_by_group_props": 1.1524356259997148, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_by_group_props_with_person_filter": 1.1491034040000159, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_by_person_property": 1.1675308280000536, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_by_person_property_pie": 0.6823815790000936, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_by_person_property_pie_materialized": 0.9768878610000229, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_by_property_pie": 0.8225497539997377, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_filter_by_precalculated_cohort": 1.0064445559999058, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_filtering": 0.9175262660000953, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_filtering_bar_chart_by_value": 0.6286065680001229, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_filtering_bar_chart_by_value_materialized": 1.216997366999749, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_filtering_limit": 1.082741499000349, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_filtering_limit_materialized": 1.8370366620001732, 
"clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_filtering_materialized": 1.6425213249999615, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_filtering_persons": 0.6509509479999451, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_filtering_persons_materialized": 0.9206755560001056, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_filtering_persons_with_action_props": 0.7299601499996697, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_filtering_persons_with_action_props_materialized": 1.4902757960003328, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_filtering_with_properties": 0.6779576119997728, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_filtering_with_properties_in_new_format": 1.2840342050001254, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_filtering_with_properties_materialized": 2.140008432000286, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_label": 0.23358455700008562, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_multiple_cohorts": 1.3039251660002265, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_multiple_cohorts_materialized": 1.838172984999801, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_single_cohort": 0.9041514330001519, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_single_cohort_materialized": 1.2822540989998288, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_user_props_with_filter": 0.7093174209999233, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_user_props_with_filter_materialized": 1.4340936560001865, 
"clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_with_filter": 0.5447207739998703, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_with_filter_groups": 0.881371518999913, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_with_filter_materialized": 1.1038812140000118, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_with_person_property_filter": 1.1409580429997277, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_breakdown_with_person_property_filter_materialized": 2.18452530799982, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_combine_all_cohort_and_icontains": 0.7873676830001841, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_combine_all_cohort_and_icontains_materialized": 1.4613115239997114, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_custom_range_timerange": 0.6252466060000188, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_dau_filtering": 0.8750444849999894, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_dau_with_breakdown_filtering": 1.0275893790001192, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_dau_with_breakdown_filtering_materialized": 1.7095566189998408, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_dau_with_breakdown_filtering_with_prop_filter": 1.0363791869997385, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_dau_with_breakdown_filtering_with_prop_filter_materialized": 2.293533518000004, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_day_interval": 0.5372074260003501, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_entity_person_property_filtering": 0.708318808000513, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_entity_person_property_filtering_materialized": 0.9376633669999137, 
"clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_filter_by_precalculated_cohort": 0.8165361639998991, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_filter_events_by_cohort": 0.509648006000134, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_filter_events_by_cohort_materialized": 0.7256666099997346, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_filter_test_accounts": 0.8450639929997124, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_filter_test_accounts_cohorts": 0.5007618029999321, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_filter_test_accounts_cohorts_materialized": 0.6885697419997996, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_filter_test_accounts_materialized": 1.682076362000771, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_filtering_with_action_props": 0.3993766499997946, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_filtering_with_action_props_materialized": 1.4885540589998527, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_filtering_with_group_props": 0.8323315830002684, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_hour_interval": 0.47221817999979976, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_interval_filtering": 1.2096229829999174, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_interval_filtering_breakdown": 1.5633576120008001, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_interval_filtering_breakdown_materialized": 2.004998981999506, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_interval_rounding": 0.5565811219998977, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_interval_rounding_monthly": 0.4570427909998216, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_last14days_timerange": 
0.6957263019999118, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_last24hours_timerange": 0.5773279580007511, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_last30days_timerange": 0.671400227000504, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_last48hours_timerange": 0.563124395000159, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_last7days_timerange": 0.5733055109999441, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_last90days_timerange": 0.8081714809991354, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_max_filtering": 0.8303997339994567, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_max_filtering_materialized": 1.5396787710001263, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_median_filtering": 2.167772586999945, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_median_filtering_materialized": 3.6218609810002818, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_min_filtering": 0.8022209000000657, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_min_filtering_materialized": 1.5255202879993703, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_month_interval": 0.4944628030007152, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_no_props": 0.37846128799992584, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_p90_filtering": 2.0922726060002788, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_p90_filtering_materialized": 3.68327783299992, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_p95_filtering": 2.095263717999842, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_p95_filtering_materialized": 3.5037541609999607, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_p99_filtering": 2.082477471999937, 
"clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_p99_filtering_materialized": 3.5499886600000536, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_per_entity_filtering": 0.7360381259995847, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_per_entity_filtering_materialized": 1.4257766279997668, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_person_property_filtering": 0.6686522629997853, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_person_property_filtering_materialized": 0.9348132760005683, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_previous_month_timerange": 0.6710353899998154, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_property_filtering": 0.6369250029997602, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_property_filtering_materialized": 1.270641360999889, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_response_empty_if_no_events": 0.5093135560000519, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_should_throw_exception": 0.519960675999755, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_sum_filtering": 0.8352491399991777, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_sum_filtering_materialized": 1.5079321429998345, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_this_month_timerange": 0.6460136019995844, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_today_timerange": 0.5175185489997602, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_breakdown_single_aggregate": 0.5673361120002482, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_breakdown_single_aggregate_cohorts": 0.7511300900000606, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_breakdown_single_aggregate_cohorts_materialized": 
0.9816843950002294, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_breakdown_single_aggregate_math": 0.7286230330000762, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_breakdown_with_math_func": 1.7888063139994301, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_breakdown_with_math_func_materialized": 3.4919757630000277, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_compare": 0.8258102309996502, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_for_non_existing_action": 0.3714096969997627, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_math_without_math_property": 0.21842486899959113, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_per_day": 0.6987832960003288, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_per_day_48hours": 0.6582537359995513, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_per_day_cumulative": 0.5893926579997242, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_regression_filtering_by_action_with_person_properties": 0.8432286250003926, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_regression_filtering_by_action_with_person_properties_materialized": 1.2663524069998857, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_single_aggregate_dau": 0.6936826360006307, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_single_aggregate_math": 0.6170514700002059, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_trends_single_aggregate_math_materialized": 1.2664123059998929, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_week_interval": 0.6634324940000624, "clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_year_to_date_timerange": 0.6772948520001592, 
"clickhouse/queries/test/test_trends.py::TestClickhouseTrends::test_yesterday_timerange": 0.5290308759995241, "clickhouse/queries/test/test_util.py::test_get_earliest_timestamp": 0.3531473869998081, "clickhouse/queries/test/test_util.py::test_get_earliest_timestamp_with_no_events": 0.25394326900050146, "clickhouse/queries/test/test_util.py::test_parse_breakdown_cohort_query": 0.28484161800042784, "clickhouse/queries/trends/test/test_formula.py::TestFormula::test_breakdown": 0.90428451400021, "clickhouse/queries/trends/test/test_formula.py::TestFormula::test_breakdown_cohort": 0.9874005520000537, "clickhouse/queries/trends/test/test_formula.py::TestFormula::test_breakdown_counts_of_different_events_one_without_events": 0.7694312909998189, "clickhouse/queries/trends/test/test_formula.py::TestFormula::test_breakdown_mismatching_sizes": 0.9089147670001694, "clickhouse/queries/trends/test/test_formula.py::TestFormula::test_compare": 0.8966546269998616, "clickhouse/queries/trends/test/test_formula.py::TestFormula::test_cumulative": 0.7133831579999423, "clickhouse/queries/trends/test/test_formula.py::TestFormula::test_day_interval": 0.6950103509998371, "clickhouse/queries/trends/test/test_formula.py::TestFormula::test_event_properties": 0.716653094999856, "clickhouse/queries/trends/test/test_formula.py::TestFormula::test_formula": 2.047238822000054, "clickhouse/queries/trends/test/test_formula.py::TestFormula::test_global_properties": 0.7351242009999623, "clickhouse/queries/trends/test/test_formula.py::TestFormula::test_hour_interval": 0.7155216799997106, "clickhouse/queries/trends/test/test_formula.py::TestFormula::test_interval_rounding": 0.47989501900019604, "clickhouse/queries/trends/test/test_formula.py::TestFormula::test_month_interval": 0.6844273279994013, "clickhouse/queries/trends/test/test_formula.py::TestFormula::test_multiple_events": 0.7808311170001616, "clickhouse/queries/trends/test/test_formula.py::TestFormula::test_pie": 0.6651766129998578, 
"clickhouse/queries/trends/test/test_formula.py::TestFormula::test_properties_with_escape_params": 0.7508227800003624, "clickhouse/queries/trends/test/test_formula.py::TestFormula::test_week_interval": 0.732694475000244, "clickhouse/queries/trends/test/test_person.py::TestPerson::test_group_query_includes_recording_events": 0.5384214800001246, "clickhouse/queries/trends/test/test_person.py::TestPerson::test_person_query_does_not_include_recording_events_if_flag_not_set": 0.5219140760004848, "clickhouse/queries/trends/test/test_person.py::TestPerson::test_person_query_includes_recording_events": 0.5095445249999102, "clickhouse/test/test_calculate_event_property_usage.py::CalculateEventPropertyUsage::test_calculate_usage": 0.4276355029996921, "clickhouse/test/test_calculate_event_property_usage.py::CalculateEventPropertyUsage::test_updating_event_properties_or_related_updates_property_definitions": 18.886705246000474, "clickhouse/test/test_calculate_event_property_usage.py::CalculateEventPropertyUsage::test_updating_team_events_or_related_updates_event_definitions": 18.96476181700018, "clickhouse/test/test_client.py::ClickhouseClientTestCase::test_cache_eviction": 0.1144139829998494, "clickhouse/test/test_client.py::ClickhouseClientTestCase::test_caching_client": 0.008758400000260735, "clickhouse/test/test_client.py::ClickhouseClientTestCase::test_client_strips_comments_from_request": 0.015356273000179499, "clickhouse/test/test_middleware.py::TestQueryMiddleware::test_query": 1.1683267019998311, "clickhouse/test/test_system_status.py::test_system_status": 0.11663930700024139, "clickhouse/views/test/test_clickhouse_experiment_secondary_results.py::ClickhouseTestExperimentSecondaryResults::test_basic_secondary_metric_results": 2.3230102390002685, "clickhouse/views/test/test_clickhouse_experiment_secondary_results.py::ClickhouseTestExperimentSecondaryResults::test_secondary_metric_results_for_multiple_variants": 1.734905845999947, 
"clickhouse/views/test/test_clickhouse_experiments.py::TestExperimentCRUD::test_can_list_experiments": 0.27526728499969977, "clickhouse/views/test/test_clickhouse_experiments.py::TestExperimentCRUD::test_cannot_list_experiments_without_proper_license": 0.028728121999392897, "clickhouse/views/test/test_clickhouse_experiments.py::TestExperimentCRUD::test_cant_reuse_existing_feature_flag": 0.047463832999710576, "clickhouse/views/test/test_clickhouse_experiments.py::TestExperimentCRUD::test_create_multivariate_experiment": 0.14148128499982704, "clickhouse/views/test/test_clickhouse_experiments.py::TestExperimentCRUD::test_creating_invalid_multivariate_experiment_no_control": 0.034659888000078354, "clickhouse/views/test/test_clickhouse_experiments.py::TestExperimentCRUD::test_creating_updating_basic_experiment": 0.07716886400021394, "clickhouse/views/test/test_clickhouse_experiments.py::TestExperimentCRUD::test_creating_updating_experiment_with_group_aggregation": 0.10900962099958633, "clickhouse/views/test/test_clickhouse_experiments.py::TestExperimentCRUD::test_deleting_experiment_soft_deletes_feature_flag": 0.2556160649996855, "clickhouse/views/test/test_clickhouse_experiments.py::TestExperimentCRUD::test_deleting_feature_flag_deletes_experiment": 0.07549634499991953, "clickhouse/views/test/test_clickhouse_experiments.py::TestExperimentCRUD::test_draft_experiment_doesnt_have_FF_active": 0.05245168700002978, "clickhouse/views/test/test_clickhouse_experiments.py::TestExperimentCRUD::test_draft_experiment_doesnt_have_FF_active_even_after_updates": 0.10859391699978005, "clickhouse/views/test/test_clickhouse_experiments.py::TestExperimentCRUD::test_draft_experiment_participants_update_updates_FF": 0.11081524199971682, "clickhouse/views/test/test_clickhouse_experiments.py::TestExperimentCRUD::test_getting_archived_experiments": 0.11277496400043674, "clickhouse/views/test/test_clickhouse_experiments.py::TestExperimentCRUD::test_invalid_create": 0.04871064600001773, 
"clickhouse/views/test/test_clickhouse_experiments.py::TestExperimentCRUD::test_invalid_update": 0.06834576599976572, "clickhouse/views/test/test_clickhouse_experiments.py::TestExperimentCRUD::test_launching_draft_experiment_activates_FF": 0.08072240600040459, "clickhouse/views/test/test_clickhouse_experiments.py::ClickhouseTestFunnelExperimentResults::test_experiment_flow_with_event_results": 1.6418293030001223, "clickhouse/views/test/test_clickhouse_experiments.py::ClickhouseTestFunnelExperimentResults::test_experiment_flow_with_event_results_for_three_test_variants": 2.2204734159995496, "clickhouse/views/test/test_clickhouse_experiments.py::ClickhouseTestTrendExperimentResults::test_experiment_flow_with_event_results": 1.967549980000058, "clickhouse/views/test/test_clickhouse_experiments.py::ClickhouseTestTrendExperimentResults::test_experiment_flow_with_event_results_for_three_test_variants": 1.763380889000473, "clickhouse/views/test/test_clickhouse_experiments.py::ClickhouseTestTrendExperimentResults::test_experiment_flow_with_event_results_for_two_test_variants_with_varying_exposures": 1.5084854270003234, "clickhouse/views/test/test_clickhouse_groups.py::ClickhouseTestGroupsApi::test_empty_property_values": 0.3213216059998558, "clickhouse/views/test/test_clickhouse_groups.py::ClickhouseTestGroupsApi::test_groups_list": 0.3098136769995108, "clickhouse/views/test/test_clickhouse_groups.py::ClickhouseTestGroupsApi::test_property_definitions": 0.29085323899971627, "clickhouse/views/test/test_clickhouse_groups.py::ClickhouseTestGroupsApi::test_property_values": 0.2954054760002691, "clickhouse/views/test/test_clickhouse_groups.py::ClickhouseTestGroupsApi::test_related_groups": 0.8371019810001599, "clickhouse/views/test/test_clickhouse_groups.py::ClickhouseTestGroupsApi::test_related_groups_person": 0.9394128149997414, "clickhouse/views/test/test_clickhouse_groups.py::ClickhouseTestGroupsApi::test_retrieve_group": 0.324528397999984, 
"clickhouse/views/test/test_clickhouse_groups.py::ClickhouseTestGroupsApi::test_update_groups_metadata": 0.26823237400003563, "clickhouse/views/test/test_clickhouse_path_person.py::TestPathPerson::test_basic_format": 1.0082933800003957, "clickhouse/views/test/test_clickhouse_path_person.py::TestPathPerson::test_basic_format_with_funnel_path_get": 1.9760261089995765, "clickhouse/views/test/test_clickhouse_path_person.py::TestPathPerson::test_basic_format_with_funnel_path_post": 1.9363886709998042, "clickhouse/views/test/test_clickhouse_path_person.py::TestPathPerson::test_basic_format_with_path_start_key_constraints": 0.9532877379997444, "clickhouse/views/test/test_clickhouse_path_person.py::TestPathPerson::test_basic_format_with_start_point_constraints": 1.101382413999545, "clickhouse/views/test/test_clickhouse_path_person.py::TestPathPerson::test_basic_pagination": 2.400572041000487, "clickhouse/views/test/test_clickhouse_path_person.py::TestPathPerson::test_basic_pagination_with_deleted": 10.387213124000027, "clickhouse/views/test/test_clickhouse_path_person.py::TestPathPerson::test_create_paths_cohort": 0.9728452740000648, "clickhouse/views/test/test_clickhouse_paths.py::TestClickhousePaths::test_backwards_compatible_path_types": 1.332943140000225, "clickhouse/views/test/test_clickhouse_paths.py::TestClickhousePaths::test_backwards_compatible_start_point": 1.4490154019995316, "clickhouse/views/test/test_clickhouse_paths.py::TestClickhousePaths::test_funnel_path_post": 2.0893301179994523, "clickhouse/views/test/test_clickhouse_paths.py::TestClickhousePaths::test_insight_paths_basic": 0.6426262409995616, "clickhouse/views/test/test_clickhouse_paths.py::TestClickhousePaths::test_insight_paths_basic_exclusions": 0.6892590589995962, "clickhouse/views/test/test_clickhouse_paths.py::TestClickhousePaths::test_path_groupings": 1.144658520000121, "clickhouse/views/test/test_clickhouse_retention.py::RetentionTests::test_retention_test_account_filters": 2.80657780599995, 
"clickhouse/views/test/test_clickhouse_retention.py::BreakdownTests::test_can_get_retention_cohort_breakdown": 1.2412932519996502, "clickhouse/views/test/test_clickhouse_retention.py::BreakdownTests::test_can_get_retention_cohort_breakdown_with_retention_type_target": 1.185107022000011, "clickhouse/views/test/test_clickhouse_retention.py::BreakdownTests::test_can_specify_breakdown_event_property": 1.1811183760005406, "clickhouse/views/test/test_clickhouse_retention.py::BreakdownTests::test_can_specify_breakdown_event_property_and_retrieve_people": 0.8958119679996344, "clickhouse/views/test/test_clickhouse_retention.py::BreakdownTests::test_can_specify_breakdown_event_property_and_retrieve_people_materialized": 1.7925422469998011, "clickhouse/views/test/test_clickhouse_retention.py::BreakdownTests::test_can_specify_breakdown_event_property_materialized": 2.1258863269999893, "clickhouse/views/test/test_clickhouse_retention.py::BreakdownTests::test_can_specify_breakdown_person_property": 1.5877514960002372, "clickhouse/views/test/test_clickhouse_retention.py::BreakdownTests::test_can_specify_breakdown_person_property_materialized": 2.1877489099997547, "clickhouse/views/test/test_clickhouse_retention.py::IntervalTests::test_can_get_retention_week_interval": 1.199537519000387, "clickhouse/views/test/test_clickhouse_retention.py::RegressionTests::test_can_get_actors_and_use_percent_char_filter": 0.6791626849999375, "clickhouse/views/test/test_clickhouse_session_recordings.py::ClickhouseTestSessionRecordingsAPI::test_get_default_limit_of_chunks": 0.6475478219999786, "clickhouse/views/test/test_clickhouse_session_recordings.py::ClickhouseTestSessionRecordingsAPI::test_get_metadata_for_chunked_session_recording": 19.598255986999902, "clickhouse/views/test/test_clickhouse_session_recordings.py::ClickhouseTestSessionRecordingsAPI::test_get_session_recordings": 0.4766140629999427, 
"clickhouse/views/test/test_clickhouse_session_recordings.py::ClickhouseTestSessionRecordingsAPI::test_get_single_session_recording_metadata": 0.35214388900067206, "clickhouse/views/test/test_clickhouse_session_recordings.py::ClickhouseTestSessionRecordingsAPI::test_get_snapshots_for_chunked_session_recording": 22.205296735999582, "clickhouse/views/test/test_clickhouse_session_recordings.py::ClickhouseTestSessionRecordingsAPI::test_request_to_another_teams_endpoint_returns_401": 0.2614651330000015, "clickhouse/views/test/test_clickhouse_session_recordings.py::ClickhouseTestSessionRecordingsAPI::test_session_recording_doesnt_exist": 0.28716912200025035, "clickhouse/views/test/test_clickhouse_session_recordings.py::ClickhouseTestSessionRecordingsAPI::test_session_recording_for_user_with_multiple_distinct_ids": 0.43796151300011843, "clickhouse/views/test/test_clickhouse_session_recordings.py::ClickhouseTestSessionRecordingsAPI::test_session_recording_with_no_person": 0.2834169779998774, "clickhouse/views/test/test_clickhouse_session_recordings.py::ClickhouseTestSessionRecordingsAPI::test_session_recordings_dont_leak_teams": 0.45294108100051744, "clickhouse/views/test/test_clickhouse_session_recordings.py::ClickhouseTestSessionRecordingsAPI::test_setting_viewed_state_of_session_recording": 0.7899261609995847, "clickhouse/views/test/test_clickhouse_session_recordings.py::ClickhouseTestSessionRecordingsAPI::test_single_session_recording_doesnt_leak_teams": 0.32720627200023955, "clickhouse/views/test/test_clickhouse_session_recordings.py::ClickhouseTestSessionRecordingsAPI::test_viewed_state_of_session_recording": 0.4179604890000519, "clickhouse/views/test/test_clickhouse_stickiness.py::TestClickhouseStickiness::test_aggregate_by_groups": 1.6487348940004267, "clickhouse/views/test/test_clickhouse_stickiness.py::TestClickhouseStickiness::test_compare": 0.7141587269998126, 
"clickhouse/views/test/test_clickhouse_stickiness.py::TestClickhouseStickiness::test_filter_by_group_properties": 1.6667034380002406, "clickhouse/views/test/test_clickhouse_stickiness.py::TestClickhouseStickiness::test_filter_test_accounts": 0.7670524239997576, "clickhouse/views/test/test_clickhouse_stickiness.py::TestClickhouseStickiness::test_stickiness": 0.7002249709998978, "clickhouse/views/test/test_clickhouse_stickiness.py::TestClickhouseStickiness::test_stickiness_action": 0.6573319899998751, "clickhouse/views/test/test_clickhouse_stickiness.py::TestClickhouseStickiness::test_stickiness_all_time": 0.6746182829997451, "clickhouse/views/test/test_clickhouse_stickiness.py::TestClickhouseStickiness::test_stickiness_entity_filter": 0.6452071540006727, "clickhouse/views/test/test_clickhouse_stickiness.py::TestClickhouseStickiness::test_stickiness_hours": 0.6560733739997886, "clickhouse/views/test/test_clickhouse_stickiness.py::TestClickhouseStickiness::test_stickiness_months": 0.6639575650001461, "clickhouse/views/test/test_clickhouse_stickiness.py::TestClickhouseStickiness::test_stickiness_people_endpoint": 0.6385922779995781, "clickhouse/views/test/test_clickhouse_stickiness.py::TestClickhouseStickiness::test_stickiness_people_paginated": 9.333654103999834, "clickhouse/views/test/test_clickhouse_stickiness.py::TestClickhouseStickiness::test_stickiness_people_with_entity_filter": 0.5848976590004895, "clickhouse/views/test/test_clickhouse_stickiness.py::TestClickhouseStickiness::test_stickiness_prop_filter": 0.6334266020007817, "clickhouse/views/test/test_clickhouse_stickiness.py::TestClickhouseStickiness::test_stickiness_weeks": 0.6371774450003613, "clickhouse/views/test/test_clickhouse_trends.py::ClickhouseTestTrends::test_breakdown_with_filter": 1.1029224670000985, "clickhouse/views/test/test_clickhouse_trends.py::ClickhouseTestTrends::test_breakdown_with_filter_materialized": 1.8954865550003888, 
"clickhouse/views/test/test_clickhouse_trends.py::ClickhouseTestTrends::test_insight_trends_aggregate": 0.8149526380002499, "clickhouse/views/test/test_clickhouse_trends.py::ClickhouseTestTrends::test_insight_trends_basic": 0.9567830269998012, "clickhouse/views/test/test_clickhouse_trends.py::ClickhouseTestTrends::test_insight_trends_clean_arg": 0.9351838770003269, "clickhouse/views/test/test_clickhouse_trends.py::ClickhouseTestTrends::test_insight_trends_compare": 0.8264858539996567, "clickhouse/views/test/test_clickhouse_trends.py::ClickhouseTestTrends::test_insight_trends_cumulative": 2.981720184000551, "clickhouse/views/test/test_clickhouse_trends.py::ClickhouseTestTrendsGroups::test_aggregating_by_group": 0.9451326820008035, "clickhouse/views/test/funnel/test_clickhouse_funnel.py::ClickhouseTestFunnelGroups::test_funnel_aggregation_with_groups": 1.436297280999952, "clickhouse/views/test/funnel/test_clickhouse_funnel.py::ClickhouseTestFunnelGroups::test_funnel_group_aggregation_with_groups_entity_filtering": 1.4970255660000475, "clickhouse/views/test/funnel/test_clickhouse_funnel.py::ClickhouseTestFunnelGroups::test_funnel_with_groups_entity_filtering": 1.2812429720006548, "clickhouse/views/test/funnel/test_clickhouse_funnel.py::ClickhouseTestFunnelGroups::test_funnel_with_groups_global_filtering": 1.623736411999289, "clickhouse/views/test/funnel/test_clickhouse_funnel_correlation.py::FunnelCorrelationTest::test_correlation_endpoint_request_with_no_steps_doesnt_fail": 0.29538281300028757, "clickhouse/views/test/funnel/test_clickhouse_funnel_correlation.py::FunnelCorrelationTest::test_correlation_endpoint_with_properties": 1.9257532009996794, "clickhouse/views/test/funnel/test_clickhouse_funnel_correlation.py::FunnelCorrelationTest::test_event_correlation_endpoint_does_not_include_funnel_steps": 0.9482588349997059, 
"clickhouse/views/test/funnel/test_clickhouse_funnel_correlation.py::FunnelCorrelationTest::test_event_correlation_endpoint_does_not_include_historical_events": 0.833621951999703, "clickhouse/views/test/funnel/test_clickhouse_funnel_correlation.py::FunnelCorrelationTest::test_event_correlation_endpoint_picks_up_events_for_odds_ratios": 0.8719883809999374, "clickhouse/views/test/funnel/test_clickhouse_funnel_correlation.py::FunnelCorrelationTest::test_event_correlation_is_partitioned_by_team": 1.3196054000000004, "clickhouse/views/test/funnel/test_clickhouse_funnel_correlation.py::FunnelCorrelationTest::test_events_correlation_endpoint_provides_people_drill_down_urls": 1.481358815999556, "clickhouse/views/test/funnel/test_clickhouse_funnel_correlation.py::FunnelCorrelationTest::test_events_with_properties_correlation_endpoint_provides_people_drill_down_urls": 1.7850596209996183, "clickhouse/views/test/funnel/test_clickhouse_funnel_correlation.py::FunnelCorrelationTest::test_funnel_correlation_with_event_properties_autocapture": 1.8398892359996353, "clickhouse/views/test/funnel/test_clickhouse_funnel_correlation.py::FunnelCorrelationTest::test_properties_correlation_endpoint_provides_people_drill_down_urls": 2.1810598590000154, "clickhouse/views/test/funnel/test_clickhouse_funnel_correlation.py::FunnelCorrelationTest::test_requires_authn": 0.23988138899994738, "clickhouse/views/test/funnel/test_clickhouse_funnel_person.py::TestFunnelPerson::test_basic_format": 0.925652576000175, "clickhouse/views/test/funnel/test_clickhouse_funnel_person.py::TestFunnelPerson::test_basic_pagination": 9.797648144999584, "clickhouse/views/test/funnel/test_clickhouse_funnel_person.py::TestFunnelPerson::test_basic_pagination_with_deleted": 10.813566740999704, "clickhouse/views/test/funnel/test_clickhouse_funnel_person.py::TestFunnelPerson::test_breakdown_basic_pagination": 9.947067232000336, 
"clickhouse/views/test/funnel/test_clickhouse_funnel_person.py::TestFunnelPerson::test_breakdowns": 1.1854758939998646, "clickhouse/views/test/funnel/test_clickhouse_funnel_person.py::TestFunnelCorrelationActors::test_pagination": 2.1129862930001764, "clickhouse/views/test/funnel/test_clickhouse_funnel_trends_person.py::TestFunnelTrendsPerson::test_basic_format": 1.1836482720004824, "clickhouse/views/test/funnel/test_clickhouse_funnel_trends_person.py::TestFunnelTrendsPerson::test_strict_order": 0.8608176520001507, "clickhouse/views/test/funnel/test_clickhouse_funnel_trends_person.py::TestFunnelTrendsPerson::test_unordered": 1.8821931039997253, "clickhouse/views/test/funnel/test_clickhouse_funnel_unordered.py::ClickhouseTestUnorderedFunnelGroups::test_unordered_funnel_with_groups": 2.270931663999818, "kafka_client/test/test_client.py::KafkaClientTestCase::test_kafka_interface": 0.008639997000500443, "kafka_client/test/test_client.py::KafkaClientTestCase::test_kafka_produce": 0.3887700590003078, "kafka_client/test/test_client.py::KafkaClientTestCase::test_kafka_produce_and_consume": 0.34380175500018595, "models/test/test_event_definition_model.py::TestEventDefinition::test_default_verified_false": 0.23576914399973248, "models/test/test_event_definition_model.py::TestEventDefinition::test_errors_on_invalid_verified_by_type": 0.007267781999871659, "tasks/test/test_calculate_cohort.py::TestClickhouseCalculateCohort::test_calculate_cohorts": 0.24113980399988577, "tasks/test/test_calculate_cohort.py::TestClickhouseCalculateCohort::test_create_funnels_cohort": 0.617903429999842, "tasks/test/test_calculate_cohort.py::TestClickhouseCalculateCohort::test_create_stickiness_cohort": 0.38702683999963483, "tasks/test/test_calculate_cohort.py::TestClickhouseCalculateCohort::test_create_trends_cohort": 0.530424948000018, "tasks/test/test_calculate_cohort.py::TestClickhouseCalculateCohort::test_create_trends_cohort_arg_test": 0.5648505340000156, 
"tasks/test/test_send_license_usage.py::SendLicenseUsageTest::test_send_license_error": 6.438404195999738, "tasks/test/test_send_license_usage.py::SendLicenseUsageTest::test_send_license_usage": 2.267681829000594, "tasks/test/test_send_license_usage.py::SendLicenseUsageNoLicenseTest::test_no_license": 0.3907684809996681, "tasks/test/test_status_report.py::TestStatusReport::test_instance_status_report_event_counts": 2.1916582759999983, "tasks/test/test_status_report.py::TestStatusReport::test_status_report": 0.14247239700034697, "tasks/test/test_status_report.py::TestStatusReport::test_status_report_duplicate_distinct_ids": 0.26345755400006965, "tasks/test/test_status_report.py::TestStatusReport::test_status_report_multiple_ids_per_person": 0.2914452680006434, "tasks/test/test_status_report.py::TestStatusReport::test_status_report_plugins": 0.14896207099991443, "clickhouse/models/test/test_property.py::test_combine_group_properties": 0.002199124000071606, "clickhouse/models/test/test_property.py::also_test_with_materialized_columns": 0.0018008200004260289, "clickhouse/queries/experiments/test_experiment_result.py::TestFunnelExperimentCalculator::test_absolute_loss_less_than_one_percent_but_not_significant": 0.3467312879997735, "clickhouse/queries/experiments/test_experiment_result.py::TestFunnelExperimentCalculator::test_calculate_results": 0.3454197720002412, "clickhouse/queries/experiments/test_experiment_result.py::TestFunnelExperimentCalculator::test_calculate_results_for_three_test_variants": 7.3001690630003395, "clickhouse/queries/experiments/test_experiment_result.py::TestFunnelExperimentCalculator::test_calculate_results_for_three_test_variants_almost_equal": 9.130410286999904, "clickhouse/queries/experiments/test_experiment_result.py::TestFunnelExperimentCalculator::test_calculate_results_for_three_test_variants_much_better_than_control": 14.885644429999957, 
"clickhouse/queries/experiments/test_experiment_result.py::TestFunnelExperimentCalculator::test_calculate_results_for_two_test_variants": 0.7631933580000805, "clickhouse/queries/experiments/test_experiment_result.py::TestFunnelExperimentCalculator::test_calculate_results_for_two_test_variants_almost_equal": 0.5713543069996376, "clickhouse/queries/experiments/test_experiment_result.py::TestFunnelExperimentCalculator::test_simulation_result_is_close_to_closed_form_solution": 0.14891976999933831, "clickhouse/queries/experiments/test_experiment_result.py::TestTrendExperimentCalculator::test_calculate_count_data_probability": 0.0025660299997980474, "clickhouse/queries/experiments/test_experiment_result.py::TestTrendExperimentCalculator::test_calculate_results": 0.13598782600001869, "clickhouse/queries/experiments/test_experiment_result.py::TestTrendExperimentCalculator::test_calculate_results_small_numbers": 0.11909343700017416, "clickhouse/queries/experiments/test_experiment_result.py::TestTrendExperimentCalculator::test_calculate_results_with_three_variants": 0.2792237309995471, "clickhouse/queries/experiments/test_experiment_result.py::TestTrendExperimentCalculator::test_calculate_significance_when_target_variants_underperform": 0.006684674000098312, "clickhouse/queries/experiments/test_experiment_result.py::TestTrendExperimentCalculator::test_results_with_different_exposures": 0.27084133800008203, "clickhouse/queries/funnels/test/test_funnel.py::also_test_with_materialized_columns": 0.0020041239999954996, "clickhouse/queries/funnels/test/test_funnel_correlation.py::TestCorrelationFunctions::test_are_results_insignificant": 0.002225425000233372, "clickhouse/queries/funnels/test/test_funnel_correlation.py::also_test_with_materialized_columns": 0.0018001209996327816, "clickhouse/queries/funnels/test/test_funnel_persons.py::also_test_with_materialized_columns": 0.0017705199998090393, 
"clickhouse/queries/session_recordings/test/test_clickhouse_session_recording_list.py::also_test_with_materialized_columns": 0.0017216200003531412, "clickhouse/queries/test/test_breakdown_props.py::also_test_with_materialized_columns": 0.0016819190000205708, "clickhouse/queries/test/test_groups_join_query.py::test_groups_join_query_blank": 0.0019311209994157252, "clickhouse/queries/test/test_groups_join_query.py::test_groups_join_query_filtering": 0.0032180359999074426, "clickhouse/queries/test/test_groups_join_query.py::test_groups_join_query_filtering_with_custom_key_names": 0.0026504300003580283, "clickhouse/queries/test/test_paths.py::also_test_with_materialized_columns": 0.0018291199999111996, "clickhouse/queries/test/test_trends.py::also_test_with_materialized_columns": 0.0016628190001028997, "clickhouse/sql/test/test_schema.py::test_create_table_query[\\nCREATE TABLE IF NOT EXISTS cohortpeople ON CLUSTER posthog\\n(\\n person_id UUID,\\n cohort_id Int64,\\n team_id Int64,\\n sign Int8\\n) ENGINE = CollapsingMergeTree(sign)\\nOrder By (team_id, cohort_id, person_id)\\n\\n]": 0.005352860000584769, "clickhouse/sql/test/test_schema.py::test_create_table_query[\\nCREATE TABLE IF NOT EXISTS person_static_cohort ON CLUSTER posthog\\n(\\n id UUID,\\n person_id UUID,\\n cohort_id Int64,\\n team_id Int64\\n \\n, _timestamp DateTime\\n, _offset UInt64\\n\\n) ENGINE = ReplacingMergeTree(_timestamp)\\nOrder By (team_id, cohort_id, person_id, id)\\n\\n]": 0.002348525999877893, "clickhouse/sql/test/test_schema.py::test_create_table_query[\\nCREATE TABLE IF NOT EXISTS events_dead_letter_queue ON CLUSTER posthog\\n(\\n id UUID,\\n event_uuid UUID,\\n event VARCHAR,\\n properties VARCHAR,\\n distinct_id VARCHAR,\\n team_id Int64,\\n elements_chain VARCHAR,\\n created_at DateTime64(6, 'UTC'),\\n ip VARCHAR,\\n site_url VARCHAR,\\n now DateTime64(6, 'UTC'),\\n raw_payload VARCHAR,\\n error_timestamp DateTime64(6, 'UTC'),\\n error_location VARCHAR,\\n error VARCHAR,\\n tags 
Array(VARCHAR)\\n \\n, _timestamp DateTime\\n, _offset UInt64\\n\\n) ENGINE = ReplacingMergeTree(_timestamp)\\nORDER BY (id, event_uuid, distinct_id, team_id)\\n\\nSETTINGS index_granularity=512\\n]": 0.0026635300000634743, "clickhouse/sql/test/test_schema.py::test_create_table_query[\\nCREATE MATERIALIZED VIEW IF NOT EXISTS events_dead_letter_queue_mv ON CLUSTER posthog\\nTO posthog_test.events_dead_letter_queue\\nAS SELECT\\nid,\\nevent_uuid,\\nevent,\\nproperties,\\ndistinct_id,\\nteam_id,\\nelements_chain,\\ncreated_at,\\nip,\\nsite_url,\\nnow,\\nraw_payload,\\nerror_timestamp,\\nerror_location,\\nerror,\\ntags,\\n_timestamp,\\n_offset\\nFROM posthog_test.kafka_events_dead_letter_queue\\n]": 0.0022367260003193223, "clickhouse/sql/test/test_schema.py::test_create_table_query[\\nCREATE TABLE IF NOT EXISTS kafka_events_dead_letter_queue ON CLUSTER posthog\\n(\\n id UUID,\\n event_uuid UUID,\\n event VARCHAR,\\n properties VARCHAR,\\n distinct_id VARCHAR,\\n team_id Int64,\\n elements_chain VARCHAR,\\n created_at DateTime64(6, 'UTC'),\\n ip VARCHAR,\\n site_url VARCHAR,\\n now DateTime64(6, 'UTC'),\\n raw_payload VARCHAR,\\n error_timestamp DateTime64(6, 'UTC'),\\n error_location VARCHAR,\\n error VARCHAR,\\n tags Array(VARCHAR)\\n \\n) ENGINE = Kafka('kafka:9092', 'events_dead_letter_queue_test', 'group1', 'JSONEachRow')\\n]": 0.0023466259995075234, "clickhouse/sql/test/test_schema.py::test_create_table_query[\\nCREATE TABLE IF NOT EXISTS events ON CLUSTER posthog\\n(\\n uuid UUID,\\n event VARCHAR,\\n properties VARCHAR,\\n timestamp DateTime64(6, 'UTC'),\\n team_id Int64,\\n distinct_id VARCHAR,\\n elements_chain VARCHAR,\\n created_at DateTime64(6, 'UTC')\\n \\n , $group_0 VARCHAR materialized trim(BOTH '\"' FROM JSONExtractRaw(properties, '$group_0')) COMMENT 'column_materializer::$group_0'\\n , $group_1 VARCHAR materialized trim(BOTH '\"' FROM JSONExtractRaw(properties, '$group_1')) COMMENT 'column_materializer::$group_1'\\n , $group_2 VARCHAR materialized 
trim(BOTH '\"' FROM JSONExtractRaw(properties, '$group_2')) COMMENT 'column_materializer::$group_2'\\n , $group_3 VARCHAR materialized trim(BOTH '\"' FROM JSONExtractRaw(properties, '$group_3')) COMMENT 'column_materializer::$group_3'\\n , $group_4 VARCHAR materialized trim(BOTH '\"' FROM JSONExtractRaw(properties, '$group_4')) COMMENT 'column_materializer::$group_4'\\n , $window_id VARCHAR materialized trim(BOTH '\"' FROM JSONExtractRaw(properties, '$window_id')) COMMENT 'column_materializer::$window_id'\\n , $session_id VARCHAR materialized trim(BOTH '\"' FROM JSONExtractRaw(properties, '$session_id')) COMMENT 'column_materializer::$session_id'\\n\\n\\n \\n, _timestamp DateTime\\n, _offset UInt64\\n\\n) ENGINE = ReplacingMergeTree(_timestamp)\\nPARTITION BY toYYYYMM(timestamp)\\nORDER BY (team_id, toDate(timestamp), event, cityHash64(distinct_id), cityHash64(uuid))\\n\\n\\n]": 0.0023610260000168637, "clickhouse/sql/test/test_schema.py::test_create_table_query[\\nCREATE TABLE IF NOT EXISTS kafka_events ON CLUSTER posthog\\n(\\n uuid UUID,\\n event VARCHAR,\\n properties VARCHAR,\\n timestamp DateTime64(6, 'UTC'),\\n team_id Int64,\\n distinct_id VARCHAR,\\n elements_chain VARCHAR,\\n created_at DateTime64(6, 'UTC')\\n \\n \\n) ENGINE = \\n Kafka () SETTINGS\\n kafka_broker_list = 'kafka:9092',\\n kafka_topic_list = 'clickhouse_events_proto_test',\\n kafka_group_name = 'group1',\\n kafka_format = 'Protobuf',\\n kafka_schema = 'events:Event',\\n kafka_skip_broken_messages = 100\\n \\n]": 0.0024021270000957884, "clickhouse/sql/test/test_schema.py::test_create_table_query[\\nCREATE MATERIALIZED VIEW events_mv ON CLUSTER posthog\\nTO posthog_test.events\\nAS SELECT\\nuuid,\\nevent,\\nproperties,\\ntimestamp,\\nteam_id,\\ndistinct_id,\\nelements_chain,\\ncreated_at,\\n_timestamp,\\n_offset\\nFROM posthog_test.kafka_events\\n]": 0.0023286249993361707, "clickhouse/sql/test/test_schema.py::test_create_table_query[\\nCREATE TABLE IF NOT EXISTS groups ON CLUSTER 
posthog\\n(\\n group_type_index UInt8,\\n group_key VARCHAR,\\n created_at DateTime64,\\n team_id Int64,\\n group_properties VARCHAR\\n \\n, _timestamp DateTime\\n, _offset UInt64\\n\\n) ENGINE = ReplacingMergeTree(_timestamp)\\nOrder By (team_id, group_type_index, group_key)\\n\\n]": 0.0024116260005939694, "clickhouse/sql/test/test_schema.py::test_create_table_query[\\nCREATE TABLE IF NOT EXISTS kafka_groups ON CLUSTER posthog\\n(\\n group_type_index UInt8,\\n group_key VARCHAR,\\n created_at DateTime64,\\n team_id Int64,\\n group_properties VARCHAR\\n \\n) ENGINE = Kafka('kafka:9092', 'clickhouse_groups_test', 'group1', 'JSONEachRow')\\n]": 0.002340027000172995, "clickhouse/sql/test/test_schema.py::test_create_table_query[\\nCREATE MATERIALIZED VIEW groups_mv ON CLUSTER posthog\\nTO posthog_test.groups\\nAS SELECT\\ngroup_type_index,\\ngroup_key,\\ncreated_at,\\nteam_id,\\ngroup_properties,\\n_timestamp,\\n_offset\\nFROM posthog_test.kafka_groups\\n]": 0.002318927000487747, "clickhouse/sql/test/test_schema.py::test_create_table_query[\\nCREATE TABLE IF NOT EXISTS person ON CLUSTER posthog\\n(\\n id UUID,\\n created_at DateTime64,\\n team_id Int64,\\n properties VARCHAR,\\n is_identified Boolean,\\n is_deleted Boolean DEFAULT 0\\n \\n, _timestamp DateTime\\n, _offset UInt64\\n\\n) ENGINE = ReplacingMergeTree(_timestamp)\\nOrder By (team_id, id)\\n\\n]": 0.0023635270003978803, "clickhouse/sql/test/test_schema.py::test_create_table_query[\\nCREATE TABLE IF NOT EXISTS kafka_person ON CLUSTER posthog\\n(\\n id UUID,\\n created_at DateTime64,\\n team_id Int64,\\n properties VARCHAR,\\n is_identified Boolean,\\n is_deleted Boolean DEFAULT 0\\n \\n) ENGINE = Kafka('kafka:9092', 'clickhouse_person_test', 'group1', 'JSONEachRow')\\n]": 0.0023795279998921615, "clickhouse/sql/test/test_schema.py::test_create_table_query[\\nCREATE MATERIALIZED VIEW person_mv ON CLUSTER posthog\\nTO posthog_test.person\\nAS 
SELECT\\nid,\\ncreated_at,\\nteam_id,\\nproperties,\\nis_identified,\\nis_deleted,\\n_timestamp,\\n_offset\\nFROM posthog_test.kafka_person\\n]": 0.002349227000195242, "clickhouse/sql/test/test_schema.py::test_create_table_query[\\nCREATE TABLE IF NOT EXISTS person_distinct_id ON CLUSTER posthog\\n(\\n distinct_id VARCHAR,\\n person_id UUID,\\n team_id Int64,\\n _sign Int8 DEFAULT 1,\\n is_deleted Int8 ALIAS if(_sign==-1, 1, 0)\\n \\n, _timestamp DateTime\\n, _offset UInt64\\n\\n) ENGINE = CollapsingMergeTree(_sign)\\nOrder By (team_id, distinct_id, person_id)\\n\\n]": 0.002360926000164909, "clickhouse/sql/test/test_schema.py::test_create_table_query[\\nCREATE TABLE kafka_person_distinct_id ON CLUSTER posthog\\n(\\n distinct_id VARCHAR,\\n person_id UUID,\\n team_id Int64,\\n _sign Nullable(Int8),\\n is_deleted Nullable(Int8)\\n) ENGINE = Kafka('kafka:9092', 'clickhouse_person_unique_id_test', 'group1', 'JSONEachRow')\\n]": 0.002332624999780819, "clickhouse/sql/test/test_schema.py::test_create_table_query[\\nCREATE MATERIALIZED VIEW person_distinct_id_mv ON CLUSTER posthog\\nTO posthog_test.person_distinct_id\\nAS SELECT\\ndistinct_id,\\nperson_id,\\nteam_id,\\ncoalesce(_sign, if(is_deleted==0, 1, -1)) AS _sign,\\n_timestamp,\\n_offset\\nFROM posthog_test.kafka_person_distinct_id\\n]": 0.0023313260003305913, "clickhouse/sql/test/test_schema.py::test_create_table_query[\\nCREATE TABLE IF NOT EXISTS person_distinct_id2 ON CLUSTER posthog\\n(\\n team_id Int64,\\n distinct_id VARCHAR,\\n person_id UUID,\\n is_deleted Boolean,\\n version Int64 DEFAULT 1\\n \\n, _timestamp DateTime\\n, _offset UInt64\\n\\n, _partition UInt64\\n) ENGINE = ReplacingMergeTree(version)\\n\\n ORDER BY (team_id, distinct_id)\\n SETTINGS index_granularity = 512\\n ]": 0.002462426999500167, "clickhouse/sql/test/test_schema.py::test_create_table_query[\\nCREATE TABLE IF NOT EXISTS kafka_person_distinct_id2 ON CLUSTER posthog\\n(\\n team_id Int64,\\n distinct_id VARCHAR,\\n person_id UUID,\\n 
is_deleted Boolean,\\n version Int64 DEFAULT 1\\n \\n) ENGINE = Kafka('kafka:9092', 'clickhouse_person_distinct_id_test', 'group1', 'JSONEachRow')\\n]": 0.002362025999900652, "clickhouse/sql/test/test_schema.py::test_create_table_query[\\nCREATE MATERIALIZED VIEW person_distinct_id2_mv ON CLUSTER posthog\\nTO posthog_test.person_distinct_id2\\nAS SELECT\\nteam_id,\\ndistinct_id,\\nperson_id,\\nis_deleted,\\nversion,\\n_timestamp,\\n_offset,\\n_partition\\nFROM posthog_test.kafka_person_distinct_id2\\n]": 0.002344527000332164, "clickhouse/sql/test/test_schema.py::test_create_table_query[\\nCREATE TABLE IF NOT EXISTS kafka_plugin_log_entries ON CLUSTER posthog\\n(\\n id UUID,\\n team_id Int64,\\n plugin_id Int64,\\n plugin_config_id Int64,\\n timestamp DateTime64(6, 'UTC'),\\n source VARCHAR,\\n type VARCHAR,\\n message VARCHAR,\\n instance_id UUID\\n \\n) ENGINE = Kafka('kafka:9092', 'plugin_log_entries_test', 'group1', 'JSONEachRow')\\n]": 0.0023709269999017124, "clickhouse/sql/test/test_schema.py::test_create_table_query[\\nCREATE TABLE IF NOT EXISTS plugin_log_entries ON CLUSTER posthog\\n(\\n id UUID,\\n team_id Int64,\\n plugin_id Int64,\\n plugin_config_id Int64,\\n timestamp DateTime64(6, 'UTC'),\\n source VARCHAR,\\n type VARCHAR,\\n message VARCHAR,\\n instance_id UUID\\n \\n, _timestamp DateTime\\n, _offset UInt64\\n\\n) ENGINE = ReplacingMergeTree(_timestamp)\\nPARTITION BY plugin_id ORDER BY (team_id, id)\\n\\nSETTINGS index_granularity=512\\n]": 0.0024164280002878513, "clickhouse/sql/test/test_schema.py::test_create_table_query[\\nCREATE MATERIALIZED VIEW plugin_log_entries_mv ON CLUSTER posthog\\nTO posthog_test.plugin_log_entries\\nAS SELECT\\nid,\\nteam_id,\\nplugin_id,\\nplugin_config_id,\\ntimestamp,\\nsource,\\ntype,\\nmessage,\\ninstance_id,\\n_timestamp,\\n_offset\\nFROM posthog_test.kafka_plugin_log_entries\\n]": 0.0023598259999744187, "clickhouse/sql/test/test_schema.py::test_create_table_query[\\nCREATE TABLE IF NOT EXISTS 
session_recording_events ON CLUSTER posthog\\n(\\n uuid UUID,\\n timestamp DateTime64(6, 'UTC'),\\n team_id Int64,\\n distinct_id VARCHAR,\\n session_id VARCHAR,\\n window_id VARCHAR,\\n snapshot_data VARCHAR,\\n created_at DateTime64(6, 'UTC')\\n \\n , has_full_snapshot BOOLEAN materialized JSONExtractBool(snapshot_data, 'has_full_snapshot')\\n\\n \\n, _timestamp DateTime\\n, _offset UInt64\\n\\n) ENGINE = ReplacingMergeTree(_timestamp)\\nPARTITION BY toYYYYMMDD(timestamp)\\nORDER BY (team_id, toHour(timestamp), session_id, timestamp, uuid)\\n\\nSETTINGS index_granularity=512\\n]": 0.002342526000120415, "clickhouse/sql/test/test_schema.py::test_create_table_query[\\nCREATE MATERIALIZED VIEW session_recording_events_mv ON CLUSTER posthog\\nTO posthog_test.session_recording_events\\nAS SELECT\\nuuid,\\ntimestamp,\\nteam_id,\\ndistinct_id,\\nsession_id,\\nwindow_id,\\nsnapshot_data,\\ncreated_at,\\n_timestamp,\\n_offset\\nFROM posthog_test.kafka_session_recording_events\\n]": 0.0023197260002234543, "clickhouse/sql/test/test_schema.py::test_create_table_query[\\nCREATE TABLE IF NOT EXISTS kafka_session_recording_events ON CLUSTER posthog\\n(\\n uuid UUID,\\n timestamp DateTime64(6, 'UTC'),\\n team_id Int64,\\n distinct_id VARCHAR,\\n session_id VARCHAR,\\n window_id VARCHAR,\\n snapshot_data VARCHAR,\\n created_at DateTime64(6, 'UTC')\\n \\n \\n) ENGINE = Kafka('kafka:9092', 'clickhouse_session_recording_events_test', 'group1', 'JSONEachRow')\\n]": 0.002362226999593986, "clickhouse/sql/test/test_schema.py::test_create_table_query_replicated_and_storage[\\nCREATE TABLE IF NOT EXISTS cohortpeople ON CLUSTER posthog\\n(\\n person_id UUID,\\n cohort_id Int64,\\n team_id Int64,\\n sign Int8\\n) ENGINE = CollapsingMergeTree(sign)\\nOrder By (team_id, cohort_id, person_id)\\n\\n]": 0.0030694339993715403, "clickhouse/sql/test/test_schema.py::test_create_table_query_replicated_and_storage[\\nCREATE TABLE IF NOT EXISTS person_static_cohort ON CLUSTER posthog\\n(\\n id UUID,\\n 
person_id UUID,\\n cohort_id Int64,\\n team_id Int64\\n \\n, _timestamp DateTime\\n, _offset UInt64\\n\\n) ENGINE = ReplacingMergeTree(_timestamp)\\nOrder By (team_id, cohort_id, person_id, id)\\n\\n]": 0.00296903399976145, "clickhouse/sql/test/test_schema.py::test_create_table_query_replicated_and_storage[\\nCREATE TABLE IF NOT EXISTS events_dead_letter_queue ON CLUSTER posthog\\n(\\n id UUID,\\n event_uuid UUID,\\n event VARCHAR,\\n properties VARCHAR,\\n distinct_id VARCHAR,\\n team_id Int64,\\n elements_chain VARCHAR,\\n created_at DateTime64(6, 'UTC'),\\n ip VARCHAR,\\n site_url VARCHAR,\\n now DateTime64(6, 'UTC'),\\n raw_payload VARCHAR,\\n error_timestamp DateTime64(6, 'UTC'),\\n error_location VARCHAR,\\n error VARCHAR,\\n tags Array(VARCHAR)\\n \\n, _timestamp DateTime\\n, _offset UInt64\\n\\n) ENGINE = ReplacingMergeTree(_timestamp)\\nORDER BY (id, event_uuid, distinct_id, team_id)\\n\\nSETTINGS index_granularity=512\\n]": 0.002974233999793796, "clickhouse/sql/test/test_schema.py::test_create_table_query_replicated_and_storage[\\nCREATE MATERIALIZED VIEW IF NOT EXISTS events_dead_letter_queue_mv ON CLUSTER posthog\\nTO posthog_test.events_dead_letter_queue\\nAS SELECT\\nid,\\nevent_uuid,\\nevent,\\nproperties,\\ndistinct_id,\\nteam_id,\\nelements_chain,\\ncreated_at,\\nip,\\nsite_url,\\nnow,\\nraw_payload,\\nerror_timestamp,\\nerror_location,\\nerror,\\ntags,\\n_timestamp,\\n_offset\\nFROM posthog_test.kafka_events_dead_letter_queue\\n]": 0.002508528998987458, "clickhouse/sql/test/test_schema.py::test_create_table_query_replicated_and_storage[\\nCREATE TABLE IF NOT EXISTS kafka_events_dead_letter_queue ON CLUSTER posthog\\n(\\n id UUID,\\n event_uuid UUID,\\n event VARCHAR,\\n properties VARCHAR,\\n distinct_id VARCHAR,\\n team_id Int64,\\n elements_chain VARCHAR,\\n created_at DateTime64(6, 'UTC'),\\n ip VARCHAR,\\n site_url VARCHAR,\\n now DateTime64(6, 'UTC'),\\n raw_payload VARCHAR,\\n error_timestamp DateTime64(6, 'UTC'),\\n error_location 
VARCHAR,\\n error VARCHAR,\\n tags Array(VARCHAR)\\n \\n) ENGINE = Kafka('kafka:9092', 'events_dead_letter_queue_test', 'group1', 'JSONEachRow')\\n]": 0.0026259289998051827, "clickhouse/sql/test/test_schema.py::test_create_table_query_replicated_and_storage[\\nCREATE TABLE IF NOT EXISTS events ON CLUSTER posthog\\n(\\n uuid UUID,\\n event VARCHAR,\\n properties VARCHAR,\\n timestamp DateTime64(6, 'UTC'),\\n team_id Int64,\\n distinct_id VARCHAR,\\n elements_chain VARCHAR,\\n created_at DateTime64(6, 'UTC')\\n \\n , $group_0 VARCHAR materialized trim(BOTH '\"' FROM JSONExtractRaw(properties, '$group_0')) COMMENT 'column_materializer::$group_0'\\n , $group_1 VARCHAR materialized trim(BOTH '\"' FROM JSONExtractRaw(properties, '$group_1')) COMMENT 'column_materializer::$group_1'\\n , $group_2 VARCHAR materialized trim(BOTH '\"' FROM JSONExtractRaw(properties, '$group_2')) COMMENT 'column_materializer::$group_2'\\n , $group_3 VARCHAR materialized trim(BOTH '\"' FROM JSONExtractRaw(properties, '$group_3')) COMMENT 'column_materializer::$group_3'\\n , $group_4 VARCHAR materialized trim(BOTH '\"' FROM JSONExtractRaw(properties, '$group_4')) COMMENT 'column_materializer::$group_4'\\n , $window_id VARCHAR materialized trim(BOTH '\"' FROM JSONExtractRaw(properties, '$window_id')) COMMENT 'column_materializer::$window_id'\\n , $session_id VARCHAR materialized trim(BOTH '\"' FROM JSONExtractRaw(properties, '$session_id')) COMMENT 'column_materializer::$session_id'\\n\\n\\n \\n, _timestamp DateTime\\n, _offset UInt64\\n\\n) ENGINE = ReplacingMergeTree(_timestamp)\\nPARTITION BY toYYYYMM(timestamp)\\nORDER BY (team_id, toDate(timestamp), event, cityHash64(distinct_id), cityHash64(uuid))\\n\\n\\n]": 0.002947933000541525, "clickhouse/sql/test/test_schema.py::test_create_table_query_replicated_and_storage[\\nCREATE TABLE IF NOT EXISTS kafka_events ON CLUSTER posthog\\n(\\n uuid UUID,\\n event VARCHAR,\\n properties VARCHAR,\\n timestamp DateTime64(6, 'UTC'),\\n team_id Int64,\\n 
distinct_id VARCHAR,\\n elements_chain VARCHAR,\\n created_at DateTime64(6, 'UTC')\\n \\n \\n) ENGINE = \\n Kafka () SETTINGS\\n kafka_broker_list = 'kafka:9092',\\n kafka_topic_list = 'clickhouse_events_proto_test',\\n kafka_group_name = 'group1',\\n kafka_format = 'Protobuf',\\n kafka_schema = 'events:Event',\\n kafka_skip_broken_messages = 100\\n \\n]": 0.0026926309997179487, "clickhouse/sql/test/test_schema.py::test_create_table_query_replicated_and_storage[\\nCREATE MATERIALIZED VIEW events_mv ON CLUSTER posthog\\nTO posthog_test.events\\nAS SELECT\\nuuid,\\nevent,\\nproperties,\\ntimestamp,\\nteam_id,\\ndistinct_id,\\nelements_chain,\\ncreated_at,\\n_timestamp,\\n_offset\\nFROM posthog_test.kafka_events\\n]": 0.00259983000023567, "clickhouse/sql/test/test_schema.py::test_create_table_query_replicated_and_storage[\\nCREATE TABLE IF NOT EXISTS groups ON CLUSTER posthog\\n(\\n group_type_index UInt8,\\n group_key VARCHAR,\\n created_at DateTime64,\\n team_id Int64,\\n group_properties VARCHAR\\n \\n, _timestamp DateTime\\n, _offset UInt64\\n\\n) ENGINE = ReplacingMergeTree(_timestamp)\\nOrder By (team_id, group_type_index, group_key)\\n\\n]": 0.0029325320001589716, "clickhouse/sql/test/test_schema.py::test_create_table_query_replicated_and_storage[\\nCREATE TABLE IF NOT EXISTS kafka_groups ON CLUSTER posthog\\n(\\n group_type_index UInt8,\\n group_key VARCHAR,\\n created_at DateTime64,\\n team_id Int64,\\n group_properties VARCHAR\\n \\n) ENGINE = Kafka('kafka:9092', 'clickhouse_groups_test', 'group1', 'JSONEachRow')\\n]": 0.0026240289998895605, "clickhouse/sql/test/test_schema.py::test_create_table_query_replicated_and_storage[\\nCREATE MATERIALIZED VIEW groups_mv ON CLUSTER posthog\\nTO posthog_test.groups\\nAS SELECT\\ngroup_type_index,\\ngroup_key,\\ncreated_at,\\nteam_id,\\ngroup_properties,\\n_timestamp,\\n_offset\\nFROM posthog_test.kafka_groups\\n]": 0.002606729000035557, 
"clickhouse/sql/test/test_schema.py::test_create_table_query_replicated_and_storage[\\nCREATE TABLE IF NOT EXISTS person ON CLUSTER posthog\\n(\\n id UUID,\\n created_at DateTime64,\\n team_id Int64,\\n properties VARCHAR,\\n is_identified Boolean,\\n is_deleted Boolean DEFAULT 0\\n \\n, _timestamp DateTime\\n, _offset UInt64\\n\\n) ENGINE = ReplacingMergeTree(_timestamp)\\nOrder By (team_id, id)\\n\\n]": 0.0029596330005006166, "clickhouse/sql/test/test_schema.py::test_create_table_query_replicated_and_storage[\\nCREATE TABLE IF NOT EXISTS kafka_person ON CLUSTER posthog\\n(\\n id UUID,\\n created_at DateTime64,\\n team_id Int64,\\n properties VARCHAR,\\n is_identified Boolean,\\n is_deleted Boolean DEFAULT 0\\n \\n) ENGINE = Kafka('kafka:9092', 'clickhouse_person_test', 'group1', 'JSONEachRow')\\n]": 0.002599428999928932, "clickhouse/sql/test/test_schema.py::test_create_table_query_replicated_and_storage[\\nCREATE MATERIALIZED VIEW person_mv ON CLUSTER posthog\\nTO posthog_test.person\\nAS SELECT\\nid,\\ncreated_at,\\nteam_id,\\nproperties,\\nis_identified,\\nis_deleted,\\n_timestamp,\\n_offset\\nFROM posthog_test.kafka_person\\n]": 0.0025470290001976537, "clickhouse/sql/test/test_schema.py::test_create_table_query_replicated_and_storage[\\nCREATE TABLE IF NOT EXISTS person_distinct_id ON CLUSTER posthog\\n(\\n distinct_id VARCHAR,\\n person_id UUID,\\n team_id Int64,\\n _sign Int8 DEFAULT 1,\\n is_deleted Int8 ALIAS if(_sign==-1, 1, 0)\\n \\n, _timestamp DateTime\\n, _offset UInt64\\n\\n) ENGINE = CollapsingMergeTree(_sign)\\nOrder By (team_id, distinct_id, person_id)\\n\\n]": 0.0028019319997838465, "clickhouse/sql/test/test_schema.py::test_create_table_query_replicated_and_storage[\\nCREATE TABLE kafka_person_distinct_id ON CLUSTER posthog\\n(\\n distinct_id VARCHAR,\\n person_id UUID,\\n team_id Int64,\\n _sign Nullable(Int8),\\n is_deleted Nullable(Int8)\\n) ENGINE = Kafka('kafka:9092', 'clickhouse_person_unique_id_test', 'group1', 'JSONEachRow')\\n]": 
0.0024372270004278107, "clickhouse/sql/test/test_schema.py::test_create_table_query_replicated_and_storage[\\nCREATE MATERIALIZED VIEW person_distinct_id_mv ON CLUSTER posthog\\nTO posthog_test.person_distinct_id\\nAS SELECT\\ndistinct_id,\\nperson_id,\\nteam_id,\\ncoalesce(_sign, if(is_deleted==0, 1, -1)) AS _sign,\\n_timestamp,\\n_offset\\nFROM posthog_test.kafka_person_distinct_id\\n]": 0.0024399280000579893, "clickhouse/sql/test/test_schema.py::test_create_table_query_replicated_and_storage[\\nCREATE TABLE IF NOT EXISTS person_distinct_id2 ON CLUSTER posthog\\n(\\n team_id Int64,\\n distinct_id VARCHAR,\\n person_id UUID,\\n is_deleted Boolean,\\n version Int64 DEFAULT 1\\n \\n, _timestamp DateTime\\n, _offset UInt64\\n\\n, _partition UInt64\\n) ENGINE = ReplacingMergeTree(version)\\n\\n ORDER BY (team_id, distinct_id)\\n SETTINGS index_granularity = 512\\n ]": 0.002731131000473397, "clickhouse/sql/test/test_schema.py::test_create_table_query_replicated_and_storage[\\nCREATE TABLE IF NOT EXISTS kafka_person_distinct_id2 ON CLUSTER posthog\\n(\\n team_id Int64,\\n distinct_id VARCHAR,\\n person_id UUID,\\n is_deleted Boolean,\\n version Int64 DEFAULT 1\\n \\n) ENGINE = Kafka('kafka:9092', 'clickhouse_person_distinct_id_test', 'group1', 'JSONEachRow')\\n]": 0.002548828999806574, "clickhouse/sql/test/test_schema.py::test_create_table_query_replicated_and_storage[\\nCREATE MATERIALIZED VIEW person_distinct_id2_mv ON CLUSTER posthog\\nTO posthog_test.person_distinct_id2\\nAS SELECT\\nteam_id,\\ndistinct_id,\\nperson_id,\\nis_deleted,\\nversion,\\n_timestamp,\\n_offset,\\n_partition\\nFROM posthog_test.kafka_person_distinct_id2\\n]": 0.002540529000270908, "clickhouse/sql/test/test_schema.py::test_create_table_query_replicated_and_storage[\\nCREATE TABLE IF NOT EXISTS kafka_plugin_log_entries ON CLUSTER posthog\\n(\\n id UUID,\\n team_id Int64,\\n plugin_id Int64,\\n plugin_config_id Int64,\\n timestamp DateTime64(6, 'UTC'),\\n source VARCHAR,\\n type VARCHAR,\\n 
message VARCHAR,\\n instance_id UUID\\n \\n) ENGINE = Kafka('kafka:9092', 'plugin_log_entries_test', 'group1', 'JSONEachRow')\\n]": 0.0025744280005710607, "clickhouse/sql/test/test_schema.py::test_create_table_query_replicated_and_storage[\\nCREATE TABLE IF NOT EXISTS plugin_log_entries ON CLUSTER posthog\\n(\\n id UUID,\\n team_id Int64,\\n plugin_id Int64,\\n plugin_config_id Int64,\\n timestamp DateTime64(6, 'UTC'),\\n source VARCHAR,\\n type VARCHAR,\\n message VARCHAR,\\n instance_id UUID\\n \\n, _timestamp DateTime\\n, _offset UInt64\\n\\n) ENGINE = ReplacingMergeTree(_timestamp)\\nPARTITION BY plugin_id ORDER BY (team_id, id)\\n\\nSETTINGS index_granularity=512\\n]": 0.0029442329996527405, "clickhouse/sql/test/test_schema.py::test_create_table_query_replicated_and_storage[\\nCREATE MATERIALIZED VIEW plugin_log_entries_mv ON CLUSTER posthog\\nTO posthog_test.plugin_log_entries\\nAS SELECT\\nid,\\nteam_id,\\nplugin_id,\\nplugin_config_id,\\ntimestamp,\\nsource,\\ntype,\\nmessage,\\ninstance_id,\\n_timestamp,\\n_offset\\nFROM posthog_test.kafka_plugin_log_entries\\n]": 0.002461029000187409, "clickhouse/sql/test/test_schema.py::test_create_table_query_replicated_and_storage[\\nCREATE TABLE IF NOT EXISTS session_recording_events ON CLUSTER posthog\\n(\\n uuid UUID,\\n timestamp DateTime64(6, 'UTC'),\\n team_id Int64,\\n distinct_id VARCHAR,\\n session_id VARCHAR,\\n window_id VARCHAR,\\n snapshot_data VARCHAR,\\n created_at DateTime64(6, 'UTC')\\n \\n , has_full_snapshot BOOLEAN materialized JSONExtractBool(snapshot_data, 'has_full_snapshot')\\n\\n \\n, _timestamp DateTime\\n, _offset UInt64\\n\\n) ENGINE = ReplacingMergeTree(_timestamp)\\nPARTITION BY toYYYYMMDD(timestamp)\\nORDER BY (team_id, toHour(timestamp), session_id, timestamp, uuid)\\n\\nSETTINGS index_granularity=512\\n]": 0.0030223339995245624, "clickhouse/sql/test/test_schema.py::test_create_table_query_replicated_and_storage[\\nCREATE MATERIALIZED VIEW session_recording_events_mv ON CLUSTER 
posthog\\nTO posthog_test.session_recording_events\\nAS SELECT\\nuuid,\\ntimestamp,\\nteam_id,\\ndistinct_id,\\nsession_id,\\nwindow_id,\\nsnapshot_data,\\ncreated_at,\\n_timestamp,\\n_offset\\nFROM posthog_test.kafka_session_recording_events\\n]": 0.0024095269995996205, "clickhouse/sql/test/test_schema.py::test_create_table_query_replicated_and_storage[\\nCREATE TABLE IF NOT EXISTS kafka_session_recording_events ON CLUSTER posthog\\n(\\n uuid UUID,\\n timestamp DateTime64(6, 'UTC'),\\n team_id Int64,\\n distinct_id VARCHAR,\\n session_id VARCHAR,\\n window_id VARCHAR,\\n snapshot_data VARCHAR,\\n created_at DateTime64(6, 'UTC')\\n \\n \\n) ENGINE = Kafka('kafka:9092', 'clickhouse_session_recording_events_test', 'group1', 'JSONEachRow')\\n]": 0.0024596260000180337, "clickhouse/sql/test/test_schema.py::test_create_kafka_table_with_different_kafka_host[\\nCREATE TABLE IF NOT EXISTS kafka_events_dead_letter_queue ON CLUSTER posthog\\n(\\n id UUID,\\n event_uuid UUID,\\n event VARCHAR,\\n properties VARCHAR,\\n distinct_id VARCHAR,\\n team_id Int64,\\n elements_chain VARCHAR,\\n created_at DateTime64(6, 'UTC'),\\n ip VARCHAR,\\n site_url VARCHAR,\\n now DateTime64(6, 'UTC'),\\n raw_payload VARCHAR,\\n error_timestamp DateTime64(6, 'UTC'),\\n error_location VARCHAR,\\n error VARCHAR,\\n tags Array(VARCHAR)\\n \\n) ENGINE = Kafka('kafka:9092', 'events_dead_letter_queue_test', 'group1', 'JSONEachRow')\\n]": 0.0028122310004619067, "clickhouse/sql/test/test_schema.py::test_create_kafka_table_with_different_kafka_host[\\nCREATE TABLE IF NOT EXISTS kafka_events ON CLUSTER posthog\\n(\\n uuid UUID,\\n event VARCHAR,\\n properties VARCHAR,\\n timestamp DateTime64(6, 'UTC'),\\n team_id Int64,\\n distinct_id VARCHAR,\\n elements_chain VARCHAR,\\n created_at DateTime64(6, 'UTC')\\n \\n \\n) ENGINE = \\n Kafka () SETTINGS\\n kafka_broker_list = 'kafka:9092',\\n kafka_topic_list = 'clickhouse_events_proto_test',\\n kafka_group_name = 'group1',\\n kafka_format = 'Protobuf',\\n 
kafka_schema = 'events:Event',\\n kafka_skip_broken_messages = 100\\n \\n]": 0.0026832290000129433, "clickhouse/sql/test/test_schema.py::test_create_kafka_table_with_different_kafka_host[\\nCREATE TABLE IF NOT EXISTS kafka_groups ON CLUSTER posthog\\n(\\n group_type_index UInt8,\\n group_key VARCHAR,\\n created_at DateTime64,\\n team_id Int64,\\n group_properties VARCHAR\\n \\n) ENGINE = Kafka('kafka:9092', 'clickhouse_groups_test', 'group1', 'JSONEachRow')\\n]": 0.002731829999902402, "clickhouse/sql/test/test_schema.py::test_create_kafka_table_with_different_kafka_host[\\nCREATE TABLE IF NOT EXISTS kafka_person ON CLUSTER posthog\\n(\\n id UUID,\\n created_at DateTime64,\\n team_id Int64,\\n properties VARCHAR,\\n is_identified Boolean,\\n is_deleted Boolean DEFAULT 0\\n \\n) ENGINE = Kafka('kafka:9092', 'clickhouse_person_test', 'group1', 'JSONEachRow')\\n]": 0.002687929999865446, "clickhouse/sql/test/test_schema.py::test_create_kafka_table_with_different_kafka_host[\\nCREATE TABLE kafka_person_distinct_id ON CLUSTER posthog\\n(\\n distinct_id VARCHAR,\\n person_id UUID,\\n team_id Int64,\\n _sign Nullable(Int8),\\n is_deleted Nullable(Int8)\\n) ENGINE = Kafka('kafka:9092', 'clickhouse_person_unique_id_test', 'group1', 'JSONEachRow')\\n]": 0.0027057309994233947, "clickhouse/sql/test/test_schema.py::test_create_kafka_table_with_different_kafka_host[\\nCREATE TABLE IF NOT EXISTS kafka_person_distinct_id2 ON CLUSTER posthog\\n(\\n team_id Int64,\\n distinct_id VARCHAR,\\n person_id UUID,\\n is_deleted Boolean,\\n version Int64 DEFAULT 1\\n \\n) ENGINE = Kafka('kafka:9092', 'clickhouse_person_distinct_id_test', 'group1', 'JSONEachRow')\\n]": 0.00273703100037892, "clickhouse/sql/test/test_schema.py::test_create_kafka_table_with_different_kafka_host[\\nCREATE TABLE IF NOT EXISTS kafka_plugin_log_entries ON CLUSTER posthog\\n(\\n id UUID,\\n team_id Int64,\\n plugin_id Int64,\\n plugin_config_id Int64,\\n timestamp DateTime64(6, 'UTC'),\\n source VARCHAR,\\n type 
VARCHAR,\\n message VARCHAR,\\n instance_id UUID\\n \\n) ENGINE = Kafka('kafka:9092', 'plugin_log_entries_test', 'group1', 'JSONEachRow')\\n]": 0.0027253299999756564, "clickhouse/sql/test/test_schema.py::test_create_kafka_table_with_different_kafka_host[\\nCREATE TABLE IF NOT EXISTS kafka_session_recording_events ON CLUSTER posthog\\n(\\n uuid UUID,\\n timestamp DateTime64(6, 'UTC'),\\n team_id Int64,\\n distinct_id VARCHAR,\\n session_id VARCHAR,\\n window_id VARCHAR,\\n snapshot_data VARCHAR,\\n created_at DateTime64(6, 'UTC')\\n \\n \\n) ENGINE = Kafka('kafka:9092', 'clickhouse_session_recording_events_test', 'group1', 'JSONEachRow')\\n]": 0.0026919300003100943, "clickhouse/sql/test/test_schema.py::test_create_kafka_events_with_disabled_protobuf": 0.0026012289999925997, "clickhouse/test/test_error.py::test_wrap_query_error[error0-AttributeError-Foobar-None]": 0.0021369230003074335, "clickhouse/test/test_error.py::test_wrap_query_error[error1-EstimatedQueryExecutionTimeTooLong-Estimated query execution time (34.5 seconds) is too long.-None]": 0.0035665400005200354, "clickhouse/test/test_error.py::test_wrap_query_error[error2-CHQueryErrorSyntaxError-Code: 62.\\nSyntax error-62]": 0.002301224999882834, "clickhouse/test/test_error.py::test_wrap_query_error[error3-CHQueryErrorUnknown-Code: 9999.\\nSyntax error-9999]": 0.002256424999814044, "clickhouse/views/test/test_clickhouse_retention.py::also_test_with_materialized_columns": 0.0016704190002201358, "clickhouse/views/test/test_clickhouse_trends.py::also_test_with_materialized_columns": 1.8125781269995969} diff --git a/bin/plugin-server b/bin/plugin-server index 75000245ac5da..157e7896fc90b 100755 --- a/bin/plugin-server +++ b/bin/plugin-server @@ -46,7 +46,15 @@ if [ $? 
-ne 0 ]; then exit 1 fi -[[ -n $DEBUG ]] && cmd="pnpm start:dev" || cmd="node dist/index.js" +if [[ -n $DEBUG ]]; then + if [[ -n $NO_WATCH ]]; then + cmd="pnpm start:devNoWatch" + else + cmd="pnpm start:dev" + fi +else + cmd="node dist/index.js" +fi if [[ -n $NO_RESTART_LOOP ]]; then echo "▶️ Starting plugin server..." diff --git a/codecov.yml b/codecov.yml deleted file mode 100644 index 1da6cfe50e6ba..0000000000000 --- a/codecov.yml +++ /dev/null @@ -1,3 +0,0 @@ -# Disable PR comments for now to not spam PRs. We should still have annotations -# on files -comment: false diff --git a/cypress/e2e/notebooks-creation-and-deletion.cy.ts b/cypress/e2e/notebooks-creation-and-deletion.cy.ts new file mode 100644 index 0000000000000..6206880118a81 --- /dev/null +++ b/cypress/e2e/notebooks-creation-and-deletion.cy.ts @@ -0,0 +1,43 @@ +import { randomString } from '../support/random' + +function visitNotebooksList(): void { + cy.clickNavMenu('dashboards') + cy.location('pathname').should('include', '/dashboard') + cy.get('h1').should('contain', 'Dashboards & Notebooks') + cy.get('li').contains('Notebooks').should('exist').click() +} + +function createNotebookAndFindInList(notebookTitle: string): void { + cy.get('[data-attr="new-notebook"]').click() + cy.get('.NotebookEditor').type(notebookTitle) + + visitNotebooksList() + cy.get('[data-attr="notebooks-search"]').type(notebookTitle) +} + +describe('Notebooks', () => { + beforeEach(() => { + visitNotebooksList() + }) + + it('can create and name a notebook', () => { + const notebookTitle = randomString('My new notebook') + + createNotebookAndFindInList(notebookTitle) + cy.get('[data-attr="notebooks-table"] tbody tr').should('have.length', 1) + }) + + it('can delete a notebook', () => { + const notebookTitle = randomString('My notebook to delete') + + createNotebookAndFindInList(notebookTitle) + + cy.contains('[data-attr="notebooks-table"] tr', notebookTitle).within(() => { + cy.get('[aria-label="more"]').click() + }) + 
cy.contains('.LemonButton', 'Delete').click() + + // and the table updates + cy.contains('[data-attr="notebooks-table"] tr', notebookTitle).should('not.exist') + }) +}) diff --git a/cypress/e2e/notebooks.cy.ts b/cypress/e2e/notebooks.cy.ts index dfb50b2e50050..7aba143661d54 100644 --- a/cypress/e2e/notebooks.cy.ts +++ b/cypress/e2e/notebooks.cy.ts @@ -7,16 +7,23 @@ describe('Notebooks', () => { 'loadSessionRecordingsList' ) }) + cy.fixture('api/session-recordings/recording.json').then((recording) => { cy.intercept('GET', /api\/projects\/\d+\/session_recordings\/.*\?.*/, { body: recording }).as( 'loadSessionRecording' ) }) + cy.fixture('api/notebooks/notebooks.json').then((notebook) => { cy.intercept('GET', /api\/projects\/\d+\/notebooks\//, { body: notebook }).as('loadNotebooksList') }) + cy.fixture('api/notebooks/notebook.json').then((notebook) => { cy.intercept('GET', /api\/projects\/\d+\/notebooks\/.*\//, { body: notebook }).as('loadNotebook') + // this means saving doesn't work but so what? 
+ cy.intercept('PATCH', /api\/projects\/\d+\/notebooks\/.*\//, (req, res) => { + res.reply(req.body) + }).as('patchNotebook') }) cy.clickNavMenu('dashboards') @@ -34,7 +41,7 @@ describe('Notebooks', () => { }) it('Insertion suggestions can be dismissed', () => { - cy.visit(urls.notebookEdit('h11RoiwV')) + cy.visit(urls.notebook('h11RoiwV')) cy.get('.NotebookEditor').type('{enter}') cy.get('.NotebookRecordingTimestamp--preview').should('exist') @@ -53,4 +60,39 @@ describe('Notebooks', () => { cy.get('.ph-recording.NotebookNode').should('be.visible') cy.get('.NotebookRecordingTimestamp').should('contain.text', '0:00') }) + + describe('text types', () => { + beforeEach(() => { + cy.get('li').contains('Notebooks').should('exist').click() + cy.get('[data-attr="new-notebook"]').click() + // we don't actually get a new notebook because the API is mocked + // so, "exit" the timestamp block we start in + cy.get('.NotebookEditor').type('{esc}{enter}{enter}') + }) + + it('Can add a number list', () => { + cy.get('.NotebookEditor').type('1. the first') + cy.get('.NotebookEditor').type('{enter}') + // no need to type the number now. 
it should be inserted automatically + cy.get('.NotebookEditor').type('the second') + cy.get('.NotebookEditor').type('{enter}') + cy.get('ol').should('contain.text', 'the first') + cy.get('ol').should('contain.text', 'the second') + // the numbered list auto inserts the next list item + cy.get('.NotebookEditor ol li').should('have.length', 3) + }) + + it('Can add bold', () => { + cy.get('.NotebookEditor').type('**bold**') + cy.get('.NotebookEditor p').last().should('contain.html', 'bold') + }) + + it('Can add bullet list', () => { + cy.get('.NotebookEditor').type('* the first{enter}the second{enter}') + cy.get('ul').should('contain.text', 'the first') + cy.get('ul').should('contain.text', 'the second') + // the list auto inserts the next list item + cy.get('.NotebookEditor ul li').should('have.length', 3) + }) + }) }) diff --git a/docker-compose.hobby.yml b/docker-compose.hobby.yml index cc61b627e0a0c..bf63efa21e0b2 100644 --- a/docker-compose.hobby.yml +++ b/docker-compose.hobby.yml @@ -13,8 +13,11 @@ services: extends: file: docker-compose.base.yml service: db + # Pin to postgres 12 until we have a process for pg_upgrade to postgres 15 for existing installations + image: ${DOCKER_REGISTRY_PREFIX:-}postgres:12-alpine volumes: - postgres-data:/var/lib/postgresql/data + redis: extends: file: docker-compose.base.yml diff --git a/ee/api/test/base.py b/ee/api/test/base.py index a00ae9f89cc2b..05691d8c9153d 100644 --- a/ee/api/test/base.py +++ b/ee/api/test/base.py @@ -1,7 +1,7 @@ import datetime from typing import Dict, Optional, cast -import pytz +from zoneinfo import ZoneInfo from ee.api.test.fixtures.available_product_features import AVAILABLE_PRODUCT_FEATURES from ee.models.license import License, LicenseManager @@ -30,7 +30,7 @@ def setUpTestData(cls): cls.license = super(LicenseManager, cast(LicenseManager, License.objects)).create( key=cls.CONFIG_LICENSE_KEY, plan=cls.CONFIG_LICENSE_PLAN, - valid_until=datetime.datetime(2038, 1, 19, 3, 14, 7, tzinfo=pytz.UTC), +
valid_until=datetime.datetime(2038, 1, 19, 3, 14, 7, tzinfo=ZoneInfo("UTC")), ) if hasattr(cls, "organization") and cls.organization: # type: ignore cls.organization.available_product_features = AVAILABLE_PRODUCT_FEATURES # type: ignore diff --git a/ee/api/test/test_billing.py b/ee/api/test/test_billing.py index 19ef6d7052bce..0b5297e9d9261 100644 --- a/ee/api/test/test_billing.py +++ b/ee/api/test/test_billing.py @@ -4,7 +4,7 @@ from uuid import uuid4 import jwt -import pytz +from zoneinfo import ZoneInfo from dateutil.relativedelta import relativedelta from django.utils.timezone import now from freezegun import freeze_time @@ -377,13 +377,13 @@ def test_license_is_updated_on_billing_load(self, mock_request): self.client.get("/api/billing-v2") self.license.refresh_from_db() - self.license.valid_until = datetime(2022, 1, 2, 0, 0, 0, tzinfo=pytz.UTC) + self.license.valid_until = datetime(2022, 1, 2, 0, 0, 0, tzinfo=ZoneInfo("UTC")) self.license.save() assert self.license.plan == "scale" TEST_clear_instance_license_cache() license = get_cached_instance_license() assert license.plan == "scale" - assert license.valid_until == datetime(2022, 1, 2, 0, 0, 0, tzinfo=pytz.UTC) + assert license.valid_until == datetime(2022, 1, 2, 0, 0, 0, tzinfo=ZoneInfo("UTC")) mock_request.return_value.json.return_value = { "license": { @@ -396,7 +396,7 @@ def test_license_is_updated_on_billing_load(self, mock_request): license = get_cached_instance_license() assert license.plan == "enterprise" # Should be extended by 30 days - assert license.valid_until == datetime(2022, 1, 31, 12, 0, 0, tzinfo=pytz.UTC) + assert license.valid_until == datetime(2022, 1, 31, 12, 0, 0, tzinfo=ZoneInfo("UTC")) @patch("ee.api.billing.requests.get") def test_organization_available_features_updated_if_different(self, mock_request): diff --git a/ee/api/test/test_license.py b/ee/api/test/test_license.py index 55e310e3bd5b2..813035def9546 100644 --- a/ee/api/test/test_license.py +++ b/ee/api/test/test_license.py 
@@ -2,7 +2,7 @@ from unittest.mock import Mock, patch import pytest -import pytz +from zoneinfo import ZoneInfo from dateutil.relativedelta import relativedelta from django.utils import timezone from django.utils.timezone import now @@ -27,7 +27,7 @@ def test_can_list_and_retrieve_licenses(self): self.assertEqual(response_data["results"][0]["key"], "12345::67890") self.assertEqual( response_data["results"][0]["valid_until"], - timezone.datetime(2038, 1, 19, 3, 14, 7, tzinfo=pytz.UTC).isoformat().replace("+00:00", "Z"), + timezone.datetime(2038, 1, 19, 3, 14, 7, tzinfo=ZoneInfo("UTC")).isoformat().replace("+00:00", "Z"), ) retrieve_response = self.client.get(f"/api/license/{response_data['results'][0]['id']}") diff --git a/ee/clickhouse/models/test/__snapshots__/test_cohort.ambr b/ee/clickhouse/models/test/__snapshots__/test_cohort.ambr index d8c1a92e6e35f..955c3b33da9d3 100644 --- a/ee/clickhouse/models/test/__snapshots__/test_cohort.ambr +++ b/ee/clickhouse/models/test/__snapshots__/test_cohort.ambr @@ -83,7 +83,7 @@ (SELECT pdi.person_id AS person_id, countIf(timestamp > now() - INTERVAL 2 year AND timestamp < now() - AND event = '$pageview') > 0 AS performed_event_condition_15_level_level_0_level_0_level_0_0 + AND event = '$pageview') > 0 AS performed_event_condition_17_level_level_0_level_0_level_0_0 FROM events e INNER JOIN (SELECT distinct_id, @@ -113,7 +113,7 @@ HAVING max(is_deleted) = 0 AND (((((NOT has(['something1'], replaceRegexpAll(JSONExtractRaw(argMax(person.properties, version), '$some_prop'), '^"|"$', ''))))))))) person ON person.person_id = behavior_query.person_id WHERE 1 = 1 - AND ((((performed_event_condition_15_level_level_0_level_0_level_0_0)))) ) as person + AND ((((performed_event_condition_17_level_level_0_level_0_level_0_0)))) ) as person UNION ALL SELECT person_id, cohort_id, @@ -148,7 +148,7 @@ (SELECT pdi.person_id AS person_id, countIf(timestamp > now() - INTERVAL 2 year AND timestamp < now() - AND event = '$pageview') > 0 AS 
performed_event_condition_17_level_level_0_level_0_level_0_0 + AND event = '$pageview') > 0 AS performed_event_condition_19_level_level_0_level_0_level_0_0 FROM events e INNER JOIN (SELECT distinct_id, @@ -178,7 +178,7 @@ HAVING max(is_deleted) = 0 AND (((((NOT has(['something1'], replaceRegexpAll(JSONExtractRaw(argMax(person.properties, version), '$some_prop'), '^"|"$', ''))))))))) person ON person.person_id = behavior_query.person_id WHERE 1 = 1 - AND ((((performed_event_condition_17_level_level_0_level_0_level_0_0)))) ) )) + AND ((((performed_event_condition_19_level_level_0_level_0_level_0_0)))) ) )) ' --- # name: TestCohort.test_cohortpeople_with_not_in_cohort_operator_for_behavioural_cohorts @@ -195,7 +195,7 @@ FROM (SELECT pdi.person_id AS person_id, minIf(timestamp, event = 'signup') >= now() - INTERVAL 15 day - AND minIf(timestamp, event = 'signup') < now() as first_time_condition_18_level_level_0_level_0_0 + AND minIf(timestamp, event = 'signup') < now() as first_time_condition_20_level_level_0_level_0_0 FROM events e INNER JOIN (SELECT distinct_id, @@ -208,7 +208,7 @@ AND event IN ['signup'] GROUP BY person_id) behavior_query WHERE 1 = 1 - AND (((first_time_condition_18_level_level_0_level_0_0))) ) as person + AND (((first_time_condition_20_level_level_0_level_0_0))) ) as person UNION ALL SELECT person_id, cohort_id, @@ -237,9 +237,9 @@ (SELECT pdi.person_id AS person_id, countIf(timestamp > now() - INTERVAL 2 year AND timestamp < now() - AND event = '$pageview') > 0 AS performed_event_condition_19_level_level_0_level_0_level_0_0, + AND event = '$pageview') > 0 AS performed_event_condition_21_level_level_0_level_0_level_0_0, minIf(timestamp, event = 'signup') >= now() - INTERVAL 15 day - AND minIf(timestamp, event = 'signup') < now() as first_time_condition_19_level_level_0_level_1_level_0_level_0_level_0_0 + AND minIf(timestamp, event = 'signup') < now() as first_time_condition_21_level_level_0_level_1_level_0_level_0_level_0_0 FROM events e INNER JOIN 
(SELECT distinct_id, @@ -252,8 +252,8 @@ AND event IN ['$pageview', 'signup'] GROUP BY person_id) behavior_query WHERE 1 = 1 - AND ((((performed_event_condition_19_level_level_0_level_0_level_0_0)) - AND ((((NOT first_time_condition_19_level_level_0_level_1_level_0_level_0_level_0_0)))))) ) as person + AND ((((performed_event_condition_21_level_level_0_level_0_level_0_0)) + AND ((((NOT first_time_condition_21_level_level_0_level_1_level_0_level_0_level_0_0)))))) ) as person UNION ALL SELECT person_id, cohort_id, diff --git a/ee/clickhouse/models/test/__snapshots__/test_property.ambr b/ee/clickhouse/models/test/__snapshots__/test_property.ambr index d27396834cf99..b3f6f049cf619 100644 --- a/ee/clickhouse/models/test/__snapshots__/test_property.ambr +++ b/ee/clickhouse/models/test/__snapshots__/test_property.ambr @@ -146,7 +146,7 @@ )) ', { - 'global_cohort_id_0': 47, + 'global_cohort_id_0': 1, 'global_version_0': None, }, ) diff --git a/ee/clickhouse/queries/experiments/funnel_experiment_result.py b/ee/clickhouse/queries/experiments/funnel_experiment_result.py index ef2985211dc88..b41ba454e2f0c 100644 --- a/ee/clickhouse/queries/experiments/funnel_experiment_result.py +++ b/ee/clickhouse/queries/experiments/funnel_experiment_result.py @@ -1,8 +1,8 @@ from dataclasses import asdict, dataclass from datetime import datetime from typing import List, Optional, Tuple, Type +from zoneinfo import ZoneInfo -import pytz from numpy.random import default_rng from rest_framework.exceptions import ValidationError @@ -57,7 +57,6 @@ def __init__( experiment_end_date: Optional[datetime] = None, funnel_class: Type[ClickhouseFunnel] = ClickhouseFunnel, ): - breakdown_key = f"$feature/{feature_flag.key}" self.variants = [variant["key"] for variant in feature_flag.variants] @@ -65,9 +64,9 @@ def __init__( # while start and end date are in UTC. 
# so we need to convert them to the project timezone if team.timezone: - start_date_in_project_timezone = experiment_start_date.astimezone(pytz.timezone(team.timezone)) + start_date_in_project_timezone = experiment_start_date.astimezone(ZoneInfo(team.timezone)) end_date_in_project_timezone = ( - experiment_end_date.astimezone(pytz.timezone(team.timezone)) if experiment_end_date else None + experiment_end_date.astimezone(ZoneInfo(team.timezone)) if experiment_end_date else None ) query_filter = filter.shallow_clone( diff --git a/ee/clickhouse/queries/experiments/secondary_experiment_result.py b/ee/clickhouse/queries/experiments/secondary_experiment_result.py index a2d1c831e0886..286d408b13d0d 100644 --- a/ee/clickhouse/queries/experiments/secondary_experiment_result.py +++ b/ee/clickhouse/queries/experiments/secondary_experiment_result.py @@ -1,7 +1,7 @@ from datetime import datetime from typing import Dict, Optional +from zoneinfo import ZoneInfo -import pytz from rest_framework.exceptions import ValidationError from ee.clickhouse.queries.experiments.trend_experiment_result import ( uses_count_per_property_value_aggregation, @@ -32,7 +32,6 @@ def __init__( experiment_start_date: datetime, experiment_end_date: Optional[datetime] = None, ): - breakdown_key = f"$feature/{feature_flag.key}" self.variants = [variant["key"] for variant in feature_flag.variants] @@ -40,9 +39,9 @@ def __init__( # while start and end date are in UTC. 
# so we need to convert them to the project timezone if team.timezone: - start_date_in_project_timezone = experiment_start_date.astimezone(pytz.timezone(team.timezone)) + start_date_in_project_timezone = experiment_start_date.astimezone(ZoneInfo(team.timezone)) end_date_in_project_timezone = ( - experiment_end_date.astimezone(pytz.timezone(team.timezone)) if experiment_end_date else None + experiment_end_date.astimezone(ZoneInfo(team.timezone)) if experiment_end_date else None ) query_filter = filter.shallow_clone( @@ -67,7 +66,6 @@ def __init__( self.query_filter = query_filter def get_results(self): - if self.query_filter.insight == INSIGHT_TRENDS: trend_results = Trends().run(self.query_filter, self.team) variants = self.get_trend_count_data_for_variants(trend_results) diff --git a/ee/clickhouse/queries/experiments/trend_experiment_result.py b/ee/clickhouse/queries/experiments/trend_experiment_result.py index d30d7a2339f38..ec03370365188 100644 --- a/ee/clickhouse/queries/experiments/trend_experiment_result.py +++ b/ee/clickhouse/queries/experiments/trend_experiment_result.py @@ -3,8 +3,8 @@ from functools import lru_cache from math import exp, lgamma, log from typing import List, Optional, Tuple, Type +from zoneinfo import ZoneInfo -import pytz from numpy.random import default_rng from rest_framework.exceptions import ValidationError @@ -77,7 +77,6 @@ def __init__( trend_class: Type[Trends] = Trends, custom_exposure_filter: Optional[Filter] = None, ): - breakdown_key = f"$feature/{feature_flag.key}" variants = [variant["key"] for variant in feature_flag.variants] @@ -85,9 +84,9 @@ def __init__( # while start and end date are in UTC. 
# so we need to convert them to the project timezone if team.timezone: - start_date_in_project_timezone = experiment_start_date.astimezone(pytz.timezone(team.timezone)) + start_date_in_project_timezone = experiment_start_date.astimezone(ZoneInfo(team.timezone)) end_date_in_project_timezone = ( - experiment_end_date.astimezone(pytz.timezone(team.timezone)) if experiment_end_date else None + experiment_end_date.astimezone(ZoneInfo(team.timezone)) if experiment_end_date else None ) count_per_user_aggregation = uses_count_per_user_aggregation(filter) diff --git a/ee/clickhouse/queries/test/test_util.py b/ee/clickhouse/queries/test/test_util.py index 131befb6f40ac..ff102765255d3 100644 --- a/ee/clickhouse/queries/test/test_util.py +++ b/ee/clickhouse/queries/test/test_util.py @@ -1,6 +1,6 @@ from datetime import datetime, timedelta -import pytz +from zoneinfo import ZoneInfo from freezegun.api import freeze_time from posthog.client import sync_execute @@ -18,19 +18,19 @@ def test_get_earliest_timestamp(db, team): _create_event(team=team, event="sign up", distinct_id="1", timestamp="2020-01-04T14:10:00Z") _create_event(team=team, event="sign up", distinct_id="1", timestamp="2020-01-06T14:10:00Z") - assert get_earliest_timestamp(team.id) == datetime(2020, 1, 4, 14, 10, tzinfo=pytz.UTC) + assert get_earliest_timestamp(team.id) == datetime(2020, 1, 4, 14, 10, tzinfo=ZoneInfo("UTC")) frozen_time.tick(timedelta(seconds=1)) _create_event(team=team, event="sign up", distinct_id="1", timestamp="1984-01-06T14:10:00Z") _create_event(team=team, event="sign up", distinct_id="1", timestamp="2014-01-01T01:00:00Z") _create_event(team=team, event="sign up", distinct_id="1", timestamp="2015-01-01T01:00:00Z") - assert get_earliest_timestamp(team.id) == datetime(2015, 1, 1, 1, tzinfo=pytz.UTC) + assert get_earliest_timestamp(team.id) == datetime(2015, 1, 1, 1, tzinfo=ZoneInfo("UTC")) @freeze_time("2021-01-21") def test_get_earliest_timestamp_with_no_events(db, team): - assert 
get_earliest_timestamp(team.id) == datetime(2021, 1, 14, tzinfo=pytz.UTC) + assert get_earliest_timestamp(team.id) == datetime(2021, 1, 14, tzinfo=ZoneInfo("UTC")) def test_parse_breakdown_cohort_query(db, team): diff --git a/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiment_secondary_results.ambr b/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiment_secondary_results.ambr index 76b856caa0287..f312dde127a84 100644 --- a/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiment_secondary_results.ambr +++ b/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiment_secondary_results.ambr @@ -1,6 +1,6 @@ # name: ClickhouseTestExperimentSecondaryResults.test_basic_secondary_metric_results ' - /* user_id:51 celery:posthog.celery.sync_insight_caching_state */ + /* user_id:138 celery:posthog.celery.sync_insight_caching_state */ SELECT team_id, date_diff('second', max(timestamp), now()) AS age FROM events @@ -12,50 +12,78 @@ --- # name: ClickhouseTestExperimentSecondaryResults.test_basic_secondary_metric_results.1 ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(value) + FROM + (SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') AS value, + count(*) as count + FROM events e + WHERE team_id = 2 + AND event = '$pageview' + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 25 + OFFSET 0) ' --- # name: ClickhouseTestExperimentSecondaryResults.test_basic_secondary_metric_results.2 ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', 
max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-06 00:00:00', 'UTC')) - toIntervalDay(number) as day_start + FROM numbers(6) + UNION ALL SELECT toStartOfDay(toDateTime('2020-01-01 00:00:00', 'UTC')) as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['control', 'test', 'ablahebf', ''] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(*) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) as day_start, + replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') as breakdown_value + FROM events e + WHERE e.team_id = 2 + AND event = '$pageview' + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') in (['control', 'test', 'ablahebf', '']) + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value ' --- # name: ClickhouseTestExperimentSecondaryResults.test_basic_secondary_metric_results.3 - ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ' ---- -# name: 
ClickhouseTestExperimentSecondaryResults.test_basic_secondary_metric_results.4 ' /* user_id:0 request:_snapshot_ */ SELECT groupArray(value) FROM - (SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') AS value, + (SELECT array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS value, count(*) as count FROM events e WHERE team_id = 2 - AND event = '$pageview' + AND event IN ['$pageleave_funnel', '$pageview_funnel'] AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') GROUP BY value @@ -64,6 +92,78 @@ OFFSET 0) ' --- +# name: ClickhouseTestExperimentSecondaryResults.test_basic_secondary_metric_results.4 + ' + /* user_id:0 request:_snapshot_ */ + SELECT countIf(steps = 1) step_1, + countIf(steps = 2) step_2, + avg(step_1_average_conversion_time_inner) step_1_average_conversion_time, + median(step_1_median_conversion_time_inner) step_1_median_conversion_time, + prop + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 14 DAY, 2, 1) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 14 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , + if(has([['test'], ['control'], ['']], prop), prop, ['Other']) as prop + FROM + (SELECT *, + 
if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id , + if(event = '$pageview_funnel', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = '$pageleave_funnel', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 2 + AND event IN ['$pageleave_funnel', '$pageview_funnel'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (step_0 = 1 + OR step_1 = 1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + GROUP BY prop + ' +--- # name: ClickhouseTestExperimentSecondaryResults.test_basic_secondary_metric_results.5 ' /* user_id:0 request:_snapshot_ */ diff --git a/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiments.ambr b/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiments.ambr index 15bbb8312a341..be61b4ccc3d33 100644 --- a/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiments.ambr +++ b/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiments.ambr @@ -1,25 +1,91 @@ # name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results ' - /* user_id:58 celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), 
now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(value) + FROM + (SELECT array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS value, + count(*) as count + FROM events e + WHERE team_id = 2 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 25 + OFFSET 0) ' --- # name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results.1 ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT countIf(steps = 1) step_1, + countIf(steps = 2) step_2, + avg(step_1_average_conversion_time_inner) step_1_average_conversion_time, + median(step_1_median_conversion_time_inner) step_1_median_conversion_time, + prop + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 14 DAY, 2, 1) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 14 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by 
aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , + if(has([['test'], ['control'], ['']], prop), prop, ['Other']) as prop + FROM + (SELECT *, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id , + if(event = '$pageview', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = '$pageleave', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 2 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (step_0 = 1 + OR step_1 = 1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + GROUP BY prop ' --- # name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results.2 @@ -137,54 +203,6 @@ ' --- # name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones - ' - /* user_id:59 celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ' ---- -# name: 
ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.1 - ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ' ---- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.2 - ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ' ---- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.3 - ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ' ---- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.4 ' /* user_id:0 request:_snapshot_ */ SELECT groupArray(value) @@ -202,7 +220,7 @@ OFFSET 0) ' --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.5 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.1 ' /* user_id:0 request:_snapshot_ */ SELECT countIf(steps = 1) step_1, @@ -274,31 +292,7 @@ GROUP BY prop ' --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants - ' - /* user_id:61 celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM 
events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ' ---- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.1 - ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ' ---- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.2 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.2 ' /* celery:posthog.celery.sync_insight_caching_state */ SELECT team_id, @@ -310,7 +304,7 @@ ORDER BY age; ' --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.3 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.3 ' /* celery:posthog.celery.sync_insight_caching_state */ SELECT team_id, @@ -322,7 +316,7 @@ ORDER BY age; ' --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.4 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.4 ' /* user_id:0 request:_snapshot_ */ SELECT groupArray(value) @@ -332,15 +326,15 @@ FROM events e WHERE team_id = 2 AND event IN ['$pageleave', '$pageview'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'Europe/Amsterdam') >= toDateTime('2020-01-01 14:20:21', 'Europe/Amsterdam') + AND toTimeZone(timestamp, 'Europe/Amsterdam') <= toDateTime('2020-01-06 10:00:00', 'Europe/Amsterdam') GROUP BY value ORDER BY count DESC, 
value DESC LIMIT 25 OFFSET 0) ' --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.5 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_and_events_out_of_time_range_timezones.5 ' /* user_id:0 request:_snapshot_ */ SELECT countIf(steps = 1) step_1, @@ -375,7 +369,7 @@ min(latest_1) over (PARTITION by aggregation_target, prop ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , - if(has([[''], ['test_1'], ['test'], ['control'], ['unknown_3'], ['unknown_2'], ['unknown_1'], ['test_2']], prop), prop, ['Other']) as prop + if(has([['test'], ['control']], prop), prop, ['Other']) as prop FROM (SELECT *, if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop @@ -400,8 +394,8 @@ HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id WHERE team_id = 2 AND event IN ['$pageleave', '$pageview'] - AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') - AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'Europe/Amsterdam') >= toDateTime('2020-01-01 14:20:21', 'Europe/Amsterdam') + AND toTimeZone(timestamp, 'Europe/Amsterdam') <= toDateTime('2020-01-06 10:00:00', 'Europe/Amsterdam') AND (step_0 = 1 OR step_1 = 1) ))) WHERE step_0 = 1 )) @@ -412,55 +406,7 @@ GROUP BY prop ' --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation - ' - /* user_id:62 celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ' ---- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.1 - ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT 
team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ' ---- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.2 - ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ' ---- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.3 - ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; - ' ---- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.4 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants ' /* user_id:0 request:_snapshot_ */ SELECT groupArray(value) @@ -478,7 +424,7 @@ OFFSET 0) ' --- -# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.5 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.1 ' /* user_id:0 request:_snapshot_ */ SELECT countIf(steps = 1) step_1, @@ -513,13 +459,13 @@ min(latest_1) over (PARTITION by aggregation_target, prop ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , - if(has([['test'], ['control'], ['']], prop), prop, ['Other']) as prop + if(has([[''], ['test_1'], ['test'], ['control'], ['unknown_3'], ['unknown_2'], ['unknown_1'], ['test_2']], prop), prop, ['Other']) as prop FROM (SELECT *, if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), 
prop_vals, ['']) as prop FROM (SELECT e.timestamp as timestamp, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(properties, '$account_id'), ''), 'null'), '^"|"$', '') as aggregation_target, + pdi.person_id as aggregation_target, pdi.person_id as person_id , if(event = '$pageview', 1, 0) as step_0, if(step_0 = 1, timestamp, null) as latest_0, @@ -550,9 +496,9 @@ GROUP BY prop ' --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.2 ' - /* user_id:65 celery:posthog.celery.sync_insight_caching_state */ + /* celery:posthog.celery.sync_insight_caching_state */ SELECT team_id, date_diff('second', max(timestamp), now()) AS age FROM events @@ -562,7 +508,7 @@ ORDER BY age; ' --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.1 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.3 ' /* celery:posthog.celery.sync_insight_caching_state */ SELECT team_id, @@ -574,28 +520,447 @@ ORDER BY age; ' --- -# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.2 +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.4 ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(value) + FROM + (SELECT array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS value, + count(*) as count + FROM events e + WHERE team_id = 2 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 
00:00:00', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 25 + OFFSET 0) + ' +--- +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.5 + ' + /* user_id:0 request:_snapshot_ */ + SELECT countIf(steps = 1) step_1, + countIf(steps = 2) step_2, + avg(step_1_average_conversion_time_inner) step_1_average_conversion_time, + median(step_1_median_conversion_time_inner) step_1_median_conversion_time, + prop + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 14 DAY, 2, 1) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 14 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , + if(has([[''], ['test_1'], ['test'], ['control'], ['unknown_3'], ['unknown_2'], ['unknown_1'], ['test_2']], prop), prop, ['Other']) as prop + FROM + (SELECT *, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop + FROM + (SELECT e.timestamp as timestamp, + pdi.person_id as aggregation_target, + pdi.person_id as person_id , + if(event = '$pageview', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = '$pageleave', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS prop_basic, + prop_basic as prop, + 
argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 2 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (step_0 = 1 + OR step_1 = 1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + GROUP BY prop + ' +--- +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation + ' + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(value) + FROM + (SELECT array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS value, + count(*) as count + FROM events e + WHERE team_id = 2 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 25 + OFFSET 0) + ' +--- +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.1 + ' + /* user_id:0 request:_snapshot_ */ + SELECT countIf(steps = 1) step_1, + countIf(steps = 2) step_2, + avg(step_1_average_conversion_time_inner) step_1_average_conversion_time, + median(step_1_median_conversion_time_inner) step_1_median_conversion_time, + prop + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner , + prop + FROM + (SELECT aggregation_target, + 
steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 14 DAY, 2, 1) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 14 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , + if(has([['test'], ['control'], ['']], prop), prop, ['Other']) as prop + FROM + (SELECT *, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop + FROM + (SELECT e.timestamp as timestamp, + replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(properties, '$account_id'), ''), 'null'), '^"|"$', '') as aggregation_target, + pdi.person_id as person_id , + if(event = '$pageview', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = '$pageleave', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 2 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (step_0 = 1 + OR step_1 = 1) ))) + WHERE step_0 = 1 )) + GROUP BY aggregation_target, + steps, 
+ prop + HAVING steps = max_steps) + GROUP BY prop + ' +--- +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.2 + ' + /* celery:posthog.celery.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id ORDER BY age; ' --- +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.3 + ' + /* celery:posthog.celery.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ' +--- +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.4 + ' + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(value) + FROM + (SELECT array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS value, + count(*) as count + FROM events e + WHERE team_id = 2 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 25 + OFFSET 0) + ' +--- +# name: ClickhouseTestFunnelExperimentResults.test_experiment_flow_with_event_results_with_hogql_aggregation.5 + ' + /* user_id:0 request:_snapshot_ */ + SELECT countIf(steps = 1) step_1, + countIf(steps = 2) step_2, + avg(step_1_average_conversion_time_inner) step_1_average_conversion_time, + median(step_1_median_conversion_time_inner) step_1_median_conversion_time, + prop + FROM + (SELECT aggregation_target, + steps, + avg(step_1_conversion_time) step_1_average_conversion_time_inner, + median(step_1_conversion_time) step_1_median_conversion_time_inner , + prop + 
FROM + (SELECT aggregation_target, + steps, + max(steps) over (PARTITION BY aggregation_target, + prop) as max_steps, + step_1_conversion_time , + prop + FROM + (SELECT *, + if(latest_0 <= latest_1 + AND latest_1 <= latest_0 + INTERVAL 14 DAY, 2, 1) AS steps , + if(isNotNull(latest_1) + AND latest_1 <= latest_0 + INTERVAL 14 DAY, dateDiff('second', toDateTime(latest_0), toDateTime(latest_1)), NULL) step_1_conversion_time, + prop + FROM + (SELECT aggregation_target, timestamp, step_0, + latest_0, + step_1, + min(latest_1) over (PARTITION by aggregation_target, + prop + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1 , + if(has([['test'], ['control'], ['']], prop), prop, ['Other']) as prop + FROM + (SELECT *, + if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) as prop + FROM + (SELECT e.timestamp as timestamp, + replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(properties, '$account_id'), ''), 'null'), '^"|"$', '') as aggregation_target, + pdi.person_id as person_id , + if(event = '$pageview', 1, 0) as step_0, + if(step_0 = 1, timestamp, null) as latest_0, + if(event = '$pageleave', 1, 0) as step_1, + if(step_1 = 1, timestamp, null) as latest_1, + array(replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '')) AS prop_basic, + prop_basic as prop, + argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) over (PARTITION by aggregation_target) as prop_vals + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id + WHERE team_id = 2 + AND event IN ['$pageleave', '$pageview'] + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (step_0 = 1 + OR step_1 = 1) ))) + WHERE step_0 = 1 )) + 
GROUP BY aggregation_target, + steps, + prop + HAVING steps = max_steps) + GROUP BY prop + ' +--- +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results + ' + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(value) + FROM + (SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') AS value, + count(*) as count + FROM events e + WHERE team_id = 2 + AND event = '$pageview' + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature/a-b-test'), '^"|"$', ''))) + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 25 + OFFSET 0) + ' +--- +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.1 + ' + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-06 00:00:00', 'UTC')) - toIntervalDay(number) as day_start + FROM numbers(6) + UNION ALL SELECT toStartOfDay(toDateTime('2020-01-01 00:00:00', 'UTC')) as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['test', 'control'] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(*) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) as day_start, + replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') as breakdown_value + FROM events e + WHERE e.team_id = 2 + AND event = '$pageview' + AND (has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature/a-b-test'), '^"|"$', ''))) + AND 
toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') in (['test', 'control']) + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value + ' +--- +# name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.2 + ' + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(value) + FROM + (SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', '') AS value, + count(*) as count + FROM events e + WHERE team_id = 2 + AND event = '$feature_flag_called' + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag_response'), '^"|"$', '')) + AND has(['a-b-test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag'), '^"|"$', ''))) + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 25 + OFFSET 0) + ' +--- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.3 ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-06 00:00:00', 'UTC')) - 
toIntervalDay(number) as day_start + FROM numbers(6) + UNION ALL SELECT toStartOfDay(toDateTime('2020-01-01 00:00:00', 'UTC')) as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['control', 'test'] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(DISTINCT person_id) as total, + toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) as day_start, + breakdown_value + FROM + (SELECT person_id, + min(timestamp) as timestamp, + breakdown_value + FROM + (SELECT pdi.person_id as person_id, timestamp, replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', '') as breakdown_value + FROM events e + INNER JOIN + (SELECT distinct_id, + argMax(person_id, version) as person_id + FROM person_distinct_id2 + WHERE team_id = 2 + GROUP BY distinct_id + HAVING argMax(is_deleted, version) = 0) as pdi ON events.distinct_id = pdi.distinct_id + WHERE e.team_id = 2 + AND event = '$feature_flag_called' + AND (has(['control', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag_response'), '^"|"$', '')) + AND has(['a-b-test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag'), '^"|"$', ''))) + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', '') in (['control', 'test']) ) + GROUP BY person_id, + breakdown_value) AS pdi + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value ' --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results.4 @@ -749,50 +1114,97 @@ --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants ' - /* user_id:66 
celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(value) + FROM + (SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') AS value, + count(*) as count + FROM events e + WHERE team_id = 2 + AND event = '$pageview1' + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (has(['control', 'test_1', 'test_2', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature/a-b-test'), '^"|"$', ''))) + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 25 + OFFSET 0) ' --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.1 ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(day_start) as date, + groupArray(count) AS total, + breakdown_value + FROM + (SELECT SUM(total) as count, + day_start, + breakdown_value + FROM + (SELECT * + FROM + (SELECT toUInt16(0) AS total, + ticks.day_start as day_start, + breakdown_value + FROM + (SELECT toStartOfDay(toDateTime('2020-01-06 00:00:00', 'UTC')) - toIntervalDay(number) as day_start + FROM numbers(6) + UNION ALL SELECT toStartOfDay(toDateTime('2020-01-01 00:00:00', 'UTC')) as day_start) as ticks + CROSS JOIN + (SELECT breakdown_value + FROM + (SELECT ['control', 'test_1', 'test_2'] as breakdown_value) ARRAY + JOIN breakdown_value) as sec + ORDER BY breakdown_value, + day_start + UNION ALL SELECT count(*) as total, + 
toStartOfDay(toTimeZone(toDateTime(timestamp, 'UTC'), 'UTC')) as day_start, + replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') as breakdown_value + FROM events e + WHERE e.team_id = 2 + AND event = '$pageview1' + AND (has(['control', 'test_1', 'test_2', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature/a-b-test'), '^"|"$', ''))) + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND replaceRegexpAll(JSONExtractRaw(properties, '$feature/a-b-test'), '^"|"$', '') in (['control', 'test_1', 'test_2']) + GROUP BY day_start, + breakdown_value)) + GROUP BY day_start, + breakdown_value + ORDER BY breakdown_value, + day_start) + GROUP BY breakdown_value + ORDER BY breakdown_value ' --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.2 ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT groupArray(value) + FROM + (SELECT replaceRegexpAll(JSONExtractRaw(properties, '$feature_flag_response'), '^"|"$', '') AS value, + count(*) as count + FROM events e + WHERE team_id = 2 + AND event = '$feature_flag_called' + AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC') + AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-06 00:00:00', 'UTC') + AND (has(['control', 'test_1', 'test_2', 'test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag_response'), '^"|"$', '')) + AND has(['a-b-test'], replaceRegexpAll(JSONExtractRaw(e.properties, '$feature_flag'), '^"|"$', ''))) + GROUP BY value + ORDER BY count DESC, value DESC + LIMIT 25 + OFFSET 0) ' --- # name: 
ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.3 ' - /* celery:posthog.celery.sync_insight_caching_state */ - SELECT team_id, - date_diff('second', max(timestamp), now()) AS age - FROM events - WHERE timestamp > date_sub(DAY, 3, now()) - AND timestamp < now() - GROUP BY team_id - ORDER BY age; + /* user_id:0 request:_snapshot_ */ + SELECT [now()] AS date, + [0] AS total, + '' AS breakdown_value + LIMIT 0 ' --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_for_three_test_variants.4 @@ -892,7 +1304,7 @@ --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_out_of_timerange_timezone ' - /* user_id:68 celery:posthog.celery.sync_insight_caching_state */ + /* user_id:1 celery:posthog.celery.sync_insight_caching_state */ SELECT team_id, date_diff('second', max(timestamp), now()) AS age FROM events @@ -1089,7 +1501,7 @@ --- # name: ClickhouseTestTrendExperimentResults.test_experiment_flow_with_event_results_with_hogql_filter ' - /* user_id:70 celery:posthog.celery.sync_insight_caching_state */ + /* user_id:3 celery:posthog.celery.sync_insight_caching_state */ SELECT team_id, date_diff('second', max(timestamp), now()) AS age FROM events diff --git a/ee/clickhouse/views/test/test_clickhouse_experiments.py b/ee/clickhouse/views/test/test_clickhouse_experiments.py index 9f86ee3fe1d51..07764b83845d8 100644 --- a/ee/clickhouse/views/test/test_clickhouse_experiments.py +++ b/ee/clickhouse/views/test/test_clickhouse_experiments.py @@ -777,7 +777,7 @@ def test_used_in_experiment_is_populated_correctly_for_feature_flag_list(self) - ).json() # TODO: Make sure permission bool doesn't cause n + 1 - with self.assertNumQueries(11): + with self.assertNumQueries(12): response = self.client.get(f"/api/projects/{self.team.id}/feature_flags") self.assertEqual(response.status_code, status.HTTP_200_OK) result = response.json() diff --git 
a/ee/tasks/auto_rollback_feature_flag.py b/ee/tasks/auto_rollback_feature_flag.py index c79b926831a1e..249c556a4fd8a 100644 --- a/ee/tasks/auto_rollback_feature_flag.py +++ b/ee/tasks/auto_rollback_feature_flag.py @@ -1,7 +1,7 @@ from datetime import datetime, timedelta from typing import Dict +from zoneinfo import ZoneInfo -import pytz from celery import shared_task from ee.api.sentry_stats import get_stats_for_timerange @@ -31,7 +31,7 @@ def check_feature_flag_rollback_conditions(feature_flag_id: int) -> None: def calculate_rolling_average(threshold_metric: Dict, team: Team, timezone: str) -> float: - curr = datetime.now(tz=pytz.timezone(timezone)) + curr = datetime.now(tz=ZoneInfo(timezone)) rolling_average_days = 7 diff --git a/ee/tasks/test/subscriptions/subscriptions_test_factory.py b/ee/tasks/test/subscriptions/subscriptions_test_factory.py index deb7e05536553..ef459c44e981a 100644 --- a/ee/tasks/test/subscriptions/subscriptions_test_factory.py +++ b/ee/tasks/test/subscriptions/subscriptions_test_factory.py @@ -1,7 +1,7 @@ from datetime import datetime from typing import Any -import pytz +from zoneinfo import ZoneInfo from posthog.models.subscription import Subscription @@ -12,7 +12,7 @@ def create_subscription(**kwargs: Any) -> Subscription: target_value="test1@posthog.com,test2@posthog.com", frequency="daily", interval=1, - start_date=datetime(2022, 1, 1, 9, 0).replace(tzinfo=pytz.UTC), + start_date=datetime(2022, 1, 1, 9, 0).replace(tzinfo=ZoneInfo("UTC")), ) payload.update(kwargs) diff --git a/ee/tasks/test/subscriptions/test_subscriptions.py b/ee/tasks/test/subscriptions/test_subscriptions.py index d4e5d0e8daa95..8d5d0158678d7 100644 --- a/ee/tasks/test/subscriptions/test_subscriptions.py +++ b/ee/tasks/test/subscriptions/test_subscriptions.py @@ -2,7 +2,7 @@ from typing import List from unittest.mock import MagicMock, call, patch -import pytz +from zoneinfo import ZoneInfo from freezegun import freeze_time from ee.tasks.subscriptions import ( @@ -58,9 
+58,9 @@ def test_subscription_delivery_scheduling( create_subscription(team=self.team, dashboard=self.dashboard, created_by=self.user, deleted=True), ] # Modify a subscription to have its target time at least an hour ahead - subscriptions[2].start_date = datetime(2022, 1, 1, 10, 0).replace(tzinfo=pytz.UTC) + subscriptions[2].start_date = datetime(2022, 1, 1, 10, 0).replace(tzinfo=ZoneInfo("UTC")) subscriptions[2].save() - assert subscriptions[2].next_delivery_date == datetime(2022, 2, 2, 10, 0).replace(tzinfo=pytz.UTC) + assert subscriptions[2].next_delivery_date == datetime(2022, 2, 2, 10, 0).replace(tzinfo=ZoneInfo("UTC")) schedule_all_subscriptions() diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-line-edit--webkit.png index f9063f35a84fa..23a9edff296e8 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks--bullet-list.png b/frontend/__snapshots__/scenes-app-notebooks--bullet-list.png new file mode 100644 index 0000000000000..00ac16d82c920 Binary files /dev/null and b/frontend/__snapshots__/scenes-app-notebooks--bullet-list.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks--headings.png b/frontend/__snapshots__/scenes-app-notebooks--headings.png new file mode 100644 index 0000000000000..8df9288063084 Binary files /dev/null and b/frontend/__snapshots__/scenes-app-notebooks--headings.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks--notebooks-template-introduction.png b/frontend/__snapshots__/scenes-app-notebooks--notebooks-template-introduction.png new file mode 100644 index 0000000000000..b6466dd921cf7 Binary files /dev/null and b/frontend/__snapshots__/scenes-app-notebooks--notebooks-template-introduction.png differ diff --git 
a/frontend/__snapshots__/scenes-app-notebooks--numbered-list.png b/frontend/__snapshots__/scenes-app-notebooks--numbered-list.png new file mode 100644 index 0000000000000..76256d08a1d61 Binary files /dev/null and b/frontend/__snapshots__/scenes-app-notebooks--numbered-list.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist.png b/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist.png new file mode 100644 index 0000000000000..768016f6149e4 Binary files /dev/null and b/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks--text-formats.png b/frontend/__snapshots__/scenes-app-notebooks--text-formats.png new file mode 100644 index 0000000000000..ecf666a2c4f96 Binary files /dev/null and b/frontend/__snapshots__/scenes-app-notebooks--text-formats.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks--text-only-notebook.png b/frontend/__snapshots__/scenes-app-notebooks--text-only-notebook.png index 65c15401073a4..bc2f358a8286c 100644 Binary files a/frontend/__snapshots__/scenes-app-notebooks--text-only-notebook.png and b/frontend/__snapshots__/scenes-app-notebooks--text-only-notebook.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--closed-popover-state.png b/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--closed-popover-state.png new file mode 100644 index 0000000000000..72044664032ff Binary files /dev/null and b/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--closed-popover-state.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--default.png b/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--default.png new file mode 100644 index 0000000000000..ca05fd2fff918 Binary files /dev/null and 
b/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--default.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-no-existing-containing-notebooks.png b/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-no-existing-containing-notebooks.png new file mode 100644 index 0000000000000..17c750c0c42d7 Binary files /dev/null and b/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-no-existing-containing-notebooks.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-no-notebooks.png b/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-no-notebooks.png new file mode 100644 index 0000000000000..ca05fd2fff918 Binary files /dev/null and b/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-no-notebooks.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-slow-network-response-closed-popover.png b/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-slow-network-response-closed-popover.png new file mode 100644 index 0000000000000..7f2f047e58950 Binary files /dev/null and b/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-slow-network-response-closed-popover.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-slow-network-response.png b/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-slow-network-response.png new file mode 100644 index 0000000000000..2e25a8113f1d1 Binary files /dev/null and b/frontend/__snapshots__/scenes-app-notebooks-components-notebook-select-button--with-slow-network-response.png differ diff --git a/frontend/__snapshots__/scenes-app-recordings--recent-recordings.png 
b/frontend/__snapshots__/scenes-app-recordings--recent-recordings.png new file mode 100644 index 0000000000000..f4060d584e979 Binary files /dev/null and b/frontend/__snapshots__/scenes-app-recordings--recent-recordings.png differ diff --git a/frontend/__snapshots__/scenes-app-recordings--recordings-play-list-no-pinned-recordings.png b/frontend/__snapshots__/scenes-app-recordings--recordings-play-list-no-pinned-recordings.png index 2f1c8b9db903b..f6899d7e36d1d 100644 Binary files a/frontend/__snapshots__/scenes-app-recordings--recordings-play-list-no-pinned-recordings.png and b/frontend/__snapshots__/scenes-app-recordings--recordings-play-list-no-pinned-recordings.png differ diff --git a/frontend/__snapshots__/scenes-app-surveys--survey-view.png b/frontend/__snapshots__/scenes-app-surveys--survey-view.png index 418de31110ebf..4fd6630f9ddd5 100644 Binary files a/frontend/__snapshots__/scenes-app-surveys--survey-view.png and b/frontend/__snapshots__/scenes-app-surveys--survey-view.png differ diff --git a/frontend/__snapshots__/scenes-other-login--cloud-with-google-login-enforcement.png b/frontend/__snapshots__/scenes-other-login--cloud-with-google-login-enforcement.png index b533f8a57619e..e34fe137f3088 100644 Binary files a/frontend/__snapshots__/scenes-other-login--cloud-with-google-login-enforcement.png and b/frontend/__snapshots__/scenes-other-login--cloud-with-google-login-enforcement.png differ diff --git a/frontend/src/layout/navigation/TopBar/TopBar.tsx b/frontend/src/layout/navigation/TopBar/TopBar.tsx index cf73ee60f4b8b..4c51c2453ae40 100644 --- a/frontend/src/layout/navigation/TopBar/TopBar.tsx +++ b/frontend/src/layout/navigation/TopBar/TopBar.tsx @@ -30,6 +30,26 @@ export function TopBar(): JSX.Element { const { hideInviteModal } = useActions(inviteLogic) const { groupNamesTaxonomicTypes } = useValues(groupsModel) const { featureFlags } = useValues(featureFlagLogic) + + const hasNotebooks = !!featureFlags[FEATURE_FLAGS.NOTEBOOKS] + + const groupTypes = [ 
+ TaxonomicFilterGroupType.Events, + TaxonomicFilterGroupType.Persons, + TaxonomicFilterGroupType.Actions, + TaxonomicFilterGroupType.Cohorts, + TaxonomicFilterGroupType.Insights, + TaxonomicFilterGroupType.FeatureFlags, + TaxonomicFilterGroupType.Plugins, + TaxonomicFilterGroupType.Experiments, + TaxonomicFilterGroupType.Dashboards, + ...groupNamesTaxonomicTypes, + ] + + if (hasNotebooks) { + groupTypes.push(TaxonomicFilterGroupType.Notebooks) + } + return ( <> @@ -48,26 +68,12 @@ export function TopBar(): JSX.Element {
- +
- {!!featureFlags[FEATURE_FLAGS.NOTEBOOKS] && } + {hasNotebooks && } diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts index 0d49af4fc31ec..c56bf0bc8086e 100644 --- a/frontend/src/lib/api.ts +++ b/frontend/src/lib/api.ts @@ -47,8 +47,8 @@ import { DataWarehouseViewLink, BatchExportConfiguration, BatchExportRun, - NotebookNodeType, UserBasicType, + NotebookNodeResource, } from '~/types' import { getCurrentOrganizationId, getCurrentTeamId } from './utils/logics' import { CheckboxValueType } from 'antd/lib/checkbox/Group' @@ -1321,12 +1321,12 @@ const api = { }, async update( notebookId: NotebookType['short_id'], - data: Pick + data: Pick ): Promise { return await new ApiRequest().notebook(notebookId).update({ data }) }, async list( - contains?: { type: NotebookNodeType; attrs: Record }[], + contains?: NotebookNodeResource[], createdBy?: UserBasicType['uuid'], search?: string ): Promise> { @@ -1348,11 +1348,11 @@ const api = { q = { ...q, created_by: createdBy } } if (search) { - q = { ...q, s: search } + q = { ...q, search: search } } return await apiRequest.withQueryString(q).get() }, - async create(data?: Pick): Promise { + async create(data?: Pick): Promise { return await new ApiRequest().notebooks().create({ data }) }, async delete(notebookId: NotebookType['short_id']): Promise { diff --git a/frontend/src/lib/components/Cards/TextCard/TextCard.scss b/frontend/src/lib/components/Cards/TextCard/TextCard.scss index 0652dd7fa64bb..f88af17286e05 100644 --- a/frontend/src/lib/components/Cards/TextCard/TextCard.scss +++ b/frontend/src/lib/components/Cards/TextCard/TextCard.scss @@ -9,13 +9,13 @@ overflow-y: auto; ul { - list-style: disc; - padding-inline-start: 1.5em; + list-style-type: disc; + list-style-position: inside; } ol { - list-style: numeric; - padding-inline-start: 1.5em; + list-style-type: numeric; + list-style-position: inside; } img { diff --git a/frontend/src/lib/components/Cards/TextCard/TextCard.tsx 
b/frontend/src/lib/components/Cards/TextCard/TextCard.tsx index 9b2a9d8705ddf..5c54f515dbea8 100644 --- a/frontend/src/lib/components/Cards/TextCard/TextCard.tsx +++ b/frontend/src/lib/components/Cards/TextCard/TextCard.tsx @@ -24,15 +24,15 @@ interface TextCardProps extends React.HTMLAttributes, Resizeable showEditingControls?: boolean } -interface TextCardBodyProps extends Pick, 'style'> { +interface TextCardBodyProps extends Pick, 'style' | 'className'> { text: string closeDetails?: () => void } -export function TextContent({ text, closeDetails, style }: TextCardBodyProps): JSX.Element { +export function TextContent({ text, closeDetails, style, className }: TextCardBodyProps): JSX.Element { return ( // eslint-disable-next-line react/forbid-dom-props -
closeDetails?.()} style={style}> +
closeDetails?.()} style={style}> {text}
) diff --git a/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddy.stories.tsx b/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddy.stories.tsx index 9106dabbf4599..63e648a6b07f2 100644 --- a/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddy.stories.tsx +++ b/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddy.stories.tsx @@ -19,6 +19,7 @@ export const TheHedgehog: StoryFn = () => { // eslint-disable-next-line no-console console.log('should close') }} + isDarkModeOn={false} />
) diff --git a/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddy.tsx b/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddy.tsx index 60b23c8974732..c777e1d06c0a9 100644 --- a/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddy.tsx +++ b/frontend/src/lib/components/HedgehogBuddy/HedgehogBuddy.tsx @@ -390,17 +390,18 @@ export function HedgehogBuddy({ onClick: _onClick, onPositionChange, popoverOverlay, + isDarkModeOn, }: { actorRef?: MutableRefObject onClose: () => void onClick?: () => void onPositionChange?: (actor: HedgehogActor) => void popoverOverlay?: React.ReactNode + // passed in because toolbar needs to check this differently than the app + isDarkModeOn: boolean }): JSX.Element { const actorRef = useRef() - const { isDarkModeOn } = useValues(themeLogic) - if (!actorRef.current) { actorRef.current = new HedgehogActor() if (_actorRef) { @@ -538,6 +539,11 @@ export function HedgehogBuddy({ export function HedgehogBuddyWithLogic(): JSX.Element { const { hedgehogModeEnabled } = useValues(hedgehogbuddyLogic) const { setHedgehogModeEnabled } = useActions(hedgehogbuddyLogic) + const { isDarkModeOn } = useValues(themeLogic) - return hedgehogModeEnabled ? setHedgehogModeEnabled(false)} /> : <> + return hedgehogModeEnabled ? 
( + setHedgehogModeEnabled(false)} isDarkModeOn={isDarkModeOn} /> + ) : ( + <> + ) } diff --git a/frontend/src/lib/components/PropertyFilters/components/PropertyValue.tsx b/frontend/src/lib/components/PropertyFilters/components/PropertyValue.tsx index eed63c5ca7258..3d07a4bed1261 100644 --- a/frontend/src/lib/components/PropertyFilters/components/PropertyValue.tsx +++ b/frontend/src/lib/components/PropertyFilters/components/PropertyValue.tsx @@ -109,9 +109,9 @@ export function PropertyValue({ const commonInputProps = { onSearch: (newInput: string) => { - setInput(newInput) + setInput(newInput.trim()) if (!Object.keys(options).includes(newInput) && !(operator && isOperatorFlag(operator))) { - load(newInput) + load(newInput.trim()) } }, ['data-attr']: 'prop-val', @@ -212,12 +212,6 @@ export function PropertyValue({ setInput(option.title) setValue(toString(val)) }} - onKeyDown={(e) => { - if (e.key === 'Enter') { - setInput(toString(input)) - setValue(toString(input)) - } - }} ref={autoCompleteRef} > {[ diff --git a/frontend/src/lib/components/Subscriptions/views/EditSubscription.tsx b/frontend/src/lib/components/Subscriptions/views/EditSubscription.tsx index 2550bcff4d546..7034d975c324b 100644 --- a/frontend/src/lib/components/Subscriptions/views/EditSubscription.tsx +++ b/frontend/src/lib/components/Subscriptions/views/EditSubscription.tsx @@ -285,7 +285,7 @@ export function EditSubscription({ > {({ value, onChange }) => ( onChange(val)} + onChange={(val: string) => onChange(val)} value={value} disabled={slackDisabled} mode="single" diff --git a/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.tsx b/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.tsx index 773cfa0809949..e9014b314ef91 100644 --- a/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.tsx +++ b/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.tsx @@ -23,6 +23,7 @@ import { PersonType, PluginType, PropertyDefinition, + NotebookType, } from 
'~/types' import { cohortsModel } from '~/models/cohortsModel' import { actionsModel } from '~/models/actionsModel' @@ -154,7 +155,7 @@ export const taxonomicFilterLogic = kea({ eventNames, excludedProperties ): TaxonomicFilterGroup[] => { - return [ + const groups = [ { name: 'Events', searchPlaceholder: 'events', @@ -209,7 +210,7 @@ export const taxonomicFilterLogic = kea({ filter_by_event_names: true, }).url : undefined, - expandLabel: ({ count, expandedCount }) => + expandLabel: ({ count, expandedCount }: { count: number; expandedCount: number }) => `Show ${pluralize(expandedCount - count, 'property', 'properties')} that ${pluralize( eventNames.length, 'has', @@ -237,7 +238,7 @@ export const taxonomicFilterLogic = kea({ filter_by_event_names: true, }).url : undefined, - expandLabel: ({ count, expandedCount }) => + expandLabel: ({ count, expandedCount }: { count: number; expandedCount: number }) => `Show ${pluralize(expandedCount - count, 'property', 'properties')} that ${pluralize( eventNames.length, 'has', @@ -398,6 +399,16 @@ export const taxonomicFilterLogic = kea({ getValue: (dashboard: DashboardType) => dashboard.id, getPopoverHeader: () => `Dashboards`, }, + { + name: 'Notebooks', + searchPlaceholder: 'notebooks', + type: TaxonomicFilterGroupType.Notebooks, + value: 'notebooks', + endpoint: `api/projects/${teamId}/notebooks/`, + getName: (notebook: NotebookType) => notebook.title || `Notebook ${notebook.short_id}`, + getValue: (notebook: NotebookType) => notebook.short_id, + getPopoverHeader: () => 'Notebooks', + }, { name: 'Sessions', searchPlaceholder: 'sessions', @@ -408,8 +419,8 @@ export const taxonomicFilterLogic = kea({ value: '$session_duration', }, ], - getName: (option) => option.name, - getValue: (option) => option.value, + getName: (option: any) => option.name, + getValue: (option: any) => option.value, getPopoverHeader: () => 'Session', }, { @@ -422,6 +433,8 @@ export const taxonomicFilterLogic = kea({ ...groupAnalyticsTaxonomicGroups, 
...groupAnalyticsTaxonomicGroupNames, ] + + return groups }, ], activeTaxonomicGroup: [ diff --git a/frontend/src/lib/components/TaxonomicFilter/types.ts b/frontend/src/lib/components/TaxonomicFilter/types.ts index 5d03149f671ea..5dd74ef575aae 100644 --- a/frontend/src/lib/components/TaxonomicFilter/types.ts +++ b/frontend/src/lib/components/TaxonomicFilter/types.ts @@ -83,6 +83,7 @@ export enum TaxonomicFilterGroupType { GroupNamesPrefix = 'name_groups', Sessions = 'sessions', HogQLExpression = 'hogql_expression', + Notebooks = 'notebooks', } export interface InfiniteListLogicProps extends TaxonomicFilterLogicProps { diff --git a/frontend/src/lib/components/UniversalSearch/UniversalSearchPopover.tsx b/frontend/src/lib/components/UniversalSearch/UniversalSearchPopover.tsx index 2412e0b8bff8d..dc8e9384a7fd5 100644 --- a/frontend/src/lib/components/UniversalSearch/UniversalSearchPopover.tsx +++ b/frontend/src/lib/components/UniversalSearch/UniversalSearchPopover.tsx @@ -109,6 +109,8 @@ function redirectOnSelectItems( ) } else if (groupType === TaxonomicFilterGroupType.Dashboards) { router.actions.push(urls.dashboard(value)) + } else if (groupType === TaxonomicFilterGroupType.Notebooks) { + router.actions.push(urls.notebook(String(value))) } } diff --git a/frontend/src/lib/constants.tsx b/frontend/src/lib/constants.tsx index 855ec21465e0f..5c19ccd64c558 100644 --- a/frontend/src/lib/constants.tsx +++ b/frontend/src/lib/constants.tsx @@ -158,12 +158,14 @@ export const FEATURE_FLAGS = { REFERRAL_SOURCE_SELECT: 'referral-source-select', // owner: @raquelmsmith SURVEYS_MULTIPLE_CHOICE: 'surveys-multiple-choice', // owner: @liyiy CS_DASHBOARDS: 'cs-dashboards', // owner: @pauldambra - NOTEBOOK_SETTINGS_WIDGETS: 'notebook-settings-widgets', // owner: #team-monitoring PRODUCT_SPECIFIC_ONBOARDING: 'product-specific-onboarding', // owner: @raquelmsmith REDIRECT_SIGNUPS_TO_INSTANCE: 'redirect-signups-to-instance', // owner: @raquelmsmith APPS_AND_EXPORTS_UI: 
'apps-and-exports-ui', // owner: @benjackwhite + SURVEY_NPS_RESULTS: 'survey-nps-results', // owner: @liyiy // owner: #team-monitoring SESSION_RECORDING_ALLOW_V1_SNAPSHOTS: 'session-recording-allow-v1-snapshots', + HOGQL_INSIGHTS: 'hogql-insights', // owner: @mariusandra + WEBHOOKS_DENYLIST: 'webhooks-denylist', // owner: #team-pipeline } as const export type FeatureFlagKey = (typeof FEATURE_FLAGS)[keyof typeof FEATURE_FLAGS] diff --git a/frontend/src/lib/lemon-ui/LemonSelectMultiple/LemonSelectMultiple.tsx b/frontend/src/lib/lemon-ui/LemonSelectMultiple/LemonSelectMultiple.tsx index 5cdbadf0510a7..49bee6c0f3589 100644 --- a/frontend/src/lib/lemon-ui/LemonSelectMultiple/LemonSelectMultiple.tsx +++ b/frontend/src/lib/lemon-ui/LemonSelectMultiple/LemonSelectMultiple.tsx @@ -18,20 +18,20 @@ export interface LemonSelectMultipleOptionItem extends LemonSelectMultipleOption export type LemonSelectMultipleOptions = Record -export interface LemonSelectMultipleProps { +export type LemonSelectMultipleProps = { selectClassName?: string options?: LemonSelectMultipleOptions | LemonSelectMultipleOptionItem[] - value?: string[] | null | LabelInValue[] + value?: string | string[] | null disabled?: boolean loading?: boolean placeholder?: string labelInValue?: boolean - onChange?: ((newValue: string[]) => void) | ((newValue: LabelInValue[]) => void) onSearch?: (value: string) => void onFocus?: () => void onBlur?: () => void filterOption?: boolean mode?: 'single' | 'multiple' | 'multiple-custom' + onChange?: ((newValue: string) => void) | ((newValue: string[]) => void) 'data-attr'?: string } @@ -82,12 +82,13 @@ export function LemonSelectMultiple({ showAction={['focus']} onChange={(v) => { if (onChange) { - if (labelInValue) { - const typedValues = v as LabelInValue[] - const typedOnChange = onChange as (newValue: LabelInValue[]) => void + // TRICKY: V is typed poorly and will be a string if the "mode" is undefined + if (!v || typeof v === 'string') { + const typedValues = v as string 
| null + const typedOnChange = onChange as (newValue: string | null) => void typedOnChange(typedValues) } else { - const typedValues = v as string[] + const typedValues = v.map((token) => token.toString().trim()) as string[] const typedOnChange = onChange as (newValue: string[]) => void typedOnChange(typedValues) } diff --git a/frontend/src/lib/lemon-ui/LemonTextArea/LemonTextArea.scss b/frontend/src/lib/lemon-ui/LemonTextArea/LemonTextArea.scss index d15a0b710a0d5..389975e57915a 100644 --- a/frontend/src/lib/lemon-ui/LemonTextArea/LemonTextArea.scss +++ b/frontend/src/lib/lemon-ui/LemonTextArea/LemonTextArea.scss @@ -34,3 +34,15 @@ border: 1px solid var(--danger); } } + +.LemonTextArea--preview { + ul { + list-style-type: disc; + list-style-position: inside; + } + + ol { + list-style-type: decimal; + list-style-position: inside; + } +} diff --git a/frontend/src/lib/lemon-ui/LemonTextArea/LemonTextArea.tsx b/frontend/src/lib/lemon-ui/LemonTextArea/LemonTextArea.tsx index 9a85e20ba4829..4cfbf6bd7648f 100644 --- a/frontend/src/lib/lemon-ui/LemonTextArea/LemonTextArea.tsx +++ b/frontend/src/lib/lemon-ui/LemonTextArea/LemonTextArea.tsx @@ -138,7 +138,11 @@ export function LemonTextMarkdown({ value, onChange, ...editAreaProps }: LemonTe { key: 'preview', label: 'Preview', - content: value ? : Nothing to preview, + content: value ? 
( + + ) : ( + Nothing to preview + ), }, ]} /> diff --git a/frontend/src/lib/lemon-ui/LemonWidget/LemonWidget.tsx b/frontend/src/lib/lemon-ui/LemonWidget/LemonWidget.tsx index ca3f49fbfce30..36ef211c3109a 100644 --- a/frontend/src/lib/lemon-ui/LemonWidget/LemonWidget.tsx +++ b/frontend/src/lib/lemon-ui/LemonWidget/LemonWidget.tsx @@ -34,7 +34,7 @@ export function LemonWidget({ title, collapsible = true, onClose, children }: Le /> ) : ( - {title} + {title} )} {onClose && } />} diff --git a/frontend/src/lib/lemon-ui/Lettermark/Lettermark.stories.tsx b/frontend/src/lib/lemon-ui/Lettermark/Lettermark.stories.tsx index ab4f3eb19ab4f..2a1eb5aa8a757 100644 --- a/frontend/src/lib/lemon-ui/Lettermark/Lettermark.stories.tsx +++ b/frontend/src/lib/lemon-ui/Lettermark/Lettermark.stories.tsx @@ -13,6 +13,9 @@ const meta: Meta = { 'Lettermarks are used as visual, icon-like representations of actors (project members, organizations, query steps, cohort criteria groups, etc) in the product. Lettermarks should vary between the 8 variants we have shown below. Ideally the same colour is not placed next to each other', }, }, + testOptions: { + waitForLoadersToDisappear: false, + }, }, tags: ['autodocs'], } diff --git a/frontend/src/lib/lemon-ui/Lettermark/Lettermark.tsx b/frontend/src/lib/lemon-ui/Lettermark/Lettermark.tsx index 1ef9c92b8b08a..a60be3adaa15f 100644 --- a/frontend/src/lib/lemon-ui/Lettermark/Lettermark.tsx +++ b/frontend/src/lib/lemon-ui/Lettermark/Lettermark.tsx @@ -37,7 +37,8 @@ export function Lettermark({ name, index, color, rounded = false }: LettermarkPr className={clsx( 'Lettermark', colorIndex && `Lettermark--variant-${colorIndex}`, - rounded && `Lettermark--rounded` + rounded && `Lettermark--rounded`, + representation === '?' 
&& 'Lettermark--unknown' )} title={String(name)} > diff --git a/frontend/src/lib/utils.tsx b/frontend/src/lib/utils.tsx index 770c7a945f52a..72079393acb98 100644 --- a/frontend/src/lib/utils.tsx +++ b/frontend/src/lib/utils.tsx @@ -1439,6 +1439,14 @@ export function validateJson(value: string): boolean { } } +export function tryJsonParse(value: string, fallback?: any): any { + try { + return JSON.parse(value) + } catch (error) { + return fallback + } +} + export function validateJsonFormItem(_: any, value: string): Promise { return validateJson(value) ? Promise.resolve() : Promise.reject('Not valid JSON!') } diff --git a/frontend/src/mocks/handlers.ts b/frontend/src/mocks/handlers.ts index e6a101068a6f9..123fc595765ca 100644 --- a/frontend/src/mocks/handlers.ts +++ b/frontend/src/mocks/handlers.ts @@ -82,6 +82,10 @@ export const defaultMocks: Mocks = { }, // We don't want to show the "new version available" banner in tests 'https://api.github.com/repos/posthog/posthog-js/tags': () => [200, []], + 'https://www.gravatar.com/avatar/:gravatar_id': () => [404, ''], + 'https://app.posthog.com/api/early_access_features': { + earlyAccessFeatures: [], + }, }, post: { 'https://app.posthog.com/e/': (): MockSignature => [200, 'ok'], diff --git a/frontend/src/models/notebooksModel.ts b/frontend/src/models/notebooksModel.ts index 6ac02bae2e24f..ec6957e38f0c4 100644 --- a/frontend/src/models/notebooksModel.ts +++ b/frontend/src/models/notebooksModel.ts @@ -40,7 +40,7 @@ export const openNotebook = async ( if (popoverLogic?.values.visibility === 'visible') { popoverLogic?.actions.selectNotebook(notebookId) } else { - router.actions.push(urls.notebookEdit(notebookId)) + router.actions.push(urls.notebook(notebookId)) } popoverLogic?.actions.setInitialAutofocus(focus) diff --git a/frontend/src/queries/nodes/DataTable/DataTable.tsx b/frontend/src/queries/nodes/DataTable/DataTable.tsx index fc71b627e76e2..d81b615a5adde 100644 --- a/frontend/src/queries/nodes/DataTable/DataTable.tsx +++ 
b/frontend/src/queries/nodes/DataTable/DataTable.tsx @@ -65,8 +65,8 @@ let uniqueNode = 0 export function DataTable({ uniqueKey, query, setQuery, context, cachedResults }: DataTableProps): JSX.Element { const uniqueNodeKey = useState(() => uniqueNode++) - const [vizKey] = useState(() => `DataTable.${uniqueKey || uniqueNodeKey}`) const [dataKey] = useState(() => `DataNode.${uniqueKey || uniqueNodeKey}`) + const [vizKey] = useState(() => `DataTable.${uniqueNodeKey}`) const dataNodeLogicProps: DataNodeLogicProps = { query: query.source, @@ -374,9 +374,9 @@ export function DataTable({ uniqueKey, query, setQuery, context, cachedResults } ) : null} {showFirstRow && ( -
+
{firstRowLeft} -
+ {firstRowLeft.length > 0 && firstRowRight.length > 0 ?
: null} {firstRowRight} {showOpenEditorButton && inlineEditorButtonOnRow === 1 && !isReadOnly ? ( @@ -387,7 +387,7 @@ export function DataTable({ uniqueKey, query, setQuery, context, cachedResults } {showSecondRow && (
{secondRowLeft} -
+ {secondRowLeft.length > 0 && secondRowRight.length > 0 ?
: null} {secondRowRight} {showOpenEditorButton && inlineEditorButtonOnRow === 2 && !isReadOnly ? ( diff --git a/frontend/src/queries/nodes/DataTable/DataTableExport.tsx b/frontend/src/queries/nodes/DataTable/DataTableExport.tsx index 1de315c2c900e..db3a26d62aba0 100644 --- a/frontend/src/queries/nodes/DataTable/DataTableExport.tsx +++ b/frontend/src/queries/nodes/DataTable/DataTableExport.tsx @@ -1,12 +1,17 @@ +import Papa from 'papaparse' import { LemonButton, LemonButtonWithDropdown } from 'lib/lemon-ui/LemonButton' import { IconExport } from 'lib/lemon-ui/icons' import { triggerExport } from 'lib/components/ExportButton/exporter' import { ExporterFormat } from '~/types' import { DataNode, DataTableNode } from '~/queries/schema' -import { defaultDataTableColumns } from '~/queries/nodes/DataTable/utils' -import { isEventsQuery, isPersonsNode } from '~/queries/utils' +import { defaultDataTableColumns, extractExpressionComment } from '~/queries/nodes/DataTable/utils' +import { isEventsQuery, isHogQLQuery, isPersonsNode } from '~/queries/utils' import { getPersonsEndpoint } from '~/queries/query' import { ExportWithConfirmation } from '~/queries/nodes/DataTable/ExportWithConfirmation' +import { DataTableRow, dataTableLogic } from './dataTableLogic' +import { useValues } from 'kea' +import { LemonDivider, lemonToast } from '@posthog/lemon-ui' +import { asDisplay } from 'scenes/persons/person-utils' const EXPORT_MAX_LIMIT = 10000 @@ -39,18 +44,148 @@ function startDownload(query: DataTableNode, onlySelectedColumns: boolean): void }) } +const columnDisallowList = ['person.$delete', '*'] +const getCsvTableData = (dataTableRows: DataTableRow[], columns: string[], query: DataTableNode): string[][] => { + if (isPersonsNode(query.source)) { + const filteredColumns = columns.filter((n) => !columnDisallowList.includes(n)) + + const csvData = dataTableRows.map((n) => { + const record = n.result as Record | undefined + const recordWithPerson = { ...(record ?? 
{}), person: record?.name } + + return filteredColumns.map((n) => recordWithPerson[n]) + }) + + return [filteredColumns, ...csvData] + } + + if (isEventsQuery(query.source)) { + const filteredColumns = columns + .filter((n) => !columnDisallowList.includes(n)) + .map((n) => extractExpressionComment(n)) + + const csvData = dataTableRows.map((n) => { + return columns + .map((col, colIndex) => { + if (columnDisallowList.includes(col)) { + return null + } + + if (col === 'person') { + return asDisplay(n.result?.[colIndex]) + } + + return n.result?.[colIndex] + }) + .filter(Boolean) + }) + + return [filteredColumns, ...csvData] + } + + if (isHogQLQuery(query.source)) { + return [columns, ...dataTableRows.map((n) => (n.result as any[]) ?? [])] + } + + return [] +} + +const getJsonTableData = ( + dataTableRows: DataTableRow[], + columns: string[], + query: DataTableNode +): Record[] => { + if (isPersonsNode(query.source)) { + const filteredColumns = columns.filter((n) => !columnDisallowList.includes(n)) + + return dataTableRows.map((n) => { + const record = n.result as Record | undefined + const recordWithPerson = { ...(record ?? {}), person: record?.name } + + return filteredColumns.reduce((acc, cur) => { + acc[cur] = recordWithPerson[cur] + return acc + }, {} as Record) + }) + } + + if (isEventsQuery(query.source)) { + return dataTableRows.map((n) => { + return columns.reduce((acc, col, colIndex) => { + if (columnDisallowList.includes(col)) { + return acc + } + + if (col === 'person') { + acc[col] = asDisplay(n.result?.[colIndex]) + return acc + } + + const colName = extractExpressionComment(col) + + acc[colName] = n.result?.[colIndex] + + return acc + }, {} as Record) + }) + } + + if (isHogQLQuery(query.source)) { + return dataTableRows.map((n) => { + const data = n.result ?? 
{} + return columns.reduce((acc, cur, index) => { + acc[cur] = data[index] + return acc + }, {} as Record) + }) + } + + return [] +} + +function copyTableToCsv(dataTableRows: DataTableRow[], columns: string[], query: DataTableNode): void { + try { + const tableData = getCsvTableData(dataTableRows, columns, query) + + const csv = Papa.unparse(tableData) + + navigator.clipboard.writeText(csv).then(() => { + lemonToast.success('Table copied to clipboard!') + }) + } catch { + lemonToast.error('Copy failed!') + } +} + +function copyTableToJson(dataTableRows: DataTableRow[], columns: string[], query: DataTableNode): void { + try { + const tableData = getJsonTableData(dataTableRows, columns, query) + + const json = JSON.stringify(tableData, null, 4) + + navigator.clipboard.writeText(json).then(() => { + lemonToast.success('Table copied to clipboard!') + }) + } catch { + lemonToast.error('Copy failed!') + } +} + interface DataTableExportProps { query: DataTableNode setQuery?: (query: DataTableNode) => void } export function DataTableExport({ query }: DataTableExportProps): JSX.Element | null { + const { dataTableRows, columnsInResponse, columnsInQuery, queryWithDefaults } = useValues(dataTableLogic) + const source: DataNode = query.source const filterCount = (isEventsQuery(source) || isPersonsNode(source) ? source.properties?.length || 0 : 0) + (isEventsQuery(source) && source.event ? 1 : 0) + (isPersonsNode(source) && source.search ? 1 : 0) const canExportAllColumns = isEventsQuery(source) || isPersonsNode(source) + const showExportClipboardButtons = isPersonsNode(source) || isEventsQuery(source) || isHogQLQuery(source) return ( , - ].concat( - canExportAllColumns - ? [ - startDownload(query, false)} - actor={isPersonsNode(query.source) ? 'persons' : 'events'} - limit={EXPORT_MAX_LIMIT} - > - - Export all columns - - , - ] - : [] - ), + ] + .concat( + canExportAllColumns + ? [ + startDownload(query, false)} + actor={isPersonsNode(query.source) ? 
'persons' : 'events'} + limit={EXPORT_MAX_LIMIT} + > + + Export all columns + + , + ] + : [] + ) + .concat( + showExportClipboardButtons + ? [ + , + { + if (dataTableRows) { + copyTableToCsv( + dataTableRows, + columnsInResponse ?? columnsInQuery, + queryWithDefaults + ) + } + }} + > + Copy CSV to clipboard + , + { + if (dataTableRows) { + copyTableToJson( + dataTableRows, + columnsInResponse ?? columnsInQuery, + queryWithDefaults + ) + } + }} + > + Copy JSON to clipboard + , + ] + : [] + ), }} type="secondary" icon={} diff --git a/frontend/src/queries/nodes/DataTable/dataTableLogic.ts b/frontend/src/queries/nodes/DataTable/dataTableLogic.ts index cdffb15567877..5fb75476e0af3 100644 --- a/frontend/src/queries/nodes/DataTable/dataTableLogic.ts +++ b/frontend/src/queries/nodes/DataTable/dataTableLogic.ts @@ -162,8 +162,7 @@ export const dataTableLogic = kea([ showReload: query.showReload ?? showIfFull, showTimings: query.showTimings ?? flagQueryTimingsEnabled, showElapsedTime: - query.showTimings || - flagQueryTimingsEnabled || + (query.showTimings ?? flagQueryTimingsEnabled) || (query.showElapsedTime ?? ((flagQueryRunningTimeEnabled || source.kind === NodeKind.HogQLQuery) && showIfFull)), showColumnConfigurator: query.showColumnConfigurator ?? 
showIfFull, diff --git a/frontend/src/queries/nodes/InsightViz/EditorFilterGroup.tsx b/frontend/src/queries/nodes/InsightViz/EditorFilterGroup.tsx index 7a811604eda69..d9f13e313dc73 100644 --- a/frontend/src/queries/nodes/InsightViz/EditorFilterGroup.tsx +++ b/frontend/src/queries/nodes/InsightViz/EditorFilterGroup.tsx @@ -14,15 +14,9 @@ export interface EditorFilterGroupProps { insight: Partial insightProps: InsightLogicProps query: InsightQueryNode - setQuery: (node: InsightQueryNode) => void } -export function EditorFilterGroup({ - query, - setQuery, - insightProps, - editorFilterGroup, -}: EditorFilterGroupProps): JSX.Element { +export function EditorFilterGroup({ query, insightProps, editorFilterGroup }: EditorFilterGroupProps): JSX.Element { const { title, count, defaultExpanded = true, editorFilters } = editorFilterGroup const [isRowExpanded, setIsRowExpanded] = useState(defaultExpanded) @@ -58,7 +52,7 @@ export function EditorFilterGroup({ +
) diff --git a/frontend/src/queries/nodes/InsightViz/EditorFilters.scss b/frontend/src/queries/nodes/InsightViz/EditorFilters.scss index 5642f184dcfbc..de26425709f08 100644 --- a/frontend/src/queries/nodes/InsightViz/EditorFilters.scss +++ b/frontend/src/queries/nodes/InsightViz/EditorFilters.scss @@ -1,5 +1,4 @@ @import '../../../styles/mixins'; -@import '../../../styles/mixins'; .EditorFiltersWrapper { flex-shrink: 0; @@ -48,38 +47,6 @@ display: block; padding-right: 1rem; } - &.anim--enter { - width: 0px; - - .EditorFilters { - transform: translateX(calc(-1 * var(--editor-panel-width))); - } - } - - &.anim--enter-active { - width: var(--editor-panel-width); - transition: width 250ms; - .EditorFilters { - transform: translateX(0px); - transition: transform 250ms; - } - } - - &.anim--exit { - width: var(--editor-panel-width); - .EditorFilters { - transform: translateX(0px); - } - } - - &.anim--exit-active { - width: 0px; - transition: width 250ms; - .EditorFilters { - transform: translateX(calc(-1 * var(--editor-panel-width))); - transition: transform 250ms; - } - } } .EditorFilters { @@ -95,4 +62,15 @@ } } } + + &.EditorFiltersWrapper--embedded { + margin-right: 0rem; + + @include screen($xl) { + .EditorFilters { + width: 100%; + padding-right: 0rem; + } + } + } } diff --git a/frontend/src/queries/nodes/InsightViz/EditorFilters.tsx b/frontend/src/queries/nodes/InsightViz/EditorFilters.tsx index 1c5e57a015572..74a67460064e2 100644 --- a/frontend/src/queries/nodes/InsightViz/EditorFilters.tsx +++ b/frontend/src/queries/nodes/InsightViz/EditorFilters.tsx @@ -42,12 +42,11 @@ import { PathsHogQL } from 'scenes/insights/EditorFilters/PathsHogQL' export interface EditorFiltersProps { query: InsightQueryNode - setQuery: (node: InsightQueryNode) => void showing: boolean embedded: boolean } -export function EditorFilters({ query, setQuery, showing, embedded }: EditorFiltersProps): JSX.Element { +export function EditorFilters({ query, showing, embedded }: 
EditorFiltersProps): JSX.Element { const { user } = useValues(userLogic) const availableFeatures = user?.organization?.available_features || [] @@ -280,7 +279,6 @@ export function EditorFilters({ query, setQuery, showing, embedded }: EditorFilt insight={insight} insightProps={insightProps} query={query} - setQuery={setQuery} /> ))}
diff --git a/frontend/src/queries/nodes/InsightViz/GlobalAndOrFilters.tsx b/frontend/src/queries/nodes/InsightViz/GlobalAndOrFilters.tsx index 393bf41f04178..43998ef6f7b8d 100644 --- a/frontend/src/queries/nodes/InsightViz/GlobalAndOrFilters.tsx +++ b/frontend/src/queries/nodes/InsightViz/GlobalAndOrFilters.tsx @@ -1,20 +1,21 @@ import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' import { PropertyGroupFilters } from './PropertyGroupFilters/PropertyGroupFilters' -import { useValues } from 'kea' +import { useActions, useValues } from 'kea' import { groupsModel } from '~/models/groupsModel' import { TrendsQuery, StickinessQuery } from '~/queries/schema' import { isTrendsQuery } from '~/queries/utils' import { actionsModel } from '~/models/actionsModel' import { getAllEventNames } from './utils' +import { insightVizDataLogic } from 'scenes/insights/insightVizDataLogic' type GlobalAndOrFiltersProps = { query: TrendsQuery | StickinessQuery - setQuery: (node: TrendsQuery | StickinessQuery) => void } -export function GlobalAndOrFilters({ query, setQuery }: GlobalAndOrFiltersProps): JSX.Element { +export function GlobalAndOrFilters({ query }: GlobalAndOrFiltersProps): JSX.Element { const { actions: allActions } = useValues(actionsModel) const { groupsTaxonomicTypes } = useValues(groupsModel) + const { updateQuerySource } = useActions(insightVizDataLogic) const taxonomicGroupTypes = [ TaxonomicFilterGroupType.EventProperties, @@ -31,7 +32,7 @@ export function GlobalAndOrFilters({ query, setQuery }: GlobalAndOrFiltersProps) { @@ -34,7 +35,16 @@ let uniqueNode = 0 export function InsightViz({ uniqueKey, query, setQuery, context, readOnly }: InsightVizProps): JSX.Element { const [key] = useState(() => `InsightViz.${uniqueKey || uniqueNode++}`) - const insightProps: InsightLogicProps = context?.insightProps || { dashboardItemId: `new-AdHoc.${key}`, query } + const insightProps: InsightLogicProps = context?.insightProps || { + dashboardItemId: 
`new-AdHoc.${key}`, + query, + setQuery, + } + + if (!insightProps.setQuery && setQuery) { + insightProps.setQuery = setQuery + } + const dataNodeLogicProps: DataNodeLogicProps = { query: query.source, key: insightVizDataNodeKey(insightProps), @@ -46,10 +56,6 @@ export function InsightViz({ uniqueKey, query, setQuery, context, readOnly }: In const isFunnels = isFunnelsQuery(query.source) - const setQuerySource = (source: InsightQueryNode): void => { - setQuery?.({ ...query, source }) - } - const showIfFull = !!query.full const disableHeader = !(query.showHeader ?? showIfFull) const disableTable = !(query.showTable ?? showIfFull) @@ -63,35 +69,32 @@ export function InsightViz({ uniqueKey, query, setQuery, context, readOnly }: In return ( -
- {!readOnly && ( - - )} + +
+ {!readOnly && ( + + )} - {showingResults && ( -
- -
- )} -
+ {showingResults && ( +
+ +
+ )} +
+
) diff --git a/frontend/src/queries/nodes/InsightViz/LifecycleToggles.tsx b/frontend/src/queries/nodes/InsightViz/LifecycleToggles.tsx index 0c604833e7170..d40ffa1170c40 100644 --- a/frontend/src/queries/nodes/InsightViz/LifecycleToggles.tsx +++ b/frontend/src/queries/nodes/InsightViz/LifecycleToggles.tsx @@ -1,6 +1,8 @@ import { LifecycleQuery } from '~/queries/schema' import { LifecycleToggle } from '~/types' import { LemonCheckbox, LemonLabel } from '@posthog/lemon-ui' +import { useActions } from 'kea' +import { insightVizDataLogic } from 'scenes/insights/insightVizDataLogic' const lifecycles: { name: LifecycleToggle; tooltip: string; color: string }[] = [ { @@ -29,21 +31,22 @@ const lifecycles: { name: LifecycleToggle; tooltip: string; color: string }[] = type LifecycleTogglesProps = { query: LifecycleQuery - setQuery: (node: LifecycleQuery) => void } const DEFAULT_LIFECYCLE_TOGGLES: LifecycleToggle[] = ['new', 'returning', 'resurrecting', 'dormant'] -export function LifecycleToggles({ query, setQuery }: LifecycleTogglesProps): JSX.Element { +export function LifecycleToggles({ query }: LifecycleTogglesProps): JSX.Element { const toggledLifecycles = query.lifecycleFilter?.toggledLifecycles || DEFAULT_LIFECYCLE_TOGGLES + const { updateQuerySource } = useActions(insightVizDataLogic) + const setToggledLifecycles = (lifecycles: LifecycleToggle[]): void => { - setQuery({ + updateQuerySource({ ...query, lifecycleFilter: { ...query.lifecycleFilter, toggledLifecycles: lifecycles, }, - }) + } as LifecycleQuery) } const toggleLifecycle = (name: LifecycleToggle): void => { diff --git a/frontend/src/queries/nodes/InsightViz/TrendsSeries.tsx b/frontend/src/queries/nodes/InsightViz/TrendsSeries.tsx index bc62eb7f36bf2..501608e7abcbd 100644 --- a/frontend/src/queries/nodes/InsightViz/TrendsSeries.tsx +++ b/frontend/src/queries/nodes/InsightViz/TrendsSeries.tsx @@ -1,7 +1,7 @@ import { useValues, useActions } from 'kea' import { groupsModel } from '~/models/groupsModel' import 
{ ActionFilter } from 'scenes/insights/filters/ActionFilter/ActionFilter' -import { InsightType, FilterType, InsightLogicProps } from '~/types' +import { InsightType, FilterType } from '~/types' import { alphabet } from 'lib/utils' import { MathAvailability } from 'scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow' import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' @@ -13,15 +13,10 @@ import { actionsAndEventsToSeries } from '../InsightQuery/utils/filtersToQueryNo import { insightVizDataLogic } from 'scenes/insights/insightVizDataLogic' -type TrendsSeriesProps = { - insightProps: InsightLogicProps -} +export function TrendsSeries(): JSX.Element | null { + const { querySource, isTrends, isLifecycle, isStickiness, display, hasFormula } = useValues(insightVizDataLogic) + const { updateQuerySource } = useActions(insightVizDataLogic) -export function TrendsSeries({ insightProps }: TrendsSeriesProps): JSX.Element | null { - const { querySource, isTrends, isLifecycle, isStickiness, display, hasFormula } = useValues( - insightVizDataLogic(insightProps) - ) - const { updateQuerySource } = useActions(insightVizDataLogic(insightProps)) const { groupsTaxonomicTypes } = useValues(groupsModel) const propertiesTaxonomicGroupTypes = [ diff --git a/frontend/src/queries/nodes/SavedInsight/SavedInsight.tsx b/frontend/src/queries/nodes/SavedInsight/SavedInsight.tsx index 947541fdb4e27..4ccc6660ca9b0 100644 --- a/frontend/src/queries/nodes/SavedInsight/SavedInsight.tsx +++ b/frontend/src/queries/nodes/SavedInsight/SavedInsight.tsx @@ -2,11 +2,11 @@ import { useValues } from 'kea' import { insightLogic } from 'scenes/insights/insightLogic' import { Query } from '~/queries/Query/Query' -import { SavedInsightNode, NodeKind, QueryContext } from '~/queries/schema' +import { SavedInsightNode, QueryContext } from '~/queries/schema' import { InsightLogicProps, InsightModel } from '~/types' import { Animation } from 
'lib/components/Animation/Animation' import { AnimationType } from 'lib/animations/animations' -import { filtersToQueryNode } from '../InsightQuery/utils/filtersToQueryNode' +import { insightDataLogic } from 'scenes/insights/insightDataLogic' interface InsightProps { query: SavedInsightNode @@ -14,9 +14,10 @@ interface InsightProps { context?: QueryContext } -export function SavedInsight({ query, context, cachedResults }: InsightProps): JSX.Element { - const insightProps: InsightLogicProps = { dashboardItemId: query.shortId, cachedInsight: cachedResults } +export function SavedInsight({ query: propsQuery, context, cachedResults }: InsightProps): JSX.Element { + const insightProps: InsightLogicProps = { dashboardItemId: propsQuery.shortId, cachedInsight: cachedResults } const { insight, insightLoading } = useValues(insightLogic(insightProps)) + const { query: dataQuery } = useValues(insightDataLogic(insightProps)) if (insightLoading) { return ( @@ -30,10 +31,7 @@ export function SavedInsight({ query, context, cachedResults }: InsightProps): J throw new Error('InsightNode expects an insight with filters') } - return ( - - ) + const query = { ...propsQuery, ...dataQuery, full: propsQuery.full } + + return } diff --git a/frontend/src/queries/query.ts b/frontend/src/queries/query.ts index 06ff315deeb24..2621f27fa3a64 100644 --- a/frontend/src/queries/query.ts +++ b/frontend/src/queries/query.ts @@ -10,6 +10,7 @@ import { isTimeToSeeDataSessionsNode, isHogQLQuery, isInsightVizNode, + isLifecycleQuery, } from './utils' import api, { ApiMethodOptions } from 'lib/api' import { getCurrentTeamId } from 'lib/utils/logics' @@ -27,6 +28,8 @@ import { toParams } from 'lib/utils' import { queryNodeToFilter } from './nodes/InsightQuery/utils/queryNodeToFilter' import { now } from 'lib/dayjs' import { currentSessionId } from 'lib/internalMetrics' +import { featureFlagLogic } from 'lib/logic/featureFlagLogic' +import { FEATURE_FLAGS } from 'lib/constants' const EXPORT_MAX_LIMIT = 
10000 @@ -104,10 +107,14 @@ export async function query( const logParams: Record = {} const startTime = performance.now() + const hogQLInsightsFlagEnabled = Boolean( + featureFlagLogic.findMounted()?.values.featureFlags?.[FEATURE_FLAGS.HOGQL_INSIGHTS] + ) + try { if (isPersonsNode(queryNode)) { response = await api.get(getPersonsEndpoint(queryNode), methodOptions) - } else if (isInsightQueryNode(queryNode)) { + } else if (isInsightQueryNode(queryNode) && !(hogQLInsightsFlagEnabled && isLifecycleQuery(queryNode))) { const filters = queryNodeToFilter(queryNode) const params = { ...filters, diff --git a/frontend/src/queries/schema.json b/frontend/src/queries/schema.json index 4412d012c5efb..792812f2bd585 100644 --- a/frontend/src/queries/schema.json +++ b/frontend/src/queries/schema.json @@ -1411,6 +1411,9 @@ ], "description": "Property filters for all series" }, + "response": { + "$ref": "#/definitions/LifecycleQueryResponse" + }, "samplingFactor": { "description": "Sampling rate", "type": ["number", "null"] @@ -1433,6 +1436,25 @@ "required": ["kind", "series"], "type": "object" }, + "LifecycleQueryResponse": { + "additionalProperties": false, + "properties": { + "result": { + "items": { + "type": "object" + }, + "type": "array" + }, + "timings": { + "items": { + "$ref": "#/definitions/QueryTiming" + }, + "type": "array" + } + }, + "required": ["result"], + "type": "object" + }, "LifecycleToggle": { "enum": ["new", "resurrecting", "returning", "dormant"], "type": "string" @@ -1888,10 +1910,18 @@ "SavedInsightNode": { "additionalProperties": false, "properties": { + "allowSorting": { + "description": "Can the user click on column headers to sort the table? 
(default: true)", + "type": "boolean" + }, "embedded": { "description": "Query is embedded inside another bordered component", "type": "boolean" }, + "expandable": { + "description": "Can expand row to show raw event data (default: true)", + "type": "boolean" + }, "full": { "description": "Show with most visual options enabled. Used in insight scene.", "type": "boolean" @@ -1900,29 +1930,93 @@ "const": "SavedInsightNode", "type": "string" }, + "propertiesViaUrl": { + "description": "Link properties via the URL (default: false)", + "type": "boolean" + }, "shortId": { "$ref": "#/definitions/InsightShortId" }, + "showActions": { + "description": "Show the kebab menu at the end of the row", + "type": "boolean" + }, + "showColumnConfigurator": { + "description": "Show a button to configure the table's columns if possible", + "type": "boolean" + }, "showCorrelationTable": { "type": "boolean" }, + "showDateRange": { + "description": "Show date range selector", + "type": "boolean" + }, + "showElapsedTime": { + "description": "Show the time it takes to run a query", + "type": "boolean" + }, + "showEventFilter": { + "description": "Include an event filter above the table (EventsNode only)", + "type": "boolean" + }, + "showExport": { + "description": "Show the export button", + "type": "boolean" + }, "showFilters": { "type": "boolean" }, "showHeader": { "type": "boolean" }, + "showHogQLEditor": { + "description": "Include a HogQL query editor above HogQL tables", + "type": "boolean" + }, "showLastComputation": { "type": "boolean" }, "showLastComputationRefresh": { "type": "boolean" }, + "showOpenEditorButton": { + "description": "Show a button to open the current query as a new insight. 
(default: true)", + "type": "boolean" + }, + "showPersistentColumnConfigurator": { + "description": "Show a button to configure and persist the table's default columns if possible", + "type": "boolean" + }, + "showPropertyFilter": { + "description": "Include a property filter above the table", + "type": "boolean" + }, + "showReload": { + "description": "Show a reload button", + "type": "boolean" + }, "showResults": { "type": "boolean" }, + "showResultsTable": { + "description": "Show a results table", + "type": "boolean" + }, + "showSavedQueries": { + "description": "Shows a list of saved queries", + "type": "boolean" + }, + "showSearch": { + "description": "Include a free text search field (PersonsNode only)", + "type": "boolean" + }, "showTable": { "type": "boolean" + }, + "showTimings": { + "description": "Show a detailed query timing breakdown", + "type": "boolean" } }, "required": ["kind", "shortId"], diff --git a/frontend/src/queries/schema.ts b/frontend/src/queries/schema.ts index 1a7814cc71cb7..1d5cd9e689d31 100644 --- a/frontend/src/queries/schema.ts +++ b/frontend/src/queries/schema.ts @@ -273,7 +273,7 @@ export interface PersonsNode extends DataNode { export type HasPropertiesNode = EventsNode | EventsQuery | PersonsNode -export interface DataTableNode extends Node { +export interface DataTableNode extends Node, DataTableNodeViewProps { kind: NodeKind.DataTableNode /** Source of the events */ source: EventsNode | EventsQuery | PersonsNode | HogQLQuery | TimeToSeeDataSessionsQuery @@ -282,8 +282,10 @@ export interface DataTableNode extends Node { columns?: HogQLExpression[] /** Columns that aren't shown in the table, even if in columns or returned data */ hiddenColumns?: HogQLExpression[] - /** Show with most visual options enabled. Used in scenes. */ - full?: boolean +} + +interface DataTableNodeViewProps { + /** Show with most visual options enabled. Used in scenes. 
*/ full?: boolean /** Include an event filter above the table (EventsNode only) */ showEventFilter?: boolean /** Include a free text search field (PersonsNode only) */ @@ -326,7 +328,7 @@ export interface DataTableNode extends Node { // Saved insight node -export interface SavedInsightNode extends Node, InsightVizNodeViewProps { +export interface SavedInsightNode extends Node, InsightVizNodeViewProps, DataTableNodeViewProps { kind: NodeKind.SavedInsightNode shortId: InsightShortId } @@ -440,6 +442,11 @@ export type LifecycleFilter = Omit & { toggledLifecycles?: LifecycleToggle[] } // using everything except what it inherits from FilterType +export interface LifecycleQueryResponse { + result: Record[] + timings?: QueryTiming[] +} + export interface LifecycleQuery extends InsightsQueryBase { kind: NodeKind.LifecycleQuery /** Granularity of the response. Can be one of `hour`, `day`, `week` or `month` */ @@ -448,6 +455,7 @@ export interface LifecycleQuery extends InsightsQueryBase { series: (EventsNode | ActionsNode)[] /** Properties specific to the lifecycle insight */ lifecycleFilter?: LifecycleFilter + response?: LifecycleQueryResponse } export type InsightQueryNode = diff --git a/frontend/src/scenes/appScenes.ts b/frontend/src/scenes/appScenes.ts index b4ceb4a3967a7..4b6203b3aa294 100644 --- a/frontend/src/scenes/appScenes.ts +++ b/frontend/src/scenes/appScenes.ts @@ -38,7 +38,7 @@ export const appScenes: Record any> = { [Scene.EarlyAccessFeature]: () => import('./early-access-features/EarlyAccessFeature'), [Scene.Surveys]: () => import('./surveys/Surveys'), [Scene.Survey]: () => import('./surveys/Survey'), - [Scene.DataWarehouse]: () => import('./data-warehouse/posthog/DataWarehousePosthogScene'), + [Scene.DataWarehouse]: () => import('./data-warehouse/external/DataWarehouseExternalScene'), [Scene.DataWarehousePosthog]: () => import('./data-warehouse/posthog/DataWarehousePosthogScene'), [Scene.DataWarehouseExternal]: () => 
import('./data-warehouse/external/DataWarehouseExternalScene'), [Scene.DataWarehouseSavedQueries]: () => import('./data-warehouse/saved_queries/DataWarehouseSavedQueriesScene'), diff --git a/frontend/src/scenes/authentication/Login.tsx b/frontend/src/scenes/authentication/Login.tsx index 883c3215db9bb..b3c3cf1f70149 100644 --- a/frontend/src/scenes/authentication/Login.tsx +++ b/frontend/src/scenes/authentication/Login.tsx @@ -168,7 +168,9 @@ export function Login(): JSX.Element {
)} - + {!precheckResponse.saml_available && !precheckResponse.sso_enforcement && ( + + )}
) diff --git a/frontend/src/scenes/batch_exports/BatchExportEditForm.tsx b/frontend/src/scenes/batch_exports/BatchExportEditForm.tsx index 16cf9ccd3f3a0..be8f7d9c64016 100644 --- a/frontend/src/scenes/batch_exports/BatchExportEditForm.tsx +++ b/frontend/src/scenes/batch_exports/BatchExportEditForm.tsx @@ -180,24 +180,40 @@ export function BatchExportsEditForm(props: BatchExportsEditLogicProps): JSX.Ele ]} /> +
+ + + + +
+ + + +
- - - +
+ + + {batchExportConfigForm.encryption == 'aws:kms' && ( + + + + )}
+ - + ) } diff --git a/frontend/src/scenes/data-management/database/databaseSceneLogic.ts b/frontend/src/scenes/data-management/database/databaseSceneLogic.ts index 60ed5af915776..c42e431d49986 100644 --- a/frontend/src/scenes/data-management/database/databaseSceneLogic.ts +++ b/frontend/src/scenes/data-management/database/databaseSceneLogic.ts @@ -45,6 +45,14 @@ export const databaseSceneLogic = kea([ .sort((a, b) => a.name.localeCompare(b.name)) }, ], + tableOptions: [ + (s) => [s.filteredTables], + (filteredTables: DatabaseSceneRow[]) => + filteredTables.map((row) => ({ + value: row, + label: row.name, + })), + ], }), afterMount(({ actions }) => actions.loadDatabase()), ]) diff --git a/frontend/src/scenes/data-warehouse/DataWarehousePageTabs.tsx b/frontend/src/scenes/data-warehouse/DataWarehousePageTabs.tsx index f3ec71db47111..821746f7f59e6 100644 --- a/frontend/src/scenes/data-warehouse/DataWarehousePageTabs.tsx +++ b/frontend/src/scenes/data-warehouse/DataWarehousePageTabs.tsx @@ -13,8 +13,8 @@ export enum DataWarehouseTab { } const tabUrls = { - [DataWarehouseTab.Posthog]: urls.dataWarehousePosthog(), [DataWarehouseTab.External]: urls.dataWarehouseExternal(), + [DataWarehouseTab.Posthog]: urls.dataWarehousePosthog(), [DataWarehouseTab.Views]: urls.dataWarehouseSavedQueries(), } @@ -25,7 +25,7 @@ const dataWarehouseTabsLogic = kea({ }, reducers: { tab: [ - DataWarehouseTab.Posthog as DataWarehouseTab, + DataWarehouseTab.External as DataWarehouseTab, { setTab: (_, { tab }) => tab, }, @@ -58,14 +58,14 @@ export function DataWarehousePageTabs({ tab }: { tab: DataWarehouseTab }): JSX.E activeKey={tab} onChange={(t) => setTab(t)} tabs={[ - { - key: DataWarehouseTab.Posthog, - label: Posthog, - }, { key: DataWarehouseTab.External, label: External, }, + { + key: DataWarehouseTab.Posthog, + label: Posthog, + }, ...(featureFlags[FEATURE_FLAGS.DATA_WAREHOUSE_VIEWS] ? 
[ { diff --git a/frontend/src/scenes/data-warehouse/DataWarehouseTable.tsx b/frontend/src/scenes/data-warehouse/DataWarehouseTable.tsx index f252be79c400c..6a2b1c6cf4652 100644 --- a/frontend/src/scenes/data-warehouse/DataWarehouseTable.tsx +++ b/frontend/src/scenes/data-warehouse/DataWarehouseTable.tsx @@ -106,6 +106,7 @@ export function TableForm({ id }: { id: string }): JSX.Element { options={[ { label: 'Parquet (recommended)', value: 'Parquet' }, { label: 'CSV', value: 'CSV' }, + { label: 'JSON', value: 'JSONEachRow' }, ]} /> diff --git a/frontend/src/scenes/data-warehouse/ViewLinkModal.tsx b/frontend/src/scenes/data-warehouse/ViewLinkModal.tsx index 23648b19435f2..f2c617c5512e5 100644 --- a/frontend/src/scenes/data-warehouse/ViewLinkModal.tsx +++ b/frontend/src/scenes/data-warehouse/ViewLinkModal.tsx @@ -6,11 +6,11 @@ import { viewLinkLogic } from 'scenes/data-warehouse/viewLinkLogic' import { Form, Field } from 'kea-forms' import { useActions, useValues } from 'kea' import { DatabaseSchemaQueryResponseField } from '~/queries/schema' +import { databaseSceneLogic } from 'scenes/data-management/database/databaseSceneLogic' -export function ViewLinkModal(): JSX.Element { - const { viewOptions, toJoinKeyOptions, selectedView, selectedTable, isFieldModalOpen, fromJoinKeyOptions } = - useValues(viewLinkLogic) - const { selectView, toggleFieldModal } = useActions(viewLinkLogic) +export function ViewLinkModal({ tableSelectable }: { tableSelectable: boolean }): JSX.Element { + const { isFieldModalOpen } = useValues(viewLinkLogic) + const { toggleFieldModal } = useActions(viewLinkLogic) return ( -
-
-
-
- Table - {selectedTable ? selectedTable.name : ''} -
-
- View - - - -
+ + + ) +} + +interface ViewLinkFormProps { + tableSelectable: boolean +} + +export function ViewLinkForm({ tableSelectable }: ViewLinkFormProps): JSX.Element { + const { viewOptions, toJoinKeyOptions, selectedView, selectedTable, fromJoinKeyOptions } = useValues(viewLinkLogic) + const { selectView, toggleFieldModal, selectTable } = useActions(viewLinkLogic) + const { tableOptions } = useValues(databaseSceneLogic) + + return ( + +
+
+
+ Table + {tableSelectable ? ( + + ) : selectedTable ? ( + selectedTable.name + ) : ( + '' + )}
-
-
- Table Key - - - -
-
- -
-
- View Key - - - -
+
+ View + + +
- -
- - Close - - - Save - +
+
+ Table Key + + + +
+
+ +
+
+ View Key + + + +
- - +
+ +
+ + Close + + + Save + +
+ ) } diff --git a/frontend/src/scenes/data-warehouse/external/DataWarehouseExternalScene.tsx b/frontend/src/scenes/data-warehouse/external/DataWarehouseExternalScene.tsx index a4750db49090c..4b163af4135f4 100644 --- a/frontend/src/scenes/data-warehouse/external/DataWarehouseExternalScene.tsx +++ b/frontend/src/scenes/data-warehouse/external/DataWarehouseExternalScene.tsx @@ -30,13 +30,15 @@ export function DataWarehouseExternalScene(): JSX.Element {
} buttons={ - - New Table - + !shouldShowProductIntroduction ? ( + + New Table + + ) : undefined } caption={
diff --git a/frontend/src/scenes/data-warehouse/posthog/DataWarehousePosthogScene.tsx b/frontend/src/scenes/data-warehouse/posthog/DataWarehousePosthogScene.tsx index e9166d1145dac..af92f40749873 100644 --- a/frontend/src/scenes/data-warehouse/posthog/DataWarehousePosthogScene.tsx +++ b/frontend/src/scenes/data-warehouse/posthog/DataWarehousePosthogScene.tsx @@ -1,9 +1,14 @@ -import { LemonTag } from '@posthog/lemon-ui' +import { LemonButton, LemonTag } from '@posthog/lemon-ui' import { PageHeader } from 'lib/components/PageHeader' import { SceneExport } from 'scenes/sceneTypes' import { databaseSceneLogic } from 'scenes/data-management/database/databaseSceneLogic' import { DataWarehousePageTabs, DataWarehouseTab } from '../DataWarehousePageTabs' import { DatabaseTablesContainer } from 'scenes/data-management/database/DatabaseTables' +import { ViewLinkModal } from '../ViewLinkModal' +import { useActions, useValues } from 'kea' +import { viewLinkLogic } from '../viewLinkLogic' +import { featureFlagLogic } from 'lib/logic/featureFlagLogic' +import { FEATURE_FLAGS } from 'lib/constants' export const scene: SceneExport = { component: DataWarehousePosthogScene, @@ -11,6 +16,8 @@ export const scene: SceneExport = { } export function DataWarehousePosthogScene(): JSX.Element { + const { toggleFieldModal } = useActions(viewLinkLogic) + const { featureFlags } = useValues(featureFlagLogic) return (
} + buttons={ + featureFlags[FEATURE_FLAGS.DATA_WAREHOUSE_VIEWS] ? ( + + Link table to view + + ) : undefined + } /> +
) } diff --git a/frontend/src/scenes/data-warehouse/viewLinkLogic.tsx b/frontend/src/scenes/data-warehouse/viewLinkLogic.tsx index 85347a8cacf3d..97f0b710468eb 100644 --- a/frontend/src/scenes/data-warehouse/viewLinkLogic.tsx +++ b/frontend/src/scenes/data-warehouse/viewLinkLogic.tsx @@ -8,6 +8,7 @@ import { databaseSceneLogic } from 'scenes/data-management/database/databaseScen import { loaders } from 'kea-loaders' import { lemonToast } from 'lib/lemon-ui/lemonToast' import type { viewLinkLogicType } from './viewLinkLogicType' +import { ViewLinkKeyLabel } from './ViewLinkModal' const NEW_VIEW_LINK: DataWarehouseViewLink = { id: 'new', @@ -17,6 +18,11 @@ const NEW_VIEW_LINK: DataWarehouseViewLink = { from_join_key: undefined, } +export interface KeySelectOption { + value: string + label: JSX.Element +} + export const viewLinkLogic = kea([ path(['scenes', 'data-warehouse', 'viewLinkLogic']), connect({ @@ -63,11 +69,34 @@ export const viewLinkLogic = kea([ forms(({ actions, values }) => ({ viewLink: { defaults: NEW_VIEW_LINK, - errors: ({ saved_query_id, to_join_key, from_join_key }) => ({ - saved_query_id: !saved_query_id ? 'Must select a view' : undefined, - to_join_key: !to_join_key ? 'Must select a join key' : undefined, - from_join_key: !from_join_key ? 'Must select a join key' : undefined, - }), + errors: ({ saved_query_id, to_join_key, from_join_key }) => { + let to_join_key_err: string | undefined = undefined + let from_join_key_err: string | undefined = undefined + + if (!to_join_key) { + to_join_key_err = 'Must select a join key' + } + + if (!from_join_key) { + from_join_key_err = 'Must select a join key' + } + + if ( + to_join_key && + from_join_key && + values.mappedToJoinKeyOptions[to_join_key]?.type !== + values.mappedFromJoinKeyOptions[from_join_key]?.type + ) { + to_join_key_err = 'Join key types must match' + from_join_key_err = 'Join key types must match' + } + + return { + saved_query_id: !saved_query_id ? 
'Must select a view' : undefined, + to_join_key: to_join_key_err, + from_join_key: from_join_key_err, + } + }, submit: async ({ saved_query_id, to_join_key, from_join_key }) => { if (values.selectedTable) { await api.dataWarehouseViewLinks.create({ @@ -77,7 +106,8 @@ export const viewLinkLogic = kea([ from_join_key, }) actions.toggleFieldModal() - actions.loadDatabase() + // actions.loadDatabase() + // actions.loadViewLinks() } }, }, @@ -120,26 +150,52 @@ export const viewLinkLogic = kea([ ], toJoinKeyOptions: [ (s) => [s.selectedView], - (selectedView: DataWarehouseSceneRow | null) => { + (selectedView: DataWarehouseSceneRow | null): KeySelectOption[] => { if (!selectedView) { return [] } return selectedView.columns.map((column) => ({ value: column.key, - label: column.key, + label: , })) }, ], + mappedToJoinKeyOptions: [ + (s) => [s.selectedView], + (selectedView: DataWarehouseSceneRow | null) => { + if (!selectedView) { + return [] + } + return selectedView.columns.reduce((acc, column) => { + acc[column.key] = column + return acc + }, {}) + }, + ], fromJoinKeyOptions: [ + (s) => [s.selectedTable], + (selectedTable: DataWarehouseSceneRow | null): KeySelectOption[] => { + if (!selectedTable) { + return [] + } + return selectedTable.columns + .filter((column) => column.type !== 'view') + .map((column) => ({ + value: column.key, + label: , + })) + }, + ], + mappedFromJoinKeyOptions: [ (s) => [s.selectedTable], (selectedTable: DataWarehouseSceneRow | null) => { if (!selectedTable) { return [] } - return selectedTable.columns.map((column) => ({ - value: column.key, - label: column.key, - })) + return selectedTable.columns.reduce((acc, column) => { + acc[column.key] = column + return acc + }, {}) }, ], }), diff --git a/frontend/src/scenes/experiments/ExperimentCodeSnippets.tsx b/frontend/src/scenes/experiments/ExperimentCodeSnippets.tsx index e66368e19db1f..b5d0f76e29e16 100644 --- a/frontend/src/scenes/experiments/ExperimentCodeSnippets.tsx +++ 
b/frontend/src/scenes/experiments/ExperimentCodeSnippets.tsx @@ -51,7 +51,7 @@ export function JSSnippet({ flagKey, variant }: SnippetProps): JSX.Element { Test that it works - {`posthog.feature_flags.override({'${flagKey}': '${variant}'})`} + {`posthog.featureFlags.override({'${flagKey}': '${variant}'})`} ) diff --git a/frontend/src/scenes/experiments/MetricSelector.tsx b/frontend/src/scenes/experiments/MetricSelector.tsx index 307bbd61c7762..b21286f758f92 100644 --- a/frontend/src/scenes/experiments/MetricSelector.tsx +++ b/frontend/src/scenes/experiments/MetricSelector.tsx @@ -133,11 +133,7 @@ export function ExperimentInsightCreator({ insightProps }: { insightProps: Insig
- + )} @@ -146,7 +142,7 @@ export function ExperimentInsightCreator({ insightProps }: { insightProps: Insig ) } -export function AttributionSelect({ insightProps, query, setQuery }: EditorFilterProps): JSX.Element { +export function AttributionSelect({ insightProps, query }: EditorFilterProps): JSX.Element { return (
@@ -170,7 +166,7 @@ export function AttributionSelect({ insightProps, query, setQuery }: EditorFilte - +
) } diff --git a/frontend/src/scenes/feature-flags/FeatureFlag.tsx b/frontend/src/scenes/feature-flags/FeatureFlag.tsx index 440e3013d937e..8476b4fd31505 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlag.tsx +++ b/frontend/src/scenes/feature-flags/FeatureFlag.tsx @@ -65,7 +65,7 @@ import { PostHogFeature } from 'posthog-js/react' import { concatWithPunctuation } from 'scenes/insights/utils' import { LemonTab, LemonTabs } from 'lib/lemon-ui/LemonTabs' import { FeatureFlagReleaseConditions } from './FeatureFlagReleaseConditions' -import { NotebookAddButton } from 'scenes/notebooks/NotebookAddButton/NotebookAddButton' +import { NotebookSelectButton } from 'scenes/notebooks/NotebookSelectButton/NotebookSelectButton' export const scene: SceneExport = { component: FeatureFlag, @@ -514,7 +514,7 @@ export function FeatureFlag({ id }: { id?: string } = {}): JSX.Element { buttons={ <>
- null, analytics_dashboards: () => null, has_enriched_analytics: () => null, + surveys: () => null, } export function flagActivityDescriber(logItem: ActivityLogItem, asNotification?: boolean): HumanizedChange { diff --git a/frontend/src/scenes/feature-flags/featureFlagLogic.test.ts b/frontend/src/scenes/feature-flags/featureFlagLogic.test.ts index 23aedb1086bba..f0516fe9956e1 100644 --- a/frontend/src/scenes/feature-flags/featureFlagLogic.test.ts +++ b/frontend/src/scenes/feature-flags/featureFlagLogic.test.ts @@ -37,6 +37,7 @@ function generateFeatureFlag( usage_dashboard: 1234, tags: [], has_enriched_analytics, + surveys: [], } } diff --git a/frontend/src/scenes/feature-flags/featureFlagLogic.ts b/frontend/src/scenes/feature-flags/featureFlagLogic.ts index 24f90439d16f8..aeb4b9471f764 100644 --- a/frontend/src/scenes/feature-flags/featureFlagLogic.ts +++ b/frontend/src/scenes/feature-flags/featureFlagLogic.ts @@ -19,6 +19,8 @@ import { DashboardBasicType, NewEarlyAccessFeatureType, EarlyAccessFeatureType, + Survey, + SurveyQuestionType, } from '~/types' import api from 'lib/api' import { router, urlToAction } from 'kea-router' @@ -40,6 +42,7 @@ import { userLogic } from 'scenes/userLogic' import { newDashboardLogic } from 'scenes/dashboard/newDashboardLogic' import { dashboardsLogic } from 'scenes/dashboard/dashboards/dashboardsLogic' import { NEW_EARLY_ACCESS_FEATURE } from 'scenes/early-access-features/earlyAccessFeatureLogic' +import { NEW_SURVEY, NewSurvey } from 'scenes/surveys/surveyLogic' const getDefaultRollbackCondition = (): FeatureFlagRollbackConditions => ({ operator: 'gt', @@ -73,6 +76,7 @@ const NEW_FLAG: FeatureFlagType = { experiment_set: null, features: [], rollback_conditions: [], + surveys: null, performed_rollback: false, can_edit: true, tags: [], @@ -414,6 +418,15 @@ export const featureFlagLogic = kea([ features: [...(state.features || []), newEarlyAccessFeature], } }, + createSurveySuccess: (state, { newSurvey }) => { + if (!state) { + 
return state + } + return { + ...state, + surveys: [...(state.surveys || []), newSurvey], + } + }, }, ], featureFlagMissing: [false, { setFeatureFlagMissing: () => true }], @@ -520,12 +533,33 @@ export const featureFlagLogic = kea([ null as EarlyAccessFeatureType | null, { createEarlyAccessFeature: async () => { - const updatedEarlyAccessFeature = { + const newEarlyAccessFeature = { ...NEW_EARLY_ACCESS_FEATURE, name: `Early access: ${values.featureFlag.key}`, feature_flag_id: values.featureFlag.id, } - return await api.earlyAccessFeatures.create(updatedEarlyAccessFeature as NewEarlyAccessFeatureType) + return await api.earlyAccessFeatures.create(newEarlyAccessFeature as NewEarlyAccessFeatureType) + }, + }, + ], + // used to generate a new survey + // but all subsequent operations after generation should occur via the surveyLogic + newSurvey: [ + null as Survey | null, + { + createSurvey: async () => { + const newSurvey = { + ...NEW_SURVEY, + name: `Survey: ${values.featureFlag.key}`, + linked_flag_id: values.featureFlag.id, + questions: [ + { + type: SurveyQuestionType.Open, + question: `What do you think of ${values.featureFlag.key}?`, + }, + ], + } + return await api.surveys.create(newSurvey as NewSurvey) }, }, ], @@ -869,6 +903,22 @@ export const featureFlagLogic = kea([ return (featureFlag?.features?.length || 0) > 0 }, ], + canCreateEarlyAccessFeature: [ + (s) => [s.featureFlag, s.variants], + (featureFlag, variants) => { + return ( + featureFlag && + featureFlag.filters.aggregation_group_type_index == undefined && + variants.length === 0 + ) + }, + ], + hasSurveys: [ + (s) => [s.featureFlag], + (featureFlag) => { + return featureFlag?.surveys && featureFlag.surveys.length > 0 + }, + ], }), urlToAction(({ actions, props }) => ({ [urls.featureFlag(props.id ?? 
'new')]: (_, __, ___, { method }) => { diff --git a/frontend/src/scenes/ingestion/Sidebar.tsx b/frontend/src/scenes/ingestion/Sidebar.tsx index 97817b5272345..a8da1b32fd70b 100644 --- a/frontend/src/scenes/ingestion/Sidebar.tsx +++ b/frontend/src/scenes/ingestion/Sidebar.tsx @@ -42,7 +42,7 @@ export function Sidebar(): JSX.Element { {currentOrganization?.teams && currentOrganization.teams.length > 1 && ( <> } + icon={} onClick={() => toggleProjectSwitcher()} dropdown={{ visible: isProjectSwitcherShown, diff --git a/frontend/src/scenes/insights/Insight.tsx b/frontend/src/scenes/insights/Insight.tsx index faea92fc4ec7c..e782500ebf652 100644 --- a/frontend/src/scenes/insights/Insight.tsx +++ b/frontend/src/scenes/insights/Insight.tsx @@ -30,7 +30,6 @@ export function Insight({ insightId }: InsightSceneProps): JSX.Element { // insightDataLogic const { query, isQueryBasedInsight, showQueryEditor } = useValues(insightDataLogic(insightProps)) - const { setQuery } = useActions(insightDataLogic(insightProps)) // other logics useMountedLogic(insightCommandLogic(insightProps)) @@ -58,7 +57,6 @@ export function Insight({ insightId }: InsightSceneProps): JSX.Element { ([ timezone: [(s) => [s.insightData], (insightData) => insightData?.timezone || 'UTC'], }), - listeners(({ actions, values }) => ({ + listeners(({ actions, values, props }) => ({ updateDateRange: ({ dateRange }) => { const localQuerySource = values.querySource ? 
values.querySource @@ -242,6 +242,10 @@ export const insightVizDataLogic = kea([ } }, setQuery: ({ query }) => { + if (props.setQuery) { + props.setQuery(query as InsightVizNode) + } + if (isInsightVizNode(query)) { const querySource = query.source const filters = queryNodeToFilter(querySource) diff --git a/frontend/src/scenes/insights/views/LineGraph/LineGraph.tsx b/frontend/src/scenes/insights/views/LineGraph/LineGraph.tsx index 3ae8e5abc5745..34f683d268e75 100644 --- a/frontend/src/scenes/insights/views/LineGraph/LineGraph.tsx +++ b/frontend/src/scenes/insights/views/LineGraph/LineGraph.tsx @@ -28,7 +28,7 @@ import { lineGraphLogic } from 'scenes/insights/views/LineGraph/lineGraphLogic' import { TooltipConfig } from 'scenes/insights/InsightTooltip/insightTooltipUtils' import { groupsModel } from '~/models/groupsModel' import { ErrorBoundary } from '~/layout/ErrorBoundary' -import { formatPercentStackAxisValue } from 'scenes/insights/aggregationAxisFormat' +import { formatAggregationAxisValue, formatPercentStackAxisValue } from 'scenes/insights/aggregationAxisFormat' import { insightLogic } from 'scenes/insights/insightLogic' import { useResizeObserver } from 'lib/hooks/useResizeObserver' import { PieChart } from 'scenes/insights/views/LineGraph/PieChart' @@ -471,8 +471,27 @@ export function LineGraph_({ }} renderCount={ tooltipConfig?.renderCount || - ((value: number): string => - formatPercentStackAxisValue(trendsFilter, value, isPercentStackView)) + ((value: number): string => { + if (!isPercentStackView) { + return formatAggregationAxisValue(trendsFilter, value) + } + + const total = seriesData.reduce((a, b) => a + b.count, 0) + const percentageLabel: number = parseFloat( + ((value / total) * 100).toFixed(1) + ) + + const isNaN = Number.isNaN(percentageLabel) + + if (isNaN) { + return formatAggregationAxisValue(trendsFilter, value) + } + + return `${formatAggregationAxisValue( + trendsFilter, + value + )} (${percentageLabel}%)` + }) } 
entitiesAsColumnsOverride={formula ? false : undefined} hideInspectActorsSection={!onClick || !showPersonsModal} diff --git a/frontend/src/scenes/instance/SystemStatus/index.tsx b/frontend/src/scenes/instance/SystemStatus/index.tsx index 95351c18e224e..11adb42107c21 100644 --- a/frontend/src/scenes/instance/SystemStatus/index.tsx +++ b/frontend/src/scenes/instance/SystemStatus/index.tsx @@ -30,7 +30,7 @@ export function SystemStatus(): JSX.Element { const { user } = useValues(userLogic) const { featureFlags } = useValues(featureFlagLogic) - const tabs = [ + let tabs = [ { key: 'overview', label: 'System overview', @@ -39,7 +39,7 @@ export function SystemStatus(): JSX.Element { ] as LemonTab[] if (user?.is_staff) { - tabs.concat([ + tabs = tabs.concat([ { key: 'metrics', label: 'Internal metrics', diff --git a/frontend/src/scenes/notebooks/Nodes/NodeWrapper.tsx b/frontend/src/scenes/notebooks/Nodes/NodeWrapper.tsx index ee4ca592ef1cd..12b5bc243f948 100644 --- a/frontend/src/scenes/notebooks/Nodes/NodeWrapper.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NodeWrapper.tsx @@ -5,6 +5,8 @@ import { ReactNodeViewRenderer, ExtendedRegExpMatchArray, Attribute, + NodeViewProps, + getExtensionField, } from '@tiptap/react' import { ReactNode, useCallback, useRef } from 'react' import clsx from 'clsx' @@ -17,9 +19,8 @@ import { notebookLogic } from '../Notebook/notebookLogic' import { useInView } from 'react-intersection-observer' import { NotebookNodeType } from '~/types' import { ErrorBoundary } from '~/layout/ErrorBoundary' -import { NotebookNodeContext, notebookNodeLogic } from './notebookNodeLogic' -import { uuid } from 'lib/utils' -import { posthogNodePasteRule } from './utils' +import { NotebookNodeContext, NotebookNodeLogicProps, notebookNodeLogic } from './notebookNodeLogic' +import { posthogNodePasteRule, useSyncedAttributes } from './utils' import { NotebookNodeAttributes, NotebookNodeViewProps, @@ -61,18 +62,20 @@ export function NodeWrapper({ minHeight, node, 
getPos, + attributes, updateAttributes, widgets = [], }: NodeWrapperProps & NotebookNodeViewProps): JSX.Element { const mountedNotebookLogic = useMountedLogic(notebookLogic) - const { isEditable } = useValues(mountedNotebookLogic) + const { isEditable, isShowingSidebar } = useValues(mountedNotebookLogic) + const { setIsShowingSidebar } = useActions(mountedNotebookLogic) // nodeId can start null, but should then immediately be generated - const nodeId = node.attrs.nodeId - const nodeLogicProps = { + const nodeId = attributes.nodeId + const nodeLogicProps: NotebookNodeLogicProps = { node, nodeType, - nodeAttributes: node.attrs, + attributes, updateAttributes, nodeId, notebookLogic: mountedNotebookLogic, @@ -84,13 +87,13 @@ export function NodeWrapper({ } const nodeLogic = useMountedLogic(notebookNodeLogic(nodeLogicProps)) const { title, resizeable, expanded } = useValues(nodeLogic) - const { setExpanded, deleteNode, setWidgetsVisible } = useActions(nodeLogic) + const { setExpanded, deleteNode } = useActions(nodeLogic) const [ref, inView] = useInView({ triggerOnce: true }) const contentRef = useRef(null) // If resizeable is true then the node attr "height" is required - const height = node.attrs.height ?? heightEstimate + const height = attributes.height ?? heightEstimate const onResizeStart = useCallback((): void => { if (!resizeable) { @@ -104,14 +107,14 @@ export function NodeWrapper({ if (heightAttr && heightAttr !== initialHeightAttr) { updateAttributes({ height: contentRef.current?.clientHeight, - }) + } as any) } } window.addEventListener('mouseup', onResizedEnd) }, [resizeable, updateAttributes]) - const parsedHref = typeof href === 'function' ? href(node.attrs) : href + const parsedHref = typeof href === 'function' ? href(attributes) : href // Element is resizable if resizable is set to true. 
If expandable is set to true then is is only resizable if expanded is true const isResizeable = resizeable && (!expandable || expanded) @@ -163,11 +166,12 @@ export function NodeWrapper({ /> )} - {!!widgets.length && isEditable ? ( + {widgets.length > 0 ? ( setWidgetsVisible(true)} + onClick={() => setIsShowingSidebar(!isShowingSidebar)} size="small" icon={} + active={isShowingSidebar && selected} /> ) : null} @@ -211,27 +215,38 @@ export type CreatePostHogWidgetNodeOptions> widgets?: NotebookNodeWidget[] + serializedText?: (attributes: NotebookNodeAttributes) => string } export function createPostHogWidgetNode({ Component, pasteOptions, attributes, + serializedText, ...wrapperProps }: CreatePostHogWidgetNodeOptions): Node { - const WrappedComponent = (props: NotebookNodeViewProps): JSX.Element => { + // NOTE: We use NodeViewProps here as we convert them to NotebookNodeViewProps + const WrappedComponent = (props: NodeViewProps): JSX.Element => { + const [attributes, updateAttributes] = useSyncedAttributes(props) + if (props.node.attrs.nodeId === null) { // TODO only wrapped in setTimeout because of the flushSync bug setTimeout(() => { props.updateAttributes({ - nodeId: uuid(), + nodeId: attributes.nodeId, }) }, 0) } + const nodeProps: NotebookNodeViewProps = { + ...props, + attributes, + updateAttributes, + } + return ( - - + + ) } @@ -242,6 +257,19 @@ export function createPostHogWidgetNode( atom: true, draggable: true, + serializedText: serializedText, + + extendNodeSchema(extension) { + const context = { + name: extension.name, + options: extension.options, + storage: extension.storage, + } + return { + serializedText: getExtensionField(extension, 'serializedText', context), + } + }, + addAttributes() { return { height: {}, diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeBacklink.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeBacklink.tsx index 9935f9c6f1608..154600a7e1d3f 100644 --- 
a/frontend/src/scenes/notebooks/Nodes/NotebookNodeBacklink.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeBacklink.tsx @@ -2,7 +2,16 @@ import { mergeAttributes, Node, NodeViewProps } from '@tiptap/core' import { NodeViewWrapper, ReactNodeViewRenderer } from '@tiptap/react' import { InsightModel, NotebookNodeType, NotebookTarget } from '~/types' import { Link } from '@posthog/lemon-ui' -import { IconGauge, IconBarChart, IconFlag, IconExperiment, IconLive, IconPerson, IconCohort } from 'lib/lemon-ui/icons' +import { + IconGauge, + IconBarChart, + IconFlag, + IconExperiment, + IconLive, + IconPerson, + IconCohort, + IconJournal, +} from 'lib/lemon-ui/icons' import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' import { urls } from 'scenes/urls' import clsx from 'clsx' @@ -22,6 +31,7 @@ const ICON_MAP = { events: , persons: , cohorts: , + notebooks: , } const Component = (props: NodeViewProps): JSX.Element => { @@ -67,6 +77,8 @@ function backlinkHref(id: string, type: TaxonomicFilterGroupType): string { return urls.experiment(id) } else if (type === TaxonomicFilterGroupType.Dashboards) { return urls.dashboard(id) + } else if (type === TaxonomicFilterGroupType.Notebooks) { + return urls.notebook(id) } return '' } @@ -139,6 +151,16 @@ export const NotebookNodeBacklink = Node.create({ return { id: id, type: TaxonomicFilterGroupType.Dashboards, title: dashboard.name } }, }), + posthogNodePasteRule({ + find: urls.notebook('(.+)'), + editor: this.editor, + type: this.type, + getAttributes: async (match) => { + const id = match[1] + const notebook = await api.notebooks.get(id) + return { id: id, type: TaxonomicFilterGroupType.Notebooks, title: notebook.title } + }, + }), ] }, }) diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeEarlyAccessFeature.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeEarlyAccessFeature.tsx index fe4c25393f580..801970c380b51 100644 --- 
a/frontend/src/scenes/notebooks/Nodes/NotebookNodeEarlyAccessFeature.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeEarlyAccessFeature.tsx @@ -16,7 +16,7 @@ import { PersonList } from 'scenes/early-access-features/EarlyAccessFeature' import { buildFlagContent } from './NotebookNodeFlag' const Component = (props: NotebookNodeViewProps): JSX.Element => { - const { id } = props.node.attrs + const { id } = props.attributes const { earlyAccessFeature, earlyAccessFeatureLoading } = useValues(earlyAccessFeatureLogic({ id })) const { expanded } = useValues(notebookNodeLogic) const { insertAfter } = useActions(notebookNodeLogic) diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeExperiment.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeExperiment.tsx index ae7af2b3c00a3..cb62cd17f301f 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodeExperiment.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeExperiment.tsx @@ -19,7 +19,7 @@ import { ExperimentResult } from 'scenes/experiments/ExperimentResult' import { ResultsTag, StatusTag } from 'scenes/experiments/Experiment' const Component = (props: NotebookNodeViewProps): JSX.Element => { - const { id } = props.node.attrs + const { id } = props.attributes const { experiment, experimentLoading, isExperimentRunning } = useValues(experimentLogic({ experimentId: id })) const { loadExperiment } = useActions(experimentLogic({ experimentId: id })) const { expanded, nextNode } = useValues(notebookNodeLogic) diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeFlag.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeFlag.tsx index 09ab1aff3c398..066917f6f3c9a 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodeFlag.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeFlag.tsx @@ -2,7 +2,7 @@ import { createPostHogWidgetNode } from 'scenes/notebooks/Nodes/NodeWrapper' import { FeatureFlagType, NotebookNodeType } from '~/types' import { BindLogic, useActions, useValues } 
from 'kea' import { featureFlagLogic, FeatureFlagLogicProps } from 'scenes/feature-flags/featureFlagLogic' -import { IconFlag, IconRecording, IconRocketLaunch } from 'lib/lemon-ui/icons' +import { IconFlag, IconRecording, IconRocketLaunch, IconSurveys } from 'lib/lemon-ui/icons' import clsx from 'clsx' import { LemonButton, LemonDivider } from '@posthog/lemon-ui' import { urls } from 'scenes/urls' @@ -15,21 +15,27 @@ import { FeatureFlagReleaseConditions } from 'scenes/feature-flags/FeatureFlagRe import api from 'lib/api' import { buildEarlyAccessFeatureContent } from './NotebookNodeEarlyAccessFeature' import { notebookNodeFlagLogic } from './NotebookNodeFlagLogic' +import { buildSurveyContent } from './NotebookNodeSurvey' const Component = (props: NotebookNodeViewProps): JSX.Element => { - const { id } = props.node.attrs + const { id } = props.attributes const { featureFlag, featureFlagLoading, recordingFilterForFlag, hasEarlyAccessFeatures, newEarlyAccessFeatureLoading, + canCreateEarlyAccessFeature, + hasSurveys, + newSurveyLoading, } = useValues(featureFlagLogic({ id })) - const { createEarlyAccessFeature } = useActions(featureFlagLogic({ id })) + const { createEarlyAccessFeature, createSurvey } = useActions(featureFlagLogic({ id })) const { expanded, nextNode } = useValues(notebookNodeLogic) const { insertAfter } = useActions(notebookNodeLogic) - const { shouldDisableInsertEarlyAccessFeature } = useValues(notebookNodeFlagLogic({ id, insertAfter })) + const { shouldDisableInsertEarlyAccessFeature, shouldDisableInsertSurvey } = useValues( + notebookNodeFlagLogic({ id, insertAfter }) + ) return (
@@ -64,37 +70,67 @@ const Component = (props: NotebookNodeViewProps): JS
+ {canCreateEarlyAccessFeature && ( + } + loading={newEarlyAccessFeatureLoading} + onClick={(e) => { + // prevent expanding the node if it isn't expanded + e.stopPropagation() + + if (!hasEarlyAccessFeatures) { + createEarlyAccessFeature() + } else { + if ((featureFlag?.features?.length || 0) <= 0) { + return + } + if (!shouldDisableInsertEarlyAccessFeature(nextNode) && featureFlag.features) { + insertAfter(buildEarlyAccessFeatureContent(featureFlag.features[0].id)) + } + } + }} + disabledReason={ + shouldDisableInsertEarlyAccessFeature(nextNode) && + 'Early access feature already exists below' + } + > + {hasEarlyAccessFeatures ? 'View' : 'Create'} early access feature + + )} } - loading={newEarlyAccessFeatureLoading} + icon={} + loading={newSurveyLoading} onClick={(e) => { // prevent expanding the node if it isn't expanded e.stopPropagation() - if (!hasEarlyAccessFeatures) { - createEarlyAccessFeature() + + if (!hasSurveys) { + createSurvey() } else { - if ((featureFlag?.features?.length || 0) <= 0) { + if ((featureFlag?.surveys?.length || 0) <= 0) { return } - if (!shouldDisableInsertEarlyAccessFeature(nextNode) && featureFlag.features) { - insertAfter(buildEarlyAccessFeatureContent(featureFlag.features[0].id)) + if (!shouldDisableInsertSurvey(nextNode) && featureFlag.surveys) { + insertAfter(buildSurveyContent(featureFlag.surveys[0].id)) } } }} - disabledReason={ - shouldDisableInsertEarlyAccessFeature(nextNode) && - 'Early access feature already exists below' - } + disabledReason={shouldDisableInsertSurvey(nextNode) && 'Survey already exists below'} > - {hasEarlyAccessFeatures ? 'View' : 'Create'} early access feature + {hasSurveys ? 
'View' : 'Create'} survey } - onClick={() => { + onClick={(e) => { + // prevent expanding the node if it isn't expanded + e.stopPropagation() + if (nextNode?.type.name !== NotebookNodeType.FeatureFlagCodeExample) { insertAfter(buildCodeExampleContent(id)) } @@ -107,7 +143,10 @@ const Component = (props: NotebookNodeViewProps): JS Show implementation { + onClick={(e) => { + // prevent expanding the node if it isn't expanded + e.stopPropagation() + if (nextNode?.type.name !== NotebookNodeType.RecordingPlaylist) { insertAfter(buildPlaylistContent(recordingFilterForFlag)) } diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeFlagCodeExample.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeFlagCodeExample.tsx index 2167d7358b3e7..6249b17f51349 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodeFlagCodeExample.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeFlagCodeExample.tsx @@ -9,7 +9,7 @@ import { notebookNodeLogic } from './notebookNodeLogic' import api from 'lib/api' const Component = (props: NotebookNodeViewProps): JSX.Element => { - const { id } = props.node.attrs + const { id } = props.attributes const { featureFlag } = useValues(featureFlagLogic({ id })) const { expanded } = useValues(notebookNodeLogic) diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeFlagLogic.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeFlagLogic.tsx index b597575854e69..aa0ed54d437d7 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodeFlagLogic.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeFlagLogic.tsx @@ -5,6 +5,7 @@ import { buildEarlyAccessFeatureContent } from './NotebookNodeEarlyAccessFeature import { NotebookNodeType } from '~/types' import type { notebookNodeFlagLogicType } from './NotebookNodeFlagLogicType' +import { buildSurveyContent } from './NotebookNodeSurvey' export type NotebookNodeFlagLogicProps = { id: FeatureFlagLogicProps['id'] @@ -17,13 +18,16 @@ export const notebookNodeFlagLogic = kea([ key(({ 
id }) => id), connect((props: NotebookNodeFlagLogicProps) => ({ - actions: [featureFlagLogic({ id: props.id }), ['createEarlyAccessFeatureSuccess']], - values: [featureFlagLogic({ id: props.id }), ['featureFlag', 'hasEarlyAccessFeatures']], + actions: [featureFlagLogic({ id: props.id }), ['createEarlyAccessFeatureSuccess', 'createSurveySuccess']], + values: [featureFlagLogic({ id: props.id }), ['featureFlag', 'hasEarlyAccessFeatures', 'hasSurveys']], })), listeners(({ props }) => ({ createEarlyAccessFeatureSuccess: async ({ newEarlyAccessFeature }) => { props.insertAfter(buildEarlyAccessFeatureContent(newEarlyAccessFeature.id)) }, + createSurveySuccess: async ({ newSurvey }) => { + props.insertAfter(buildSurveyContent(newSurvey.id)) + }, })), selectors({ shouldDisableInsertEarlyAccessFeature: [ @@ -39,5 +43,18 @@ export const notebookNodeFlagLogic = kea([ ) }, ], + shouldDisableInsertSurvey: [ + (s) => [s.featureFlag, s.hasSurveys], + (featureFlag, hasSurveys) => + (nextNode: Node | null): boolean => { + return ( + (nextNode?.type.name === NotebookNodeType.Survey && + hasSurveys && + featureFlag.surveys && + nextNode?.attrs.id === featureFlag.surveys[0].id) || + false + ) + }, + ], }), ]) diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeImage.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeImage.tsx index 808d4e886c0d2..8dc4e00839409 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodeImage.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeImage.tsx @@ -9,7 +9,7 @@ import { NotebookNodeViewProps } from '../Notebook/utils' const MAX_DEFAULT_HEIGHT = 1000 const Component = (props: NotebookNodeViewProps): JSX.Element => { - const { file, src, height } = props.node.attrs + const { file, src, height } = props.attributes const [uploading, setUploading] = useState(false) const [error, setError] = useState() @@ -79,6 +79,10 @@ export const NotebookNodeImage = createPostHogWidgetNode { + // TODO file is null when this runs... should it be? 
+ return attrs?.file?.name || '' + }, heightEstimate: 400, minHeight: 100, resizeable: true, diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodePerson.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodePerson.tsx index 2de3d39f965b8..d582171f9690a 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodePerson.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodePerson.tsx @@ -13,7 +13,7 @@ import { asDisplay } from 'scenes/persons/person-utils' import api from 'lib/api' const Component = (props: NotebookNodeViewProps): JSX.Element => { - const id = props.node.attrs.id + const { id } = props.attributes const logic = personLogic({ id }) const { person, personLoading } = useValues(logic) const { expanded } = useValues(notebookNodeLogic) @@ -76,4 +76,9 @@ export const NotebookNodePerson = createPostHogWidgetNode { + const personTitle = attrs?.title || '' + const personId = attrs?.id || '' + return `${personTitle} ${personId}`.trim() + }, }) diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodePlaylist.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodePlaylist.tsx index 62cd9e2505657..0b0e3b7ca4ee8 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodePlaylist.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodePlaylist.tsx @@ -4,7 +4,6 @@ import { RecordingsLists, SessionRecordingsPlaylistProps, } from 'scenes/session-recordings/playlist/SessionRecordingsPlaylist' -import { useJsonNodeState } from './utils' import { addedAdvancedFilters, getDefaultFilters, @@ -12,31 +11,30 @@ import { } from 'scenes/session-recordings/playlist/sessionRecordingsListLogic' import { useActions, useValues } from 'kea' import { SessionRecordingPlayer } from 'scenes/session-recordings/player/SessionRecordingPlayer' -import { useMemo, useRef, useState } from 'react' -import { fromParamsGivenUrl, uuid } from 'lib/utils' +import { useMemo, useState } from 'react' +import { fromParamsGivenUrl } from 'lib/utils' import { LemonButton } from 
'@posthog/lemon-ui' -import { IconChevronLeft, IconSettings } from 'lib/lemon-ui/icons' +import { IconChevronLeft } from 'lib/lemon-ui/icons' import { urls } from 'scenes/urls' import { notebookNodeLogic } from './notebookNodeLogic' -import { JSONContent, NotebookNodeViewProps, NotebookNodeWidgetSettings } from '../Notebook/utils' +import { JSONContent, NotebookNodeViewProps, NotebookNodeAttributeProperties } from '../Notebook/utils' import { SessionRecordingsFilters } from 'scenes/session-recordings/filters/SessionRecordingsFilters' import { ErrorBoundary } from '@sentry/react' const Component = (props: NotebookNodeViewProps): JSX.Element => { - const [filters, setFilters] = useJsonNodeState( - props.node.attrs, - props.updateAttributes, - 'filters' - ) - - const playerKey = useRef(`notebook-${uuid()}`).current + const { filters, nodeId } = props.attributes + const playerKey = `notebook-${nodeId}` const recordingPlaylistLogicProps: SessionRecordingsPlaylistProps = { + logicKey: playerKey, filters, updateSearchParams: false, autoPlay: false, - mode: 'notebook', - onFiltersChange: setFilters, + onFiltersChange: (newFilters) => { + props.updateAttributes({ + filters: newFilters, + }) + }, } const { expanded } = useValues(notebookNodeLogic) @@ -48,6 +46,7 @@ const Component = (props: NotebookNodeViewProps) if (!expanded) { return
20+ recordings
} + const content = !activeSessionRecording?.id ? ( ) : ( @@ -75,12 +74,8 @@ const Component = (props: NotebookNodeViewProps) export const Settings = ({ attributes, updateAttributes, -}: NotebookNodeWidgetSettings): JSX.Element => { - const [filters, setFilters] = useJsonNodeState( - attributes, - updateAttributes, - 'filters' - ) +}: NotebookNodeAttributeProperties): JSX.Element => { + const { filters } = attributes const [showAdvancedFilters, setShowAdvancedFilters] = useState(false) const defaultFilters = getDefaultFilters() @@ -93,9 +88,9 @@ export const Settings = ({ updateAttributes({ filters })} showPropertyFilters - onReset={() => setFilters(undefined)} + onReset={() => updateAttributes({ filters: undefined })} hasAdvancedFilters={hasAdvancedFilters} showAdvancedFilters={showAdvancedFilters} setShowAdvancedFilters={setShowAdvancedFilters} @@ -105,7 +100,7 @@ export const Settings = ({ } type NotebookNodePlaylistAttributes = { - filters: FilterType + filters: RecordingFilters } export const NotebookNodePlaylist = createPostHogWidgetNode({ @@ -135,7 +130,6 @@ export const NotebookNodePlaylist = createPostHogWidgetNode, Component: Settings, }, ], diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeQuery.scss b/frontend/src/scenes/notebooks/Nodes/NotebookNodeQuery.scss new file mode 100644 index 0000000000000..d9f57009fa37b --- /dev/null +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeQuery.scss @@ -0,0 +1,25 @@ +@import '../../../styles/mixins'; + +// Here we override based on NotebookNode the ph-query styling, so +// as to not change the global styling. 
We need the extra nesting to ensure we +// are more specific than the other insights css + +.NotebookNode.ph-query { + .insights-graph-container { + .ant-card-body { + padding: 0; + } + + .RetentionContainer { + .LineGraph { + position: relative; + } + } + } + + .funnel-insights-container { + &.non-empty-state { + min-height: initial; + } + } +} diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeQuery.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeQuery.tsx index 5bc8456888a68..0181bcc0eca9f 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodeQuery.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeQuery.tsx @@ -1,16 +1,17 @@ import { Query } from '~/queries/Query/Query' import { DataTableNode, InsightVizNode, NodeKind, QuerySchema } from '~/queries/schema' import { createPostHogWidgetNode } from 'scenes/notebooks/Nodes/NodeWrapper' -import { useValues } from 'kea' +import { useMountedLogic, useValues } from 'kea' import { InsightShortId, NotebookNodeType } from '~/types' -import { useJsonNodeState } from './utils' import { useMemo } from 'react' import { notebookNodeLogic } from './notebookNodeLogic' -import { NotebookNodeViewProps, NotebookNodeWidgetSettings } from '../Notebook/utils' +import { NotebookNodeViewProps, NotebookNodeAttributeProperties } from '../Notebook/utils' import clsx from 'clsx' -import { IconSettings } from 'lib/lemon-ui/icons' import { urls } from 'scenes/urls' import api from 'lib/api' +import { containsHogQLQuery, isHogQLQuery, isNodeWithSource } from '~/queries/utils' + +import './NotebookNodeQuery.scss' const DEFAULT_QUERY: QuerySchema = { kind: NodeKind.DataTableNode, @@ -24,19 +25,22 @@ const DEFAULT_QUERY: QuerySchema = { } const Component = (props: NotebookNodeViewProps): JSX.Element | null => { - const [query] = useJsonNodeState(props.node.attrs, props.updateAttributes, 'query') - const { expanded } = useValues(notebookNodeLogic) + const { query } = props.attributes + const nodeLogic = 
useMountedLogic(notebookNodeLogic) + const { expanded } = useValues(nodeLogic) const modifiedQuery = useMemo(() => { const modifiedQuery = { ...query } - if (NodeKind.DataTableNode === modifiedQuery.kind) { + if (NodeKind.DataTableNode === modifiedQuery.kind || NodeKind.SavedInsightNode === modifiedQuery.kind) { // We don't want to show the insights button for now modifiedQuery.showOpenEditorButton = false modifiedQuery.full = false modifiedQuery.showHogQLEditor = false modifiedQuery.embedded = true - } else if (NodeKind.InsightVizNode === modifiedQuery.kind) { + } + + if (NodeKind.InsightVizNode === modifiedQuery.kind || NodeKind.SavedInsightNode === modifiedQuery.kind) { modifiedQuery.showFilters = false modifiedQuery.showHeader = false modifiedQuery.showTable = false @@ -55,7 +59,7 @@ const Component = (props: NotebookNodeViewProps): J
- +
) } @@ -67,19 +71,22 @@ type NotebookNodeQueryAttributes = { export const Settings = ({ attributes, updateAttributes, -}: NotebookNodeWidgetSettings): JSX.Element => { - const [query, setQuery] = useJsonNodeState(attributes, updateAttributes, 'query') +}: NotebookNodeAttributeProperties): JSX.Element => { + const { query } = attributes const modifiedQuery = useMemo(() => { const modifiedQuery = { ...query } - if (NodeKind.DataTableNode === modifiedQuery.kind) { + if (NodeKind.DataTableNode === modifiedQuery.kind || NodeKind.SavedInsightNode === modifiedQuery.kind) { // We don't want to show the insights button for now modifiedQuery.showOpenEditorButton = false modifiedQuery.showHogQLEditor = true modifiedQuery.showResultsTable = false - modifiedQuery.showReload = true - } else if (NodeKind.InsightVizNode === modifiedQuery.kind) { + modifiedQuery.showReload = false + modifiedQuery.showElapsedTime = false + } + + if (NodeKind.InsightVizNode === modifiedQuery.kind || NodeKind.SavedInsightNode === modifiedQuery.kind) { modifiedQuery.showFilters = true modifiedQuery.showResults = false modifiedQuery.embedded = true @@ -92,11 +99,16 @@ export const Settings = ({
{ - setQuery({ ...query, source: (t as DataTableNode | InsightVizNode).source } as QuerySchema) + updateAttributes({ + query: { + ...attributes.query, + source: (t as DataTableNode | InsightVizNode).source, + } as QuerySchema, + }) }} - readOnly={false} - uniqueKey={attributes.nodeId} />
) @@ -109,13 +121,21 @@ export const NotebookNodeQuery = createPostHogWidgetNode, Component: Settings, }, ], @@ -148,4 +167,17 @@ export const NotebookNodeQuery = createPostHogWidgetNode { + let text = '' + const q = attrs.query + if (containsHogQLQuery(q)) { + if (isHogQLQuery(q)) { + text = q.query + } + if (isNodeWithSource(q)) { + text = isHogQLQuery(q.source) ? q.source.query : '' + } + } + return text + }, }) diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeRecording.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeRecording.tsx index 05cab33e32484..5004ba492124a 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodeRecording.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeRecording.tsx @@ -15,15 +15,14 @@ import { } from 'scenes/session-recordings/playlist/SessionRecordingPreview' import { notebookNodeLogic } from './notebookNodeLogic' import { LemonSwitch } from '@posthog/lemon-ui' -import { IconSettings } from 'lib/lemon-ui/icons' -import { JSONContent, NotebookNodeViewProps, NotebookNodeWidgetSettings } from '../Notebook/utils' +import { JSONContent, NotebookNodeViewProps, NotebookNodeAttributeProperties } from '../Notebook/utils' const HEIGHT = 500 const MIN_HEIGHT = 400 const Component = (props: NotebookNodeViewProps): JSX.Element => { - const id = props.node.attrs.id - const noInspector: boolean = props.node.attrs.noInspector + const id = props.attributes.id + const noInspector: boolean = props.attributes.noInspector const recordingLogicProps: SessionRecordingPlayerProps = { ...sessionRecordingPlayerProps(id), @@ -58,7 +57,7 @@ const Component = (props: NotebookNodeViewProps export const Settings = ({ attributes, updateAttributes, -}: NotebookNodeWidgetSettings): JSX.Element => { +}: NotebookNodeAttributeProperties): JSX.Element => { return (
, Component: Settings, }, ], + serializedText: (attrs) => { + return attrs.id + }, }) export function sessionRecordingPlayerProps(id: SessionRecordingId): SessionRecordingPlayerProps { diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeReplayTimestamp.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeReplayTimestamp.tsx index ec49f4445d005..88db6f4395ffc 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodeReplayTimestamp.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeReplayTimestamp.tsx @@ -75,6 +75,12 @@ export const NotebookNodeReplayTimestamp = Node.create({ group: 'inline', atom: true, + serializedText: (attrs: NotebookNodeReplayTimestampAttrs): string => { + // timestamp is not a block so `getText` does not add a separator. + // we need to add it manually + return `${attrs.playbackTime ? formatTimestamp(attrs.playbackTime) : '00:00'}:\n` + }, + addAttributes() { return { playbackTime: { default: null, keepOnSplit: false }, diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeSurvey.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeSurvey.tsx new file mode 100644 index 0000000000000..d0b0cf87742b5 --- /dev/null +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeSurvey.tsx @@ -0,0 +1,149 @@ +import { createPostHogWidgetNode } from 'scenes/notebooks/Nodes/NodeWrapper' +import { FeatureFlagBasicType, NotebookNodeType, Survey, SurveyQuestionType } from '~/types' +import { BindLogic, useActions, useValues } from 'kea' +import { IconFlag, IconSurveys } from 'lib/lemon-ui/icons' +import { LemonButton, LemonDivider } from '@posthog/lemon-ui' +import { urls } from 'scenes/urls' +import { LemonSkeleton } from 'lib/lemon-ui/LemonSkeleton' +import { notebookNodeLogic } from './notebookNodeLogic' +import { JSONContent, NotebookNodeViewProps } from '../Notebook/utils' +import { buildFlagContent } from './NotebookNodeFlag' +import { defaultSurveyAppearance, surveyLogic } from 'scenes/surveys/surveyLogic' +import { StatusTag } 
from 'scenes/surveys/Surveys' +import { SurveyResult } from 'scenes/surveys/SurveyView' +import { SurveyAppearance } from 'scenes/surveys/SurveyAppearance' +import { SurveyReleaseSummary } from 'scenes/surveys/Survey' +import api from 'lib/api' + +const Component = (props: NotebookNodeViewProps): JSX.Element => { + const { id } = props.attributes + const { survey, surveyLoading, hasTargetingFlag } = useValues(surveyLogic({ id })) + const { expanded, nextNode } = useValues(notebookNodeLogic) + const { insertAfter } = useActions(notebookNodeLogic) + + return ( +
+ +
+ + {surveyLoading ? ( + + ) : ( + <> + {survey.name} + {/* survey has to exist in notebooks */} + + + )} +
+ + {expanded ? ( + <> + {survey.description && ( + <> + + {survey.description} + + )} + {!survey.start_date ? ( + <> + +
+ + +
+ {}} + /> +
+
+ + ) : ( + <> + {/* show results when the survey is running */} + +
+ +
+ + )} + + ) : null} + + +
+ {survey.linked_flag && ( + } + onClick={(e) => { + e.stopPropagation() + + if (nextNode?.type.name !== NotebookNodeType.FeatureFlag) { + insertAfter(buildFlagContent((survey.linked_flag as FeatureFlagBasicType).id)) + } + }} + disabledReason={ + nextNode?.type.name === NotebookNodeType.FeatureFlag && + 'Feature flag already exists below' + } + > + View Linked Flag + + )} +
+
+
+ ) +} + +type NotebookNodeSurveyAttributes = { + id: string +} + +export const NotebookNodeSurvey = createPostHogWidgetNode({ + nodeType: NotebookNodeType.Survey, + title: async (attributes) => { + const mountedLogic = surveyLogic.findMounted({ id: attributes.id }) + let title = mountedLogic?.values.survey.name || null + if (title === null) { + const retrievedSurvey: Survey = await api.surveys.get(attributes.id) + if (retrievedSurvey) { + title = retrievedSurvey.name + } + } + return title ? `Survey: ${title}` : 'Survey' + }, + Component, + heightEstimate: '3rem', + href: (attrs) => urls.survey(attrs.id), + resizeable: false, + attributes: { + id: {}, + }, + pasteOptions: { + find: urls.survey('') + '(.+)', + getAttributes: async (match) => { + return { id: match[1] } + }, + }, +}) + +export function buildSurveyContent(id: string): JSONContent { + return { + type: NotebookNodeType.Survey, + attrs: { id }, + } +} diff --git a/frontend/src/scenes/notebooks/Nodes/notebookNodeLogic.ts b/frontend/src/scenes/notebooks/Nodes/notebookNodeLogic.ts index ffc51d5c1280a..4d5adb4470745 100644 --- a/frontend/src/scenes/notebooks/Nodes/notebookNodeLogic.ts +++ b/frontend/src/scenes/notebooks/Nodes/notebookNodeLogic.ts @@ -15,27 +15,33 @@ import { import type { notebookNodeLogicType } from './notebookNodeLogicType' import { createContext, useContext } from 'react' import { notebookLogicType } from '../Notebook/notebookLogicType' -import { CustomNotebookNodeAttributes, JSONContent, Node, NotebookNodeWidget } from '../Notebook/utils' +import { + CustomNotebookNodeAttributes, + JSONContent, + Node, + NotebookNode, + NotebookNodeAttributeProperties, + NotebookNodeAttributes, + NotebookNodeWidget, +} from '../Notebook/utils' import { NotebookNodeType } from '~/types' import posthog from 'posthog-js' export type NotebookNodeLogicProps = { - node: Node + node: NotebookNode nodeId: string nodeType: NotebookNodeType - nodeAttributes: CustomNotebookNodeAttributes - updateAttributes: 
(attributes: CustomNotebookNodeAttributes) => void notebookLogic: BuiltLogic getPos: () => number title: string | ((attributes: CustomNotebookNodeAttributes) => Promise) resizeable: boolean | ((attributes: CustomNotebookNodeAttributes) => boolean) widgets: NotebookNodeWidget[] startExpanded: boolean -} +} & NotebookNodeAttributeProperties async function renderTitle( title: NotebookNodeLogicProps['title'], - attrs: NotebookNodeLogicProps['nodeAttributes'] + attrs: NotebookNodeLogicProps['attributes'] ): Promise { if (typeof attrs.title === 'string' && attrs.title.length > 0) { return attrs.title @@ -46,7 +52,7 @@ async function renderTitle( const computeResizeable = ( resizeable: NotebookNodeLogicProps['resizeable'], - attrs: NotebookNodeLogicProps['nodeAttributes'] + attrs: NotebookNodeLogicProps['attributes'] ): boolean => (typeof resizeable === 'function' ? resizeable(attrs) : resizeable) export const notebookNodeLogic = kea([ @@ -59,12 +65,11 @@ export const notebookNodeLogic = kea([ setResizeable: (resizeable: boolean) => ({ resizeable }), insertAfter: (content: JSONContent) => ({ content }), insertAfterLastNodeOfType: (nodeType: string, content: JSONContent) => ({ content, nodeType }), - updateAttributes: (attributes: CustomNotebookNodeAttributes) => ({ attributes }), + updateAttributes: (attributes: Partial>) => ({ attributes }), insertReplayCommentByTimestamp: (timestamp: number, sessionRecordingId: string) => ({ timestamp, sessionRecordingId, }), - setWidgetsVisible: (visible: boolean) => ({ visible }), setPreviousNode: (node: Node | null) => ({ node }), setNextNode: (node: Node | null) => ({ node }), deleteNode: true, @@ -106,22 +111,12 @@ export const notebookNodeLogic = kea([ setNextNode: (_, { node }) => node, }, ], - widgetsVisible: [ - false, - { - setWidgetsVisible: (_, { visible }) => visible, - }, - ], })), selectors({ notebookLogic: [(_, p) => [p.notebookLogic], (notebookLogic) => notebookLogic], - nodeAttributes: [(_, p) => [p.nodeAttributes], 
(nodeAttributes) => nodeAttributes], + nodeAttributes: [(_, p) => [p.attributes], (nodeAttributes) => nodeAttributes], widgets: [(_, p) => [p.widgets], (widgets) => widgets], - isShowingWidgets: [ - (s, p) => [s.widgetsVisible, p.widgets], - (widgetsVisible, widgets) => !!widgets.length && widgetsVisible, - ], }), listeners(({ actions, values, props }) => ({ @@ -175,9 +170,9 @@ export const notebookNodeLogic = kea([ afterMount(async (logic) => { logic.props.notebookLogic.actions.registerNodeLogic(logic as any) - const renderedTitle = await renderTitle(logic.props.title, logic.props.nodeAttributes) + const renderedTitle = await renderTitle(logic.props.title, logic.props.attributes) logic.actions.setTitle(renderedTitle) - const resizeable = computeResizeable(logic.props.resizeable, logic.props.nodeAttributes) + const resizeable = computeResizeable(logic.props.resizeable, logic.props.attributes) logic.actions.setResizeable(resizeable) logic.actions.updateAttributes({ title: renderedTitle }) }), diff --git a/frontend/src/scenes/notebooks/Nodes/utils.tsx b/frontend/src/scenes/notebooks/Nodes/utils.tsx index 1a845f0ab7c40..c4becf3bd6d23 100644 --- a/frontend/src/scenes/notebooks/Nodes/utils.tsx +++ b/frontend/src/scenes/notebooks/Nodes/utils.tsx @@ -2,28 +2,9 @@ import { ExtendedRegExpMatchArray, NodeViewProps, PasteRule } from '@tiptap/core import posthog from 'posthog-js' import { NodeType } from '@tiptap/pm/model' import { Editor as TTEditor } from '@tiptap/core' - -export function useJsonNodeState( - attributes: NodeViewProps['node']['attrs'], - updateAttributes: NodeViewProps['updateAttributes'], - key: string -): [T, (value: T) => void] { - let value = attributes[key] - try { - value = typeof value === 'string' ? 
JSON.parse(value) : value - } catch (e) { - console.error("Couldn't parse query", e) - value = {} - } - - const setValue = (value: any): void => { - updateAttributes({ - [key]: JSON.stringify(value), - }) - } - - return [value, setValue] -} +import { CustomNotebookNodeAttributes, NotebookNodeAttributes } from '../Notebook/utils' +import { useCallback, useMemo, useRef } from 'react' +import { tryJsonParse, uuid } from 'lib/utils' export function createUrlRegex(path: string | RegExp, origin?: string): RegExp { origin = (origin || window.location.origin).replace('.', '\\.') @@ -111,3 +92,48 @@ export function selectFile(options: { contentType: string; multiple: boolean }): input.click() }) } + +export function useSyncedAttributes( + props: NodeViewProps +): [NotebookNodeAttributes, (attrs: Partial>) => void] { + const nodeId = useMemo(() => props.node.attrs.nodeId ?? uuid(), [props.node.attrs.nodeId]) + const previousNodeAttrs = useRef() + const parsedAttrs = useRef>({} as NotebookNodeAttributes) + + if (previousNodeAttrs.current !== props.node.attrs) { + const newParsedAttrs = {} + + Object.keys(props.node.attrs).forEach((key) => { + if (previousNodeAttrs.current?.[key] !== props.node.attrs[key]) { + // If changed, set it whilst trying to parse + newParsedAttrs[key] = tryJsonParse(props.node.attrs[key], props.node.attrs[key]) + } else if (parsedAttrs.current) { + // Otherwise use the old value to preserve object equality + newParsedAttrs[key] = parsedAttrs.current[key] + } + }) + + parsedAttrs.current = newParsedAttrs as NotebookNodeAttributes + parsedAttrs.current.nodeId = nodeId + } + + previousNodeAttrs.current = props.node.attrs + + const updateAttributes = useCallback( + (attrs: Partial>): void => { + // We call the update whilst json stringifying + const stringifiedAttrs = Object.keys(attrs).reduce( + (acc, x) => ({ + ...acc, + [x]: attrs[x] && typeof attrs[x] === 'object' ? 
JSON.stringify(attrs[x]) : attrs[x], + }), + {} + ) + + props.updateAttributes(stringifiedAttrs) + }, + [props.updateAttributes] + ) + + return [parsedAttrs.current, updateAttributes] +} diff --git a/frontend/src/scenes/notebooks/Notebook/BacklinkCommands.tsx b/frontend/src/scenes/notebooks/Notebook/BacklinkCommands.tsx index 49badffaf69e5..ef925ef805870 100644 --- a/frontend/src/scenes/notebooks/Notebook/BacklinkCommands.tsx +++ b/frontend/src/scenes/notebooks/Notebook/BacklinkCommands.tsx @@ -6,6 +6,7 @@ import { PluginKey } from '@tiptap/pm/state' import { Popover } from 'lib/lemon-ui/Popover' import { forwardRef } from 'react' import { + TaxonomicDefinitionTypes, TaxonomicFilterGroup, TaxonomicFilterGroupType, TaxonomicFilterLogicProps, @@ -41,18 +42,18 @@ const BacklinkCommands = forwardRef(functi const { editor } = useValues(notebookLogic) const onSelect = ( - { type }: TaxonomicFilterGroup, + group: TaxonomicFilterGroup, value: TaxonomicFilterValue, - { id, name }: { id: number; name: string } + item: TaxonomicDefinitionTypes ): void => { if (!editor) { return } const attrs = { - id: type === TaxonomicFilterGroupType.Events ? id : value, - title: name, - type: type, + id: group.type === TaxonomicFilterGroupType.Events ? 
item.id : value, + title: group.getName?.(item), + type: group.type, } editor @@ -81,6 +82,7 @@ const BacklinkCommands = forwardRef(functi TaxonomicFilterGroupType.FeatureFlags, TaxonomicFilterGroupType.Experiments, TaxonomicFilterGroupType.Dashboards, + TaxonomicFilterGroupType.Notebooks, ], optionsFromProp: undefined, popoverEnabled: true, diff --git a/frontend/src/scenes/notebooks/Notebook/Editor.tsx b/frontend/src/scenes/notebooks/Notebook/Editor.tsx index 3a270947728fb..2a41bcce88209 100644 --- a/frontend/src/scenes/notebooks/Notebook/Editor.tsx +++ b/frontend/src/scenes/notebooks/Notebook/Editor.tsx @@ -3,7 +3,7 @@ import { useActions } from 'kea' import { useCallback, useRef } from 'react' import { Editor as TTEditor } from '@tiptap/core' -import { useEditor, EditorContent } from '@tiptap/react' +import { EditorContent, useEditor } from '@tiptap/react' import { FloatingMenu } from '@tiptap/extension-floating-menu' import StarterKit from '@tiptap/starter-kit' import ExtensionPlaceholder from '@tiptap/extension-placeholder' @@ -25,10 +25,11 @@ import { lemonToast } from '@posthog/lemon-ui' import { NotebookNodeType } from '~/types' import { NotebookNodeImage } from '../Nodes/NotebookNodeImage' -import { JSONContent, NotebookEditor, EditorFocusPosition, EditorRange, Node } from './utils' +import { EditorFocusPosition, EditorRange, JSONContent, Node, NotebookEditor, textContent } from './utils' import { SlashCommandsExtension } from './SlashCommands' import { BacklinkCommandsExtension } from './BacklinkCommands' import { NotebookNodeEarlyAccessFeature } from '../Nodes/NotebookNodeEarlyAccessFeature' +import { NotebookNodeSurvey } from '../Nodes/NotebookNodeSurvey' const CustomDocument = ExtensionDocument.extend({ content: 'heading block*', @@ -92,6 +93,7 @@ export function Editor({ NotebookNodeFlag, NotebookNodeExperiment, NotebookNodeEarlyAccessFeature, + NotebookNodeSurvey, NotebookNodeImage, SlashCommandsExtension, BacklinkCommandsExtension, @@ -180,6 +182,8 
@@ export function Editor({ onCreate({ getJSON: () => editor.getJSON(), + getText: () => textContent(editor.state.doc), + getEndPosition: () => editor.state.doc.content.size, getSelectedNode: () => editor.state.doc.nodeAt(editor.state.selection.$anchor.pos), getAdjacentNodes: (pos: number) => getAdjacentNodes(editor, pos), setEditable: (editable: boolean) => queueMicrotask(() => editor.setEditable(editable, false)), @@ -197,6 +201,10 @@ export function Editor({ editor.commands.scrollIntoView() } }, + pasteContent: (position: number, text: string) => { + editor?.chain().focus().setTextSelection(position).run() + editor?.view.pasteText(text) + }, findNode: (position: number) => findNode(editor, position), findNodePositionByAttrs: (attrs: Record) => findNodePositionByAttrs(editor, attrs), nextNode: (position: number) => nextNode(editor, position), diff --git a/frontend/src/scenes/notebooks/Notebook/Notebook.scss b/frontend/src/scenes/notebooks/Notebook/Notebook.scss index 9b257616c581a..f272cacab53be 100644 --- a/frontend/src/scenes/notebooks/Notebook/Notebook.scss +++ b/frontend/src/scenes/notebooks/Notebook/Notebook.scss @@ -1,4 +1,8 @@ .Notebook { + flex: 1; + display: flex; + flex-direction: column; + .NotebookEditor { flex: 1; width: 100%; @@ -28,7 +32,15 @@ height: 0; } - > ul, + ul { + list-style-type: disc; + } + + ol { + list-style-type: decimal; + } + + ul, ol { padding-left: 1rem; @@ -36,11 +48,11 @@ p { margin-bottom: 0.2rem; } - } - } - > ul { - list-style: initial; + > p { + display: inline-block; + } + } } > pre { @@ -103,9 +115,10 @@ } &--editable { - .NotebookEditor { + .NotebookEditor .ProseMirror { // Add some padding to help clicking below the last element padding-bottom: 10rem; + flex: 1; } } @@ -137,6 +150,11 @@ position: sticky; align-self: flex-start; top: 65px; + + &__content { + max-height: calc(100vh - 220px); + overflow: auto; + } } .LemonTable__content > table > thead { diff --git 
a/frontend/src/scenes/notebooks/Notebook/Notebook.stories.tsx b/frontend/src/scenes/notebooks/Notebook/Notebook.stories.tsx index b19845fe0b917..ecceb26e1ec93 100644 --- a/frontend/src/scenes/notebooks/Notebook/Notebook.stories.tsx +++ b/frontend/src/scenes/notebooks/Notebook/Notebook.stories.tsx @@ -5,6 +5,193 @@ import { router } from 'kea-router' import { urls } from 'scenes/urls' import { App } from 'scenes/App' import notebook12345Json from './__mocks__/notebook-12345.json' +import { notebookTestTemplate } from './__mocks__/notebook-template-for-snapshot' +import { NotebookType } from '~/types' + +// a list of test cases to run, showing different types of content in notebooks +const testCases: Record = { + 'api/projects/:team_id/notebooks/text-formats': notebookTestTemplate('text-formats', [ + { + type: 'paragraph', + content: [ + { + type: 'text', + marks: [ + { + type: 'bold', + }, + ], + text: ' bold ', + }, + ], + }, + { + type: 'paragraph', + content: [ + { + type: 'text', + marks: [ + { + type: 'italic', + }, + ], + text: 'italic', + }, + ], + }, + { + type: 'paragraph', + content: [ + { + type: 'text', + marks: [ + { + type: 'bold', + }, + { + type: 'italic', + }, + ], + text: 'bold _and_ italic', + }, + ], + }, + { + type: 'paragraph', + content: [ + { + type: 'text', + marks: [ + { + type: 'code', + }, + ], + text: 'code', + }, + ], + }, + ]), + 'api/projects/:team_id/notebooks/headings': notebookTestTemplate('headings', [ + { + type: 'heading', + attrs: { + level: 1, + }, + content: [ + { + type: 'text', + text: 'Heading 1', + }, + ], + }, + { + type: 'heading', + attrs: { + level: 2, + }, + content: [ + { + type: 'text', + text: 'Heading 2', + }, + ], + }, + { + type: 'heading', + attrs: { + level: 3, + }, + content: [ + { + type: 'text', + text: 'Heading 3', + }, + ], + }, + ]), + 'api/projects/:team_id/notebooks/numbered-list': notebookTestTemplate('numbered-list', [ + { + type: 'orderedList', + content: [ + { + type: 'listItem', + content: [ + { 
+ type: 'paragraph', + content: [ + { + type: 'text', + text: 'first item', + }, + ], + }, + ], + }, + { + type: 'listItem', + content: [ + { + type: 'paragraph', + content: [ + { + type: 'text', + text: 'second item', + }, + ], + }, + ], + }, + ], + }, + ]), + 'api/projects/:team_id/notebooks/bullet-list': notebookTestTemplate('bullet-list', [ + { + type: 'bulletList', + content: [ + { + type: 'listItem', + content: [ + { + type: 'paragraph', + content: [ + { + type: 'text', + text: 'first item', + }, + ], + }, + ], + }, + { + type: 'listItem', + content: [ + { + type: 'paragraph', + content: [ + { + type: 'text', + text: 'second item', + }, + ], + }, + ], + }, + ], + }, + ]), + 'api/projects/:team_id/notebooks/recordings-playlist': notebookTestTemplate('recordings-playlist', [ + { + type: 'ph-recording-playlist', + attrs: { + height: null, + title: 'Session replays', + nodeId: '41faad12-499f-4a4b-95f7-3a36601317cc', + filters: + '{"session_recording_duration":{"type":"recording","key":"duration","value":3600,"operator":"gt"},"properties":[],"events":[],"actions":[],"date_from":"-7d","date_to":null}', + }, + }, + ]), +} const meta: Meta = { title: 'Scenes-App/Notebooks', @@ -15,6 +202,25 @@ const meta: Meta = { }, decorators: [ mswDecorator({ + post: { + 'api/projects/:team_id/query': { + clickhouse: + "SELECT nullIf(nullIf(events.`$session_id`, ''), 'null') AS session_id, any(events.properties) AS properties FROM events WHERE and(equals(events.team_id, 1), in(events.event, [%(hogql_val_0)s, %(hogql_val_1)s]), ifNull(in(session_id, [%(hogql_val_2)s]), 0), ifNull(greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_3)s), %(hogql_val_4)s), 0), ifNull(lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_5)s), %(hogql_val_6)s), 0)) GROUP BY session_id LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=True", + columns: ['session_id', 'properties'], + hogql: "SELECT properties.$session_id AS session_id, any(properties) AS 
properties FROM events WHERE and(in(event, ['$pageview', '$autocapture']), in(session_id, ['018a8a51-a39d-7b18-897f-94054eec5f61']), greaterOrEquals(timestamp, '2023-09-11 16:55:36'), lessOrEquals(timestamp, '2023-09-13 18:07:40')) GROUP BY session_id LIMIT 100", + query: "SELECT properties.$session_id as session_id, any(properties) as properties\n FROM events\n WHERE event IN ['$pageview', '$autocapture']\n AND session_id IN ['018a8a51-a39d-7b18-897f-94054eec5f61']\n -- the timestamp range here is only to avoid querying too much of the events table\n -- we don't really care about the absolute value, \n -- but we do care about whether timezones have an odd impact\n -- so, we extend the range by a day on each side so that timezones don't cause issues\n AND timestamp >= '2023-09-11 16:55:36'\n AND timestamp <= '2023-09-13 18:07:40'\n GROUP BY session_id", + results: [ + [ + '018a8a51-a39d-7b18-897f-94054eec5f61', + '{"$os":"Mac OS X","$os_version":"10.15.7","$browser":"Chrome","$device_type":"Desktop","$current_url":"http://localhost:8000/ingestion/platform","$host":"localhost:8000","$pathname":"/ingestion/platform","$browser_version":116,"$browser_language":"en-GB","$screen_height":982,"$screen_width":1512,"$viewport_height":827,"$viewport_width":1498,"$lib":"web","$lib_version":"1.78.2","$insert_id":"249xj40dkv7x9knp","$time":1694537723.201,"distinct_id":"uLI7S0z6rWQIKAjgXhdUBplxPYymuQqxH5QbJKe2wqr","$device_id":"018a8a51-a39c-78f9-a4e4-1183f059f7cc","$user_id":"uLI7S0z6rWQIKAjgXhdUBplxPYymuQqxH5QbJKe2wqr","is_demo_project":false,"$groups":{"project":"018a8a51-9ee3-0000-0369-ff1924dcba89","organization":"018a8a51-988e-0000-d3e6-477c7cc111f1","instance":"http://localhost:8000"},"$autocapture_disabled_server_side":false,"$active_feature_flags":[],"$feature_flag_payloads":{},"realm":"hosted-clickhouse","email_service_available":false,"slack_service_available":false,"$referrer":"http://localhost:8000/signup","$referring_domain":"localhost:8000","$event_type":"click","$c
e_version":1,"token":"phc_awewGgfgakHbaSbprHllKajqoa6iP2nz7OAUou763ie","$session_id":"018a8a51-a39d-7b18-897f-94054eec5f61","$window_id":"018a8a51-a39d-7b18-897f-940673bea28c","$set_once":{"$initial_os":"Mac OS X","$initial_browser":"Chrome","$initial_device_type":"Desktop","$initial_current_url":"http://localhost:8000/ingestion/platform","$initial_pathname":"/ingestion/platform","$initial_browser_version":116,"$initial_referrer":"http://localhost:8000/signup","$initial_referring_domain":"localhost:8000"},"$sent_at":"2023-09-12T16:55:23.743000+00:00","$ip":"127.0.0.1","$group_0":"018a8a51-9ee3-0000-0369-ff1924dcba89","$group_1":"018a8a51-988e-0000-d3e6-477c7cc111f1","$group_2":"http://localhost:8000"}', + ], + ], + types: [ + ['session_id', 'Nullable(String)'], + ['properties', 'String'], + ], + }, + }, get: { 'api/projects/:team_id/notebooks': { count: 1, @@ -66,6 +272,76 @@ const meta: Meta = { ], }, 'api/projects/:team_id/notebooks/12345': notebook12345Json, + 'api/projects/:team_id/session_recordings': { + results: [ + { + id: '018a8a51-a39d-7b18-897f-94054eec5f61', + distinct_id: 'uLI7S0z6rWQIKAjgXhdUBplxPYymuQqxH5QbJKe2wqr', + viewed: true, + recording_duration: 4324, + active_seconds: 21, + inactive_seconds: 4302, + start_time: '2023-09-12T16:55:36.404000Z', + end_time: '2023-09-12T18:07:40.147000Z', + click_count: 3, + keypress_count: 0, + mouse_activity_count: 924, + console_log_count: 37, + console_warn_count: 7, + console_error_count: 9, + start_url: 'http://localhost:8000/replay/recent', + person: { + id: 1, + name: 'paul@posthog.com', + distinct_ids: [ + 'uLI7S0z6rWQIKAjgXhdUBplxPYymuQqxH5QbJKe2wqr', + '018a8a51-a39c-78f9-a4e4-1183f059f7cc', + ], + properties: { + email: 'paul@posthog.com', + $initial_os: 'Mac OS X', + $geoip_latitude: -33.8715, + $geoip_city_name: 'Sydney', + $geoip_longitude: 151.2006, + $geoip_time_zone: 'Australia/Sydney', + $initial_browser: 'Chrome', + $initial_pathname: '/', + $initial_referrer: 'http://localhost:8000/signup', + 
$geoip_postal_code: '2000', + $creator_event_uuid: '018a8a51-a39d-7b18-897f-9407e795547b', + $geoip_country_code: 'AU', + $geoip_country_name: 'Australia', + $initial_current_url: 'http://localhost:8000/', + $initial_device_type: 'Desktop', + $geoip_continent_code: 'OC', + $geoip_continent_name: 'Oceania', + $initial_geoip_latitude: -33.8715, + $initial_browser_version: 116, + $initial_geoip_city_name: 'Sydney', + $initial_geoip_longitude: 151.2006, + $initial_geoip_time_zone: 'Australia/Sydney', + $geoip_subdivision_1_code: 'NSW', + $geoip_subdivision_1_name: 'New South Wales', + $initial_referring_domain: 'localhost:8000', + $initial_geoip_postal_code: '2000', + $initial_geoip_country_code: 'AU', + $initial_geoip_country_name: 'Australia', + $initial_geoip_continent_code: 'OC', + $initial_geoip_continent_name: 'Oceania', + $initial_geoip_subdivision_1_code: 'NSW', + $initial_geoip_subdivision_1_name: 'New South Wales', + }, + created_at: '2023-09-12T16:55:20.736000Z', + uuid: '018a8a51-a3d3-0000-e8fa-94621f9ddd48', + }, + storage: 'clickhouse', + pinned_count: 0, + }, + ], + has_next: false, + version: 3, + }, + ...testCases, }, }), ], @@ -78,6 +354,41 @@ export function NotebooksList(): JSX.Element { return } +export function Headings(): JSX.Element { + useEffect(() => { + router.actions.push(urls.notebook('headings')) + }, []) + return +} + +export function TextFormats(): JSX.Element { + useEffect(() => { + router.actions.push(urls.notebook('text-formats')) + }, []) + return +} + +export function NumberedList(): JSX.Element { + useEffect(() => { + router.actions.push(urls.notebook('numbered-list')) + }, []) + return +} + +export function BulletList(): JSX.Element { + useEffect(() => { + router.actions.push(urls.notebook('bullet-list')) + }, []) + return +} + +export function RecordingsPlaylist(): JSX.Element { + useEffect(() => { + router.actions.push(urls.notebook('recordings-playlist')) + }, []) + return +} + export function TextOnlyNotebook(): JSX.Element { 
useEffect(() => { router.actions.push(urls.notebook('12345')) diff --git a/frontend/src/scenes/notebooks/Notebook/Notebook.tsx b/frontend/src/scenes/notebooks/Notebook/Notebook.tsx index afc5906272451..6e9adb4825f7b 100644 --- a/frontend/src/scenes/notebooks/Notebook/Notebook.tsx +++ b/frontend/src/scenes/notebooks/Notebook/Notebook.tsx @@ -13,9 +13,8 @@ import { NotebookConflictWarning } from './NotebookConflictWarning' import { NotebookLoadingState } from './NotebookLoadingState' import { Editor } from './Editor' import { EditorFocusPosition } from './utils' -import { FlaggedFeature } from 'lib/components/FlaggedFeature' -import { FEATURE_FLAGS } from 'lib/constants' import { NotebookSidebar } from './NotebookSidebar' +import { ErrorBoundary } from '~/layout/ErrorBoundary' export type NotebookProps = { shortId: string @@ -98,26 +97,26 @@ export function Notebook({ shortId, editable = false, initialAutofocus = null }: ) : null}
- - - - { - if (node.type.name === 'heading' && node.attrs.level === 1) { - return `Untitled - maybe.. "${headingPlaceholder}"` - } + + + { + if (node.type.name === 'heading' && node.attrs.level === 1) { + return `Untitled - maybe.. "${headingPlaceholder}"` + } - if (node.type.name === 'heading') { - return `Heading ${node.attrs.level}` - } + if (node.type.name === 'heading') { + return `Heading ${node.attrs.level}` + } - return '' - }} - /> + return '' + }} + /> +
diff --git a/frontend/src/scenes/notebooks/Notebook/NotebookListMini.tsx b/frontend/src/scenes/notebooks/Notebook/NotebookListMini.tsx index 7eee4473e915d..e3e525a8cda9d 100644 --- a/frontend/src/scenes/notebooks/Notebook/NotebookListMini.tsx +++ b/frontend/src/scenes/notebooks/Notebook/NotebookListMini.tsx @@ -1,10 +1,9 @@ import { LemonButton } from '@posthog/lemon-ui' -import { useActions, useValues } from 'kea' -import { IconPlus, IconJournal } from 'lib/lemon-ui/icons' +import { useValues } from 'kea' +import { IconJournal } from 'lib/lemon-ui/icons' import { notebooksModel } from '~/models/notebooksModel' -import { LemonMenu, LemonMenuItems } from 'lib/lemon-ui/LemonMenu' import { NotebookListItemType } from '~/types' -import { useCallback } from 'react' +import { NotebookSelectPopover } from '../NotebookSelectButton/NotebookSelectButton' export type NotebookListMiniProps = { selectedNotebookId?: string @@ -12,19 +11,8 @@ export type NotebookListMiniProps = { onNewNotebook?: () => void } -export function NotebookListMini({ - selectedNotebookId, - onSelectNotebook, - onNewNotebook, -}: NotebookListMiniProps): JSX.Element { - const { notebooks, notebookTemplates, notebooksLoading, scratchpadNotebook } = useValues(notebooksModel) - const { loadNotebooks } = useActions(notebooksModel) - - const onVisibilityChange = useCallback((visible: boolean): void => { - if (visible && !notebooksLoading) { - loadNotebooks() - } - }, []) +export function NotebookListMini({ selectedNotebookId }: NotebookListMiniProps): JSX.Element { + const { notebooks, notebookTemplates } = useValues(notebooksModel) const selectedTitle = selectedNotebookId === 'scratchpad' @@ -33,51 +21,11 @@ export function NotebookListMini({ notebooks.find((notebook) => notebook.short_id === selectedNotebookId)?.title || 'Untitled' - const items: LemonMenuItems = [ - { - items: [ - { - label: 'Scratchpad', - onClick: () => onSelectNotebook(scratchpadNotebook), - active: selectedNotebookId === 'scratchpad', - 
}, - ], - }, - { - items: notebooks.length - ? notebooks.map((notebook) => ({ - label: notebook.title ?? `Untitled (${notebook.short_id})`, - onClick: () => onSelectNotebook(notebook), - active: notebook.short_id === selectedNotebookId, - })) - : [ - { - label: notebooksLoading ? 'Loading notebooks...' : 'No notebooks', - disabledReason: 'No notebooks found', - onClick: () => {}, - }, - ], - }, - ] - - if (onNewNotebook) { - items.push({ - items: [ - { - label: 'New notebook', - status: 'primary', - icon: , - onClick: () => onNewNotebook(), - }, - ], - }) - } - return ( - + } status="primary-alt" sideIcon={null}> {selectedTitle || 'Notebooks'} - + ) } diff --git a/frontend/src/scenes/notebooks/Notebook/NotebookPopover.scss b/frontend/src/scenes/notebooks/Notebook/NotebookPopover.scss index fbf999fa54ede..634259e3389d9 100644 --- a/frontend/src/scenes/notebooks/Notebook/NotebookPopover.scss +++ b/frontend/src/scenes/notebooks/Notebook/NotebookPopover.scss @@ -32,18 +32,29 @@ max-width: calc(100vw - 2rem); width: 50rem; // This will be controlable pointer-events: all; - display: flex; flex-direction: column; - border-radius: var(--radius); - background-color: var(--bg-light); - border: 1px solid var(--border-3000); - transition: transform var(--notebook-popover-transition-properties), box-shadow 150ms linear, - width var(--notebook-popover-transition-properties); // Transition properties that are overwritten transform: translateX(calc(100% + 1rem)); - box-shadow: 0px 16px 16px rgba(0, 0, 0, 0); + transition: transform var(--notebook-popover-transition-properties), + width var(--notebook-popover-transition-properties); + + > * + * { + margin-top: 1rem; + } + + .NotebookPopover__content__card { + flex: 1; + display: flex; + flex-direction: column; + border-radius: var(--radius); + background-color: var(--bg-light); + border: 1px solid var(--border-3000); + box-shadow: 0px 16px 16px rgba(0, 0, 0, 0); + transition: box-shadow 150ms linear; + overflow: hidden; + } } 
&--visible { @@ -54,14 +65,19 @@ .NotebookPopover__content { transform: translateX(0); - box-shadow: 0px 16px 16px rgba(0, 0, 0, 0.15); + .NotebookPopover__content__card { + box-shadow: 0px 16px 16px rgba(0, 0, 0, 0.15); + } } } &--peek { .NotebookPopover__content { + transition: none; // NOTE: This shouldn't be none as it affects other transitions transform: translateX(calc(100% - 5rem)); - box-shadow: 0px 16px 16px rgba(0, 0, 0, 0.15); + .NotebookPopover__content__card { + box-shadow: 0px 16px 16px rgba(0, 0, 0, 0.15); + } } } @@ -78,3 +94,56 @@ } } } + +.NotebookPopoverDropzone { + box-shadow: 0px 16px 16px rgba(0, 0, 0, 0.15); + border: 2px dashed var(--border-3000); + border-radius: var(--radius); + + transition: all 150ms; + height: 4rem; + backdrop-filter: blur(5px); + display: flex; + + .NotebookPopoverDropzone__message { + flex: 1; + display: flex; + flex-direction: column; + justify-content: center; + align-items: center; + font-weight: 700; + font-size: 1rem; + color: var(--muted-alt); + text-align: center; + pointer-events: none; + background-color: var(--bg-light); + padding: 1rem; + opacity: 0.75; + transition: all 150ms; + } + + .NotebookPopoverDropzone__dropped { + overflow: hidden; + flex: 1; + display: flex; + flex-direction: column; + } + + &--active { + border-color: var(--primary); + height: 8rem; + + .NotebookPopoverDropzone__message { + opacity: 1; + } + } + + &--dropped { + padding: 1rem; + border-color: var(--primary); + background-color: var(--bg-light); + height: 100%; + justify-content: flex-start; + align-items: initial; + } +} diff --git a/frontend/src/scenes/notebooks/Notebook/NotebookPopover.tsx b/frontend/src/scenes/notebooks/Notebook/NotebookPopover.tsx index 9a13bef4e9dc0..1e63357eb209a 100644 --- a/frontend/src/scenes/notebooks/Notebook/NotebookPopover.tsx +++ b/frontend/src/scenes/notebooks/Notebook/NotebookPopover.tsx @@ -12,13 +12,84 @@ import { notebooksModel } from '~/models/notebooksModel' import { NotebookExpandButton, 
NotebookSyncInfo } from './NotebookMeta' import { notebookLogic } from './notebookLogic' import { urls } from 'scenes/urls' +import { NotebookPopoverDropzone } from './NotebookPopoverDropzone' -export function NotebookPopover(): JSX.Element { - const { visibility, shownAtLeastOnce, fullScreen, selectedNotebook, initialAutofocus, dropListeners } = +export function NotebookPopoverCard(): JSX.Element | null { + const { visibility, shownAtLeastOnce, fullScreen, selectedNotebook, initialAutofocus, droppedResource } = useValues(notebookPopoverLogic) - const { setVisibility, setFullScreen, selectNotebook, setElementRef } = useActions(notebookPopoverLogic) + const { setVisibility, setFullScreen, selectNotebook } = useActions(notebookPopoverLogic) const { createNotebook } = useActions(notebooksModel) - const { notebook, isShowingSidebar } = useValues(notebookLogic({ shortId: selectedNotebook })) + const { notebook } = useValues(notebookLogic({ shortId: selectedNotebook })) + + const editable = visibility !== 'hidden' && !notebook?.is_template + + if (droppedResource) { + return null + } + return ( +
+
+ + selectNotebook(notebook.short_id)} + onNewNotebook={() => createNotebook()} + /> + + + {selectedNotebook && } + + setVisibility('hidden')} + status="primary-alt" + icon={} + tooltip="Go to Notebook" + tooltipPlacement="left" + /> + + + + setFullScreen(!fullScreen)} + status="primary-alt" + active={fullScreen} + icon={} + tooltip="Toggle full screen" + tooltipPlacement="left" + /> + + setVisibility('hidden')} + status="primary-alt" + icon={} + tooltip="Hide Notebook Sidebar" + tooltipPlacement="left" + /> + +
+ +
+ {shownAtLeastOnce && ( + + )} +
+
+ ) +} + +export function NotebookPopover(): JSX.Element { + const { visibility, fullScreen, selectedNotebook, dropProperties } = useValues(notebookPopoverLogic) + const { setVisibility, setFullScreen, setElementRef } = useActions(notebookPopoverLogic) + const { isShowingSidebar } = useValues(notebookLogic({ shortId: selectedNotebook })) const ref = useRef(null) @@ -45,8 +116,6 @@ export function NotebookPopover(): JSX.Element { } }, [ref.current]) - const isEditable = visibility !== 'hidden' && !notebook?.is_template - return (
setVisibility('visible') : undefined} - {...dropListeners} + {...dropProperties} > -
- - selectNotebook(notebook.short_id)} - onNewNotebook={() => createNotebook()} - /> - - - {selectedNotebook && } - - setVisibility('hidden')} - status="primary-alt" - icon={} - tooltip="Go to Notebook" - tooltipPlacement="left" - /> - - - - setFullScreen(!fullScreen)} - status="primary-alt" - active={fullScreen} - icon={} - tooltip="Toggle full screen" - tooltipPlacement="left" - /> - - setVisibility('hidden')} - status="primary-alt" - icon={} - tooltip="Hide Notebook Sidebar" - tooltipPlacement="left" - /> - -
- -
- {shownAtLeastOnce && ( - - )} -
+ +
) diff --git a/frontend/src/scenes/notebooks/Notebook/NotebookPopoverDropzone.tsx b/frontend/src/scenes/notebooks/Notebook/NotebookPopoverDropzone.tsx new file mode 100644 index 0000000000000..7e676a9c2d6b4 --- /dev/null +++ b/frontend/src/scenes/notebooks/Notebook/NotebookPopoverDropzone.tsx @@ -0,0 +1,77 @@ +import clsx from 'clsx' +import { DragEventHandler, useState } from 'react' +import { notebookPopoverLogic } from './notebookPopoverLogic' +import { useActions, useValues } from 'kea' +import { NotebookNodeType } from '~/types' +import { NotebookSelectList } from '../NotebookSelectButton/NotebookSelectButton' +import { notebookLogicType } from './notebookLogicType' +import { LemonButton } from '@posthog/lemon-ui' + +export function NotebookPopoverDropzone(): JSX.Element | null { + const [isDragActive, setIsDragActive] = useState(false) + + const { dropMode, droppedResource } = useValues(notebookPopoverLogic) + const { setDroppedResource } = useActions(notebookPopoverLogic) + + const onDrop: DragEventHandler = (event) => { + event.preventDefault() + setIsDragActive(false) + + if (!event.dataTransfer) { + return null + } + + const text = event.dataTransfer.getData('text/plain') + const node = event.dataTransfer.getData('node') + const properties = event.dataTransfer.getData('properties') + + setDroppedResource( + node + ? { + type: node as NotebookNodeType, + attrs: properties ? JSON.parse(properties) : {}, + } + : text + ) + } + + const onNotebookOpened = (notebookLogic: notebookLogicType): void => { + setDroppedResource(null) + if (droppedResource) { + typeof droppedResource !== 'string' + ? notebookLogic.actions.insertAfterLastNode(droppedResource) + : notebookLogic.actions.pasteAfterLastNode(droppedResource) + } + } + + if (!dropMode && !droppedResource) { + return null + } + + return ( +
setIsDragActive(true)} + onDragLeave={() => setIsDragActive(false)} + onDragOver={(e) => e.preventDefault()} + onDrop={onDrop} + > + {droppedResource ? ( +
+
+

Add dropped resource to...

+ setDroppedResource(null)}> + Cancel + +
+ +
+ ) : ( +
Drop here for a different Notebook
+ )} +
+ ) +} diff --git a/frontend/src/scenes/notebooks/Notebook/NotebookSidebar.tsx b/frontend/src/scenes/notebooks/Notebook/NotebookSidebar.tsx index bdef0cbe9507f..1dd9459879a42 100644 --- a/frontend/src/scenes/notebooks/Notebook/NotebookSidebar.tsx +++ b/frontend/src/scenes/notebooks/Notebook/NotebookSidebar.tsx @@ -6,6 +6,7 @@ import { notebookNodeLogicType } from '../Nodes/notebookNodeLogicType' export const NotebookSidebar = (): JSX.Element | null => { const { selectedNodeLogic, isShowingSidebar, isEditable } = useValues(notebookLogic) + const { setIsShowingSidebar } = useActions(notebookLogic) if (!isEditable) { return null @@ -17,24 +18,32 @@ export const NotebookSidebar = (): JSX.Element | null => { 'NotebookSidebar--showing': isShowingSidebar, })} > -
{selectedNodeLogic && }
+
+ {selectedNodeLogic && isShowingSidebar && ( + setIsShowingSidebar(false)} /> + )} +
) } -export const Widgets = ({ logic }: { logic: BuiltLogic }): JSX.Element | null => { - const { widgets, nodeAttributes, isShowingWidgets } = useValues(logic) - const { updateAttributes, setWidgetsVisible } = useActions(logic) - - if (!isShowingWidgets) { - return null - } +export const Widgets = ({ + logic, + onClose, +}: { + logic: BuiltLogic + onClose: () => void +}): JSX.Element | null => { + const { widgets, nodeAttributes } = useValues(logic) + const { updateAttributes } = useActions(logic) return (
{widgets.map(({ key, label, Component }) => ( - setWidgetsVisible(false)}> - + +
+ +
))}
diff --git a/frontend/src/scenes/notebooks/Notebook/SlashCommands.tsx b/frontend/src/scenes/notebooks/Notebook/SlashCommands.tsx index 5ecd3b7951a2b..87d5ee8c1e5c2 100644 --- a/frontend/src/scenes/notebooks/Notebook/SlashCommands.tsx +++ b/frontend/src/scenes/notebooks/Notebook/SlashCommands.tsx @@ -3,7 +3,19 @@ import Suggestion from '@tiptap/suggestion' import { ReactRenderer } from '@tiptap/react' import { LemonButton, LemonDivider, lemonToast } from '@posthog/lemon-ui' -import { IconCohort, IconQueryEditor, IconRecording, IconTableChart, IconUploadFile } from 'lib/lemon-ui/icons' +import { + IconCohort, + IconRecording, + IconTableChart, + IconUploadFile, + InsightSQLIcon, + InsightsFunnelsIcon, + InsightsLifecycleIcon, + InsightsPathsIcon, + InsightsRetentionIcon, + InsightsStickinessIcon, + InsightsTrendsIcon, +} from 'lib/lemon-ui/icons' import { forwardRef, useCallback, useEffect, useImperativeHandle, useMemo, useState } from 'react' import { EditorCommands, EditorRange } from './utils' import { NotebookNodeType } from '~/types' @@ -57,10 +69,179 @@ const TEXT_CONTROLS: SlashCommandsItem[] = [ ] const SLASH_COMMANDS: SlashCommandsItem[] = [ + { + title: 'Trend', + search: 'trend insight', + icon: , + command: (chain) => + chain.insertContent({ + type: NotebookNodeType.Query, + attrs: { + query: { + kind: 'InsightVizNode', + source: { + kind: 'TrendsQuery', + filterTestAccounts: false, + series: [ + { + kind: 'EventsNode', + event: '$pageview', + name: '$pageview', + math: 'total', + }, + ], + interval: 'day', + trendsFilter: { + display: 'ActionsLineGraph', + }, + }, + }, + }, + }), + }, + { + title: 'Funnel', + search: 'funnel insight', + icon: , + command: (chain) => + chain.insertContent({ + type: NotebookNodeType.Query, + attrs: { + query: { + kind: 'InsightVizNode', + source: { + kind: 'FunnelsQuery', + series: [ + { + kind: 'EventsNode', + name: '$pageview', + event: '$pageview', + }, + { + kind: 'EventsNode', + name: '$pageview', + event: 
'$pageview', + }, + ], + funnelsFilter: { + funnel_viz_type: 'steps', + }, + }, + }, + }, + }), + }, + { + title: 'Retention', + search: 'retention insight', + icon: , + command: (chain) => + chain.insertContent({ + type: NotebookNodeType.Query, + attrs: { + query: { + kind: 'InsightVizNode', + source: { + kind: 'RetentionQuery', + retentionFilter: { + period: 'Day', + total_intervals: 11, + target_entity: { + id: '$pageview', + name: '$pageview', + type: 'events', + }, + returning_entity: { + id: '$pageview', + name: '$pageview', + type: 'events', + }, + retention_type: 'retention_first_time', + }, + }, + }, + }, + }), + }, + { + title: 'Paths', + search: 'paths insight', + icon: , + command: (chain) => + chain.insertContent({ + type: NotebookNodeType.Query, + attrs: { + query: { + kind: 'InsightVizNode', + source: { + kind: 'PathsQuery', + pathsFilter: { + include_event_types: ['$pageview'], + }, + }, + }, + }, + }), + }, + { + title: 'Stickiness', + search: 'stickiness insight', + icon: , + command: (chain) => + chain.insertContent({ + type: NotebookNodeType.Query, + attrs: { + query: { + kind: 'InsightVizNode', + source: { + kind: 'StickinessQuery', + series: [ + { + kind: 'EventsNode', + name: '$pageview', + event: '$pageview', + math: 'total', + }, + ], + stickinessFilter: {}, + }, + }, + }, + }), + }, + { + title: 'Lifecycle', + search: 'lifecycle insight', + icon: , + command: (chain) => + chain.insertContent({ + type: NotebookNodeType.Query, + attrs: { + query: { + kind: 'InsightVizNode', + source: { + kind: 'LifecycleQuery', + series: [ + { + kind: 'EventsNode', + name: '$pageview', + event: '$pageview', + math: 'total', + }, + ], + lifecycleFilter: { + shown_as: 'Lifecycle', + }, + }, + full: true, + }, + }, + }), + }, { title: 'HogQL', search: 'sql', - icon: , + icon: , command: (chain) => chain.insertContent({ type: NotebookNodeType.Query, attrs: { query: examples['HogQLTable'] } }), }, diff --git 
a/frontend/src/scenes/notebooks/Notebook/__mocks__/notebook-template-for-snapshot.ts b/frontend/src/scenes/notebooks/Notebook/__mocks__/notebook-template-for-snapshot.ts new file mode 100644 index 0000000000000..b87917836a5db --- /dev/null +++ b/frontend/src/scenes/notebooks/Notebook/__mocks__/notebook-template-for-snapshot.ts @@ -0,0 +1,34 @@ +import { NotebookType } from '~/types' +import { MOCK_DEFAULT_BASIC_USER } from 'lib/api.mock' +import { JSONContent } from 'scenes/notebooks/Notebook/utils' + +export const notebookTestTemplate = ( + title: string = 'Notebook for snapshots', + notebookJson: JSONContent[] +): NotebookType => ({ + short_id: 'template-introduction', + title: title, + created_at: '2023-06-02T00:00:00Z', + last_modified_at: '2023-06-02T00:00:00Z', + created_by: MOCK_DEFAULT_BASIC_USER, + last_modified_by: MOCK_DEFAULT_BASIC_USER, + version: 1, + content: { + type: 'doc', + content: [ + { + type: 'heading', + attrs: { + level: 1, + }, + content: [ + { + type: 'text', + text: title, + }, + ], + }, + ...notebookJson, + ], + }, +}) diff --git a/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts b/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts index 624d3d73a300e..68cb94dbc3265 100644 --- a/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts +++ b/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts @@ -77,13 +77,18 @@ export const notebookLogic = kea([ exportJSON: true, showConflictWarning: true, onUpdateEditor: true, + setIsShowingSidebar: (showing: boolean) => ({ showing }), registerNodeLogic: (nodeLogic: BuiltLogic) => ({ nodeLogic }), unregisterNodeLogic: (nodeLogic: BuiltLogic) => ({ nodeLogic }), setEditable: (editable: boolean) => ({ editable }), scrollToSelection: true, + pasteAfterLastNode: (content: string) => ({ + content, + }), insertAfterLastNode: (content: JSONContent) => ({ content, }), + insertAfterLastNodeOfType: (nodeType: string, content: JSONContent, knownStartingPosition) => ({ content, nodeType, @@ -162,6 
+167,13 @@ export const notebookLogic = kea([ setEditable: (_, { editable }) => editable, }, ], + isShowingSidebar: [ + false, + { + setSelectedNodeId: (showing, { selectedNodeId }) => (selectedNodeId ? showing : false), + setIsShowingSidebar: (_, { showing }) => showing, + }, + ], }), loaders(({ values, props, actions }) => ({ notebook: [ @@ -174,6 +186,7 @@ export const notebookLogic = kea([ response = { ...values.scratchpadNotebook, content: {}, + text_content: null, version: 0, } } else if (props.shortId.startsWith('template-')) { @@ -206,6 +219,7 @@ export const notebookLogic = kea([ const response = await api.notebooks.update(values.notebook.short_id, { version: values.notebook.version, content: notebook.content, + text_content: values.editor?.getText() || '', title: notebook.title, }) @@ -238,6 +252,7 @@ export const notebookLogic = kea([ // We use the local content if set otherwise the notebook content. That way it supports templates, scratchpad etc. const response = await api.notebooks.create({ content: values.content || values.notebook.content, + text_content: values.editor?.getText() || '', title: values.title || values.notebook.title, }) @@ -319,7 +334,7 @@ export const notebookLogic = kea([ return ( nodeLogic.props.nodeType === type && attrEntries.every( - ([attr, value]: [string, any]) => nodeLogic.props.node.attrs?.[attr] === value + ([attr, value]: [string, any]) => nodeLogic.props.attributes?.[attr] === value ) ) }) ?? 
null @@ -327,10 +342,6 @@ export const notebookLogic = kea([ } }, ], - isShowingSidebar: [ - (s) => [s.selectedNodeLogic], - (selectedNodeLogic) => selectedNodeLogic?.values.isShowingWidgets, - ], }), sharedListeners(({ values, actions }) => ({ onNotebookChange: () => { @@ -356,6 +367,15 @@ export const notebookLogic = kea([ } ) }, + pasteAfterLastNode: async ({ content }) => { + await runWhenEditorIsReady( + () => !!values.editor, + () => { + const endPosition = values.editor?.getEndPosition() || 0 + values.editor?.pasteContent(endPosition, content) + } + ) + }, insertAfterLastNodeOfType: async ({ content, nodeType, knownStartingPosition }) => { await runWhenEditorIsReady( () => !!values.editor, @@ -417,6 +437,7 @@ export const notebookLogic = kea([ return } const jsonContent = values.editor.getJSON() + actions.setLocalContent(jsonContent) actions.onUpdateEditor() }, diff --git a/frontend/src/scenes/notebooks/Notebook/notebookPopoverLogic.ts b/frontend/src/scenes/notebooks/Notebook/notebookPopoverLogic.ts index d9699ee1c44fe..e5374ed08d8c6 100644 --- a/frontend/src/scenes/notebooks/Notebook/notebookPopoverLogic.ts +++ b/frontend/src/scenes/notebooks/Notebook/notebookPopoverLogic.ts @@ -1,13 +1,13 @@ import { actions, kea, reducers, path, listeners, selectors } from 'kea' import { urlToAction } from 'kea-router' -import { RefObject } from 'react' +import { HTMLProps, RefObject } from 'react' import posthog from 'posthog-js' import { subscriptions } from 'kea-subscriptions' import { EditorFocusPosition } from './utils' import type { notebookPopoverLogicType } from './notebookPopoverLogicType' -import { NotebookPopoverVisibility } from '~/types' +import { NotebookNodeResource, NotebookPopoverVisibility } from '~/types' export const MIN_NOTEBOOK_SIDEBAR_WIDTH = 600 @@ -21,6 +21,8 @@ export const notebookPopoverLogic = kea([ setVisibility: (visibility: NotebookPopoverVisibility) => ({ visibility }), startDropMode: true, endDropMode: true, + setDropDistance: (distance: 
number) => ({ distance }), + setDroppedResource: (resource: NotebookNodeResource | string | null) => ({ resource }), }), reducers(() => ({ @@ -70,12 +72,31 @@ export const notebookPopoverLogic = kea([ endDropMode: () => false, }, ], + dropDistance: [ + 0, + { + startDropMode: () => -1, + endDropMode: () => -1, + setDropDistance: (_, { distance }) => distance, + }, + ], + droppedResource: [ + null as NotebookNodeResource | string | null, + { + setVisibility: (state, { visibility }) => (visibility === 'hidden' ? null : state), + setDroppedResource: (_, { resource }) => resource, + }, + ], })), selectors(({ cache, actions }) => ({ - dropListeners: [ - (s) => [s.dropMode], - (dropMode): { onDragEnter?: () => void; onDragLeave?: () => void } => { + dropProperties: [ + (s) => [s.dropMode, s.visibility, s.dropDistance], + ( + dropMode, + visibility, + dropDistance + ): Pick, 'onDragEnter' | 'onDragLeave' | 'style'> => { return dropMode ? { onDragEnter: () => { @@ -93,6 +114,9 @@ export const notebookPopoverLogic = kea([ actions.setVisibility('peek') } }, + style: { + transform: visibility === 'peek' ? 
`translateX(${(1 - dropDistance) * 100}%)` : undefined, + }, } : {} }, @@ -110,12 +134,25 @@ export const notebookPopoverLogic = kea([ listeners(({ cache, actions, values }) => ({ startDropMode: () => { cache.dragEntercount = 0 + cache.dragStart = null actions.setVisibility('peek') + + cache.dragListener = (event: MouseEvent) => { + if (!cache.dragStart) { + cache.dragStart = event.pageX + } + + // The drop distance is the percentage between where the drag started and where it now is + const dropDistance = (event.pageX - cache.dragStart) / window.innerWidth + actions.setDropDistance(dropDistance) + } + window.addEventListener('drag', cache.dragListener) }, endDropMode: () => { if (values.visibility === 'peek') { actions.setVisibility('hidden') } + window.removeEventListener('drag', cache.dragListener) }, })), diff --git a/frontend/src/scenes/notebooks/Notebook/utils.ts b/frontend/src/scenes/notebooks/Notebook/utils.ts index b1038ad4147a3..ed78f61d20f89 100644 --- a/frontend/src/scenes/notebooks/Notebook/utils.ts +++ b/frontend/src/scenes/notebooks/Notebook/utils.ts @@ -6,6 +6,7 @@ import { getText, JSONContent as TTJSONContent, Range as EditorRange, + TextSerializer, } from '@tiptap/core' import { Node as PMNode } from '@tiptap/pm/model' import { NodeViewProps } from '@tiptap/react' @@ -28,29 +29,33 @@ export type NotebookNodeAttributes = T & height?: string | number } -type NotebookNode = Omit & { - attrs: NotebookNodeAttributes -} +// NOTE: Pushes users to use the parsed "attributes" instead +export type NotebookNode = Omit -export type NotebookNodeWidgetSettings = { +export type NotebookNodeAttributeProperties = { attributes: NotebookNodeAttributes - updateAttributes: (attributes: Partial) => void + updateAttributes: (attributes: Partial>) => void } -export type NotebookNodeViewProps = Omit & { - node: NotebookNode -} +export type NotebookNodeViewProps = Omit< + NodeViewProps, + 'node' | 'updateAttributes' +> & + NotebookNodeAttributeProperties & { + node: 
NotebookNode + } export type NotebookNodeWidget = { key: string label: string - icon: JSX.Element - // using 'any' here shouldn't be necessary but I couldn't figure out how to set a generic on the notebookNodeLogic props - Component: ({ attributes, updateAttributes }: NotebookNodeWidgetSettings) => JSX.Element + // using 'any' here shouldn't be necessary but, I couldn't figure out how to set a generic on the notebookNodeLogic props + Component: ({ attributes, updateAttributes }: NotebookNodeAttributeProperties) => JSX.Element } export interface NotebookEditor { getJSON: () => JSONContent + getText: () => string + getEndPosition: () => number getSelectedNode: () => Node | null getAdjacentNodes: (pos: number) => { previous: Node | null; next: Node | null } setEditable: (editable: boolean) => void @@ -62,6 +67,7 @@ export interface NotebookEditor { deleteRange: (range: EditorRange) => EditorCommands insertContent: (content: JSONContent) => void insertContentAfterNode: (position: number, content: JSONContent) => void + pasteContent: (position: number, text: string) => void findNode: (position: number) => Node | null findNodePositionByAttrs: (attrs: Record) => any nextNode: (position: number) => { node: Node; position: number } | null @@ -83,12 +89,39 @@ export const isCurrentNodeEmpty = (editor: TTEditor): boolean => { return false } -const textContent = (node: any): string => { +export const textContent = (node: any): string => { + // we've extended the node schema to support a custom serializedText function + // each custom node type needs to implement this function, or have an alternative in the map below + const customOrTitleSerializer: TextSerializer = (props): string => { + // TipTap chooses whether to add a separator based on a couple of factors + // but, we always want a separator since this text is for search purposes + const serializedText = props.node.type.spec.serializedText(props.node.attrs) || props.node.attrs?.title || '' + if (serializedText.length > 0 
&& serializedText[serializedText.length - 1] !== '\n') { + return serializedText + '\n' + } + return serializedText + } + + // we want the type system to complain if we forget to add a custom serializer + const customNodeTextSerializers: Record = { + 'ph-backlink': customOrTitleSerializer, + 'ph-early-access-feature': customOrTitleSerializer, + 'ph-experiment': customOrTitleSerializer, + 'ph-feature-flag': customOrTitleSerializer, + 'ph-feature-flag-code-example': customOrTitleSerializer, + 'ph-image': customOrTitleSerializer, + 'ph-insight': customOrTitleSerializer, + 'ph-person': customOrTitleSerializer, + 'ph-query': customOrTitleSerializer, + 'ph-recording': customOrTitleSerializer, + 'ph-recording-playlist': customOrTitleSerializer, + 'ph-replay-timestamp': customOrTitleSerializer, + 'ph-survey': customOrTitleSerializer, + } + return getText(node, { - blockSeparator: ' ', - textSerializers: { - [NotebookNodeType.ReplayTimestamp]: ({ node }) => `${node.attrs.playbackTime || '00:00'}: `, - }, + blockSeparator: '\n', + textSerializers: customNodeTextSerializers, }) } diff --git a/frontend/src/scenes/notebooks/NotebookAddButton/NotebookAddButton.tsx b/frontend/src/scenes/notebooks/NotebookAddButton/NotebookAddButton.tsx deleted file mode 100644 index 93cf098ec966a..0000000000000 --- a/frontend/src/scenes/notebooks/NotebookAddButton/NotebookAddButton.tsx +++ /dev/null @@ -1,225 +0,0 @@ -import { LemonButton, LemonButtonProps } from 'lib/lemon-ui/LemonButton' - -import { IconJournalPlus, IconPlus, IconWithCount } from 'lib/lemon-ui/icons' -import { - NotebookAddButtonLogicProps, - notebookAddButtonLogic, -} from 'scenes/notebooks/NotebookAddButton/notebookAddButtonLogic' -import { BindLogic, BuiltLogic, useActions, useValues } from 'kea' -import { LemonMenuProps } from 'lib/lemon-ui/LemonMenu/LemonMenu' -import { dayjs } from 'lib/dayjs' -import { NotebookListItemType, NotebookTarget } from '~/types' -import { notebooksModel, openNotebook } from 
'~/models/notebooksModel' -import { useNotebookNode } from 'scenes/notebooks/Nodes/notebookNodeLogic' -import { Popover } from 'lib/lemon-ui/Popover' -import { LemonInput } from 'lib/lemon-ui/LemonInput/LemonInput' -import { LemonDivider } from 'lib/lemon-ui/LemonDivider' -import { notebookLogicType } from '../Notebook/notebookLogicType' -import { notebookNodeLogicType } from '../Nodes/notebookNodeLogicType' -import { FlaggedFeature } from 'lib/components/FlaggedFeature' -import { FEATURE_FLAGS } from 'lib/constants' - -type NotebookAddButtonProps = NotebookAddButtonLogicProps & - Omit & - Pick & { - newNotebookTitle?: string - onNotebookOpened?: ( - notebookLogic: BuiltLogic, - nodeLogic?: BuiltLogic - ) => void - onClick?: () => void - } - -function NotebooksChoiceList(props: { - notebooks: NotebookListItemType[] - emptyState: string - onClick: (notebookShortId: NotebookListItemType['short_id']) => void -}): JSX.Element { - return ( -
- {props.notebooks.length === 0 ? ( -
{props.emptyState}
- ) : ( - props.notebooks.map((notebook, i) => { - return ( - props.onClick(notebook.short_id)}> - {notebook.title || `Untitled (${notebook.short_id})`} - - ) - }) - )} -
- ) -} - -function NotebooksChoicePopoverBody(props: NotebookAddButtonProps): JSX.Element { - const { notebooksLoading, containingNotebooks, allNotebooks, searchQuery } = useValues(notebookAddButtonLogic) - const { setShowPopover } = useActions(notebookAddButtonLogic) - - const openAndAddToNotebook = async (notebookShortId: string, exists: boolean): Promise => { - await openNotebook(notebookShortId, NotebookTarget.Popover, null, (theNotebookLogic) => { - if (!exists) { - theNotebookLogic.actions.insertAfterLastNode([props.resource]) - } - props.onNotebookOpened?.(theNotebookLogic) - }) - } - - if (notebooksLoading || (allNotebooks.length === 0 && containingNotebooks.length === 0)) { - return ( -
- {notebooksLoading ? ( - 'Loading...' - ) : searchQuery.length ? ( - <>No matching notebooks - ) : ( - <>You have no notebooks - )} -
- ) - } - - return ( - <> - {containingNotebooks.length ? ( - <> -
Continue in
- { - // notebook comment logic doesn't know anything about backend filtering 🤔 - return ( - searchQuery.length === 0 || - notebook.title?.toLowerCase().includes(searchQuery.toLowerCase()) - ) - })} - emptyState={searchQuery.length ? 'No matching notebooks' : 'Not already in any notebooks'} - onClick={async (notebookShortId) => { - setShowPopover(false) - await openAndAddToNotebook(notebookShortId, true) - }} - /> - - ) : null} - {allNotebooks.length > containingNotebooks.length && ( - <> -
Add to
- { - // TODO follow-up on filtering after https://github.com/PostHog/posthog/pull/17027 - const isInExisting = containingNotebooks.some( - (containingNotebook) => containingNotebook.short_id === notebook.short_id - ) - return ( - !isInExisting && - (searchQuery.length === 0 || - notebook.title?.toLowerCase().includes(searchQuery.toLowerCase())) - ) - })} - emptyState={searchQuery.length ? 'No matching notebooks' : "You don't have any notebooks"} - onClick={async (notebookShortId) => { - setShowPopover(false) - await openAndAddToNotebook(notebookShortId, false) - }} - /> - - )} - - ) -} - -function NotebookAddButtonPopover({ - // so we can pass props to the button below, without passing visible to it - visible, - ...props -}: NotebookAddButtonProps): JSX.Element { - const { resource, newNotebookTitle, children } = props - const logic = notebookAddButtonLogic({ ...props, visible }) - const { showPopover, notebooksLoading, containingNotebooks, searchQuery } = useValues(logic) - const { setShowPopover, setSearchQuery, loadContainingNotebooks } = useActions(logic) - const { createNotebook } = useActions(notebooksModel) - - const openNewNotebook = (): void => { - const title = newNotebookTitle ?? `Notes ${dayjs().format('DD/MM')}` - - createNotebook(title, NotebookTarget.Popover, [resource], (theNotebookLogic) => { - props.onNotebookOpened?.(theNotebookLogic) - loadContainingNotebooks() - }) - - setShowPopover(false) - } - - return ( - - { - setShowPopover(false) - }} - actionable - overlay={ -
- setSearchQuery(s)} - fullWidth - disabled={notebooksLoading} - /> - -
- - - -
- - } onClick={openNewNotebook}> - New notebook - -
- } - > - } - sideIcon={null} - {...props} - active={showPopover} - loading={notebooksLoading} - onClick={() => { - props.onClick?.() - setShowPopover(!showPopover) - }} - data-attr={'notebooks-add-button'} - > - {children ?? 'Add to notebook'} - -
-
- ) -} - -export function NotebookAddButton({ ...props }: NotebookAddButtonProps): JSX.Element { - // if nodeLogic is available then the button is on a resource that _is already and currently in a notebook_ - const nodeLogic = useNotebookNode() - - return ( - - {nodeLogic ? ( - } - data-attr={'notebooks-add-button-in-a-notebook'} - {...props} - onClick={() => { - props.onClick?.() - props.onNotebookOpened?.(nodeLogic.props.notebookLogic, nodeLogic) - }} - > - {props.children ?? 'Add to notebook'} - - ) : ( - - )} - - ) -} diff --git a/frontend/src/scenes/notebooks/NotebookCommentButton/notebookCommentButtonLogic.ts b/frontend/src/scenes/notebooks/NotebookCommentButton/notebookCommentButtonLogic.ts deleted file mode 100644 index 519f41368af29..0000000000000 --- a/frontend/src/scenes/notebooks/NotebookCommentButton/notebookCommentButtonLogic.ts +++ /dev/null @@ -1,84 +0,0 @@ -import { actions, events, kea, key, listeners, path, props, reducers, selectors } from 'kea' -import { loaders } from 'kea-loaders' -import { NotebookListItemType, NotebookNodeType } from '~/types' - -import api from 'lib/api' - -import type { notebookCommentButtonLogicType } from './notebookCommentButtonLogicType' - -export interface NotebookCommentButtonProps { - sessionRecordingId: string - startVisible: boolean -} - -export const notebookCommentButtonLogic = kea([ - path((key) => ['scenes', 'session-recordings', 'NotebookCommentButton', 'multiNotebookCommentButtonLogic', key]), - props({} as NotebookCommentButtonProps), - key((props) => props.sessionRecordingId || 'no recording id yet'), - actions({ - setShowPopover: (visible: boolean) => ({ visible }), - setSearchQuery: (query: string) => ({ query }), - loadContainingNotebooks: true, - loadAllNotebooks: true, - }), - reducers(({ props }) => ({ - searchQuery: [ - '', - { - setSearchQuery: (_, { query }) => query, - }, - ], - showPopover: [ - props.startVisible, - { - setShowPopover: (_, { visible }) => visible, - }, - ], - })), - 
listeners(({ actions }) => ({ - setSearchQuery: () => { - actions.loadAllNotebooks() - actions.loadContainingNotebooks() - }, - })), - loaders(({ props, values }) => ({ - allNotebooks: [ - [] as NotebookListItemType[], - { - loadAllNotebooks: async (_, breakpoint) => { - breakpoint(100) - const response = await api.notebooks.list(undefined, undefined, values.searchQuery ?? undefined) - // TODO for simplicity we'll assume the results will fit into one page - return response.results - }, - }, - ], - containingNotebooks: [ - [] as NotebookListItemType[], - { - loadContainingNotebooks: async (_, breakpoint) => { - breakpoint(100) - const response = await api.notebooks.list( - [{ type: NotebookNodeType.Recording, attrs: { id: props.sessionRecordingId } }], - undefined, - values.searchQuery ?? undefined - ) - // TODO for simplicity we'll assume the results will fit into one page - return response.results - }, - }, - ], - })), - events(({ actions }) => ({ - afterMount: () => { - actions.loadAllNotebooks() - actions.loadContainingNotebooks() - }, - })), - selectors(() => ({ - notebooksLoading: [ - (s) => [s.allNotebooksLoading, s.containingNotebooksLoading], - (allNotebooksLoading, containingNotebooksLoading) => allNotebooksLoading || containingNotebooksLoading, - ], - })), -]) diff --git a/frontend/src/scenes/notebooks/NotebookScene.tsx b/frontend/src/scenes/notebooks/NotebookScene.tsx index 99bbea23881d1..fc46050a2300a 100644 --- a/frontend/src/scenes/notebooks/NotebookScene.tsx +++ b/frontend/src/scenes/notebooks/NotebookScene.tsx @@ -4,7 +4,6 @@ import { notebookLogic } from './Notebook/notebookLogic' import { Notebook } from './Notebook/Notebook' import { NotFound } from 'lib/components/NotFound' import { NotebookSceneLogicProps, notebookSceneLogic } from './notebookSceneLogic' -import { NotebookMode } from '~/types' import { LemonButton, LemonTag } from '@posthog/lemon-ui' import { notebookPopoverLogic } from './Notebook/notebookPopoverLogic' import { 
NotebookExpandButton, NotebookSyncInfo } from './Notebook/NotebookMeta' @@ -32,8 +31,7 @@ export const scene: SceneExport = { } export function NotebookScene(): JSX.Element { - const { notebookId, mode } = useValues(notebookSceneLogic) - const { setNotebookMode } = useActions(notebookSceneLogic) + const { notebookId } = useValues(notebookSceneLogic) const { notebook, notebookLoading, conflictWarningVisible } = useValues(notebookLogic({ shortId: notebookId })) const { exportJSON } = useActions(notebookLogic({ shortId: notebookId })) const { selectNotebook, setVisibility } = useActions(notebookPopoverLogic) @@ -65,13 +63,13 @@ export function NotebookScene(): JSX.Element { ) } - const editEnabled = !notebook?.is_template + const isTemplate = notebook?.is_template return (
- {notebook?.is_template && TEMPLATE} + {isTemplate && TEMPLATE}
@@ -89,7 +87,7 @@ export function NotebookScene(): JSX.Element { exportJSON() }, }, - editEnabled && { + !isTemplate && { label: 'Delete', icon: , status: 'danger', @@ -135,32 +133,10 @@ export function NotebookScene(): JSX.Element { > Pin to side - - {!editEnabled ? null : mode === NotebookMode.Edit ? ( - <> - setNotebookMode(NotebookMode.View)} - > - Done - - - ) : ( - <> - setNotebookMode(NotebookMode.Edit)} - > - Edit - - - )}
- +
) } diff --git a/frontend/src/scenes/notebooks/NotebookAddButton/NotebookAddButton.stories.tsx b/frontend/src/scenes/notebooks/NotebookSelectButton/NotebookSelectButton.stories.tsx similarity index 88% rename from frontend/src/scenes/notebooks/NotebookAddButton/NotebookAddButton.stories.tsx rename to frontend/src/scenes/notebooks/NotebookSelectButton/NotebookSelectButton.stories.tsx index 6c455e3636916..64e6fadadb513 100644 --- a/frontend/src/scenes/notebooks/NotebookAddButton/NotebookAddButton.stories.tsx +++ b/frontend/src/scenes/notebooks/NotebookSelectButton/NotebookSelectButton.stories.tsx @@ -1,13 +1,13 @@ import { Meta, StoryFn } from '@storybook/react' -import { NotebookAddButton } from 'scenes/notebooks/NotebookAddButton/NotebookAddButton' +import { NotebookSelectButton } from 'scenes/notebooks/NotebookSelectButton/NotebookSelectButton' import { useFeatureFlags, useStorybookMocks } from '~/mocks/browser' import { NotebookNodeType } from '~/types' import { FEATURE_FLAGS } from 'lib/constants' export default { - title: 'Scenes-App/Notebooks/Components/Notebook Add Button', - component: NotebookAddButton, -} as Meta + title: 'Scenes-App/Notebooks/Components/Notebook Select Button', + component: NotebookSelectButton, +} as Meta const allNotebooks = [ { @@ -18,7 +18,7 @@ const allNotebooks = [ { title: 'an empty notebook', short_id: 'ghi' }, ] -const Template: StoryFn = (props) => { +const Template: StoryFn = (props) => { useFeatureFlags([FEATURE_FLAGS.NOTEBOOKS]) useStorybookMocks({ get: { @@ -49,7 +49,7 @@ const Template: StoryFn = (props) => { return ( // the button has its dropdown showing and so needs a container that will include the pop-over
- +
) } diff --git a/frontend/src/scenes/notebooks/NotebookSelectButton/NotebookSelectButton.tsx b/frontend/src/scenes/notebooks/NotebookSelectButton/NotebookSelectButton.tsx new file mode 100644 index 0000000000000..dd19fe5216d5c --- /dev/null +++ b/frontend/src/scenes/notebooks/NotebookSelectButton/NotebookSelectButton.tsx @@ -0,0 +1,231 @@ +import { LemonButton, LemonButtonProps } from 'lib/lemon-ui/LemonButton' + +import { IconJournalPlus, IconPlus, IconWithCount } from 'lib/lemon-ui/icons' +import { + NotebookSelectButtonLogicProps, + notebookSelectButtonLogic, +} from 'scenes/notebooks/NotebookSelectButton/notebookSelectButtonLogic' +import { BuiltLogic, useActions, useValues } from 'kea' +import { dayjs } from 'lib/dayjs' +import { NotebookListItemType, NotebookTarget } from '~/types' +import { notebooksModel, openNotebook } from '~/models/notebooksModel' +import { useNotebookNode } from 'scenes/notebooks/Nodes/notebookNodeLogic' +import { Popover, PopoverProps } from 'lib/lemon-ui/Popover' +import { LemonInput } from 'lib/lemon-ui/LemonInput/LemonInput' +import { notebookLogicType } from '../Notebook/notebookLogicType' +import { notebookNodeLogicType } from '../Nodes/notebookNodeLogicType' +import { FlaggedFeature } from 'lib/components/FlaggedFeature' +import { FEATURE_FLAGS } from 'lib/constants' +import { ReactChild, useEffect } from 'react' +import { LemonDivider } from '@posthog/lemon-ui' + +export type NotebookSelectProps = NotebookSelectButtonLogicProps & { + newNotebookTitle?: string + onNotebookOpened?: ( + notebookLogic: BuiltLogic, + nodeLogic?: BuiltLogic + ) => void +} + +export type NotebookSelectPopoverProps = NotebookSelectProps & + Partial & { + children?: ReactChild + } + +export type NotebookSelectButtonProps = NotebookSelectProps & + Omit & { + onClick?: () => void + children?: ReactChild + } + +function NotebooksChoiceList(props: { + notebooks: NotebookListItemType[] + emptyState: string + onClick: (notebookShortId: 
NotebookListItemType['short_id']) => void +}): JSX.Element { + return ( +
+ {props.notebooks.length === 0 ? ( +
{props.emptyState}
+ ) : ( + props.notebooks.map((notebook, i) => { + return ( + props.onClick(notebook.short_id)}> + {notebook.title || `Untitled (${notebook.short_id})`} + + ) + }) + )} +
+ ) +} + +export function NotebookSelectList(props: NotebookSelectProps): JSX.Element { + const logic = notebookSelectButtonLogic({ ...props }) + + const { resource, newNotebookTitle } = props + const { notebooksLoading, notebooksContainingResource, notebooksNotContainingResource, searchQuery } = + useValues(logic) + const { setShowPopover, setSearchQuery, loadNotebooksContainingResource, loadAllNotebooks } = useActions(logic) + const { createNotebook } = useActions(notebooksModel) + + const openAndAddToNotebook = async (notebookShortId: string, exists: boolean): Promise => { + await openNotebook(notebookShortId, NotebookTarget.Popover, null, (theNotebookLogic) => { + if (!exists && props.resource) { + theNotebookLogic.actions.insertAfterLastNode([props.resource]) + } + props.onNotebookOpened?.(theNotebookLogic) + }) + } + + const openNewNotebook = (): void => { + const title = newNotebookTitle ?? `Notes ${dayjs().format('DD/MM')}` + + if (resource) { + createNotebook(title, NotebookTarget.Popover, [resource], (theNotebookLogic) => { + props.onNotebookOpened?.(theNotebookLogic) + loadNotebooksContainingResource() + }) + } + + setShowPopover(false) + } + + useEffect(() => { + if (props.resource) { + loadNotebooksContainingResource() + } + loadAllNotebooks() + }, []) + + return ( +
+
+ setSearchQuery(s)} + fullWidth + /> + } onClick={openNewNotebook}> + New notebook + +
+
+ {notebooksLoading && !notebooksNotContainingResource.length && !notebooksContainingResource.length ? ( +
+ {notebooksLoading ? ( + 'Loading...' + ) : searchQuery.length ? ( + <>No matching notebooks + ) : ( + <>You have no notebooks + )} +
+ ) : ( + <> + {resource ? ( + <> +
Continue in
+ { + setShowPopover(false) + await openAndAddToNotebook(notebookShortId, true) + }} + /> + + + ) : null} +
Add to
+ { + setShowPopover(false) + await openAndAddToNotebook(notebookShortId, false) + }} + /> + + )} +
+
+ ) +} + +export function NotebookSelectPopover({ + // so we can pass props to the button below, without passing visible to it + visible, + children, + ...props +}: NotebookSelectPopoverProps): JSX.Element { + const logic = notebookSelectButtonLogic({ ...props, visible }) + const { showPopover } = useValues(logic) + const { setShowPopover } = useActions(logic) + + return ( + setShowPopover(false)} + actionable + overlay={ +
+ +
+ } + {...props} + > + setShowPopover(true)}>{children} +
+ ) +} + +export function NotebookSelectButton({ children, ...props }: NotebookSelectButtonProps): JSX.Element { + // if nodeLogic is available then the button is on a resource that _is already and currently in a notebook_ + const nodeLogic = useNotebookNode() + const logic = notebookSelectButtonLogic({ ...props }) + const { showPopover, notebooksLoading, notebooksContainingResource } = useValues(logic) + const { loadNotebooksContainingResource } = useActions(logic) + + useEffect(() => { + if (!nodeLogic) { + loadNotebooksContainingResource() + } + }, [nodeLogic]) + + const button = ( + } + data-attr={nodeLogic ? 'notebooks-add-button-in-a-notebook' : 'notebooks-add-button'} + sideIcon={null} + {...props} + active={showPopover} + loading={notebooksLoading} + onClick={() => { + props.onClick?.() + if (nodeLogic) { + // If we are in a Notebook then we just call the callback directly + props.onNotebookOpened?.(nodeLogic.props.notebookLogic, nodeLogic) + } + }} + > + {children ?? 'Add to notebook'} + + ) + + return ( + + {nodeLogic ? 
( + button + ) : ( + + {button} + + )} + + ) +} diff --git a/frontend/src/scenes/notebooks/NotebookAddButton/notebookAddButtonLogic.ts b/frontend/src/scenes/notebooks/NotebookSelectButton/notebookSelectButtonLogic.ts similarity index 50% rename from frontend/src/scenes/notebooks/NotebookAddButton/notebookAddButtonLogic.ts rename to frontend/src/scenes/notebooks/NotebookSelectButton/notebookSelectButtonLogic.ts index 729c5c07aa7ea..9bf9ab8fb5fa9 100644 --- a/frontend/src/scenes/notebooks/NotebookAddButton/notebookAddButtonLogic.ts +++ b/frontend/src/scenes/notebooks/NotebookSelectButton/notebookSelectButtonLogic.ts @@ -1,28 +1,25 @@ -import { actions, events, kea, key, listeners, path, props, reducers, selectors } from 'kea' +import { actions, kea, key, listeners, path, props, reducers, selectors } from 'kea' import { loaders } from 'kea-loaders' -import { NotebookListItemType, NotebookNodeType } from '~/types' +import { NotebookListItemType, NotebookNodeResource } from '~/types' import api from 'lib/api' -import type { notebookAddButtonLogicType } from './notebookAddButtonLogicType' +import type { notebookSelectButtonLogicType } from './notebookSelectButtonLogicType' -export interface NotebookAddButtonLogicProps { - resource: { - attrs: Record - type: NotebookNodeType - } +export interface NotebookSelectButtonLogicProps { + resource?: NotebookNodeResource // allows callers (e.g. 
storybook) to control starting visibility of the popover visible?: boolean } -export const notebookAddButtonLogic = kea([ - path((key) => ['scenes', 'session-recordings', 'NotebookAddButton', 'multiNotebookAddButtonLogic', key]), - props({} as NotebookAddButtonLogicProps), - key((props) => JSON.stringify(props.resource)), +export const notebookSelectButtonLogic = kea([ + path((key) => ['scenes', 'session-recordings', 'NotebookSelectButton', 'multiNotebookSelectButtonLogic', key]), + props({} as NotebookSelectButtonLogicProps), + key((props) => JSON.stringify(props.resource || 'load')), actions({ setShowPopover: (visible: boolean) => ({ visible }), setSearchQuery: (query: string) => ({ query }), - loadContainingNotebooks: true, + loadNotebooksContainingResource: true, loadAllNotebooks: true, }), reducers(({ props }) => ({ @@ -40,9 +37,10 @@ export const notebookAddButtonLogic = kea([ ], })), listeners(({ actions }) => ({ - setSearchQuery: () => { + setSearchQuery: async (_, breakpoint) => { + await breakpoint(300) actions.loadAllNotebooks() - actions.loadContainingNotebooks() + actions.loadNotebooksContainingResource() }, })), loaders(({ props, values }) => ({ @@ -57,13 +55,18 @@ export const notebookAddButtonLogic = kea([ }, }, ], - containingNotebooks: [ + notebooksContainingResource: [ [] as NotebookListItemType[], { - loadContainingNotebooks: async (_, breakpoint) => { + loadNotebooksContainingResource: async (_, breakpoint) => { breakpoint(100) + if (!props.resource) { + return [] + } const response = await api.notebooks.list( - [{ type: props.resource.type, attrs: { id: props.resource.attrs?.id } }], + props.resource + ? [{ type: props.resource.type, attrs: { id: props.resource.attrs?.id } }] + : undefined, undefined, values.searchQuery ?? 
undefined ) @@ -73,16 +76,18 @@ export const notebookAddButtonLogic = kea([ }, ], })), - events(({ actions }) => ({ - afterMount: () => { - actions.loadAllNotebooks() - actions.loadContainingNotebooks() - }, - })), selectors(() => ({ + notebooksNotContainingResource: [ + (s) => [s.allNotebooks, s.notebooksContainingResource], + (allNotebooks, notebooksContainingResource) => + allNotebooks.filter( + (notebook) => !notebooksContainingResource.find((n) => n.short_id === notebook.short_id) + ), + ], notebooksLoading: [ - (s) => [s.allNotebooksLoading, s.containingNotebooksLoading], - (allNotebooksLoading, containingNotebooksLoading) => allNotebooksLoading || containingNotebooksLoading, + (s) => [s.allNotebooksLoading, s.notebooksContainingResourceLoading], + (allNotebooksLoading, notebooksContainingResourceLoading) => + allNotebooksLoading || notebooksContainingResourceLoading, ], })), ]) diff --git a/frontend/src/scenes/notebooks/NotebooksTable/ContainsTypeFilter.tsx b/frontend/src/scenes/notebooks/NotebooksTable/ContainsTypeFilter.tsx index 00ffb408ebe30..ac8f58010de68 100644 --- a/frontend/src/scenes/notebooks/NotebooksTable/ContainsTypeFilter.tsx +++ b/frontend/src/scenes/notebooks/NotebooksTable/ContainsTypeFilter.tsx @@ -7,6 +7,7 @@ export const fromNodeTypeToLabel: Omit, Noteboo [NotebookNodeType.FeatureFlagCodeExample]: 'Feature flag Code Examples', [NotebookNodeType.Experiment]: 'Experiments', [NotebookNodeType.EarlyAccessFeature]: 'Early Access Features', + [NotebookNodeType.Survey]: 'Surveys', [NotebookNodeType.Image]: 'Images', [NotebookNodeType.Insight]: 'Insights', [NotebookNodeType.Person]: 'Persons', diff --git a/frontend/src/scenes/notebooks/NotebooksTable/NotebooksTable.tsx b/frontend/src/scenes/notebooks/NotebooksTable/NotebooksTable.tsx index 5e58920b88617..2ce18eba28801 100644 --- a/frontend/src/scenes/notebooks/NotebooksTable/NotebooksTable.tsx +++ b/frontend/src/scenes/notebooks/NotebooksTable/NotebooksTable.tsx @@ -103,6 +103,7 @@ export 
function NotebooksTable(): JSX.Element { setFilters({ search: s }) }} value={filters.search} + data-attr={'notebooks-search'} />
@@ -127,7 +128,7 @@ export function NotebooksTable(): JSX.Element {
([ }), connect({ values: [notebooksModel, ['notebookTemplates']], + actions: [notebooksModel, ['deleteNotebookSuccess']], }), reducers({ filters: [ @@ -66,6 +67,10 @@ export const notebooksTableLogic = kea([ setFilters: () => { actions.loadNotebooks() }, + deleteNotebookSuccess: () => { + // TODO at some point this will be slow enough it makes sense to patch the in-memory list but for simplicity... + actions.loadNotebooks() + }, })), selectors({ notebooksAndTemplates: [ diff --git a/frontend/src/scenes/notebooks/notebookSceneLogic.ts b/frontend/src/scenes/notebooks/notebookSceneLogic.ts index a3f34d7cf9bab..3129323a931fe 100644 --- a/frontend/src/scenes/notebooks/notebookSceneLogic.ts +++ b/frontend/src/scenes/notebooks/notebookSceneLogic.ts @@ -1,6 +1,5 @@ -import { actions, afterMount, connect, kea, key, path, props, reducers, selectors } from 'kea' -import { Breadcrumb, NotebookMode } from '~/types' -import { actionToUrl, urlToAction } from 'kea-router' +import { afterMount, connect, kea, key, path, props, selectors } from 'kea' +import { Breadcrumb } from '~/types' import type { notebookSceneLogicType } from './notebookSceneLogicType' import { notebookLogic } from './Notebook/notebookLogic' @@ -17,17 +16,6 @@ export const notebookSceneLogic = kea([ values: [notebookLogic(props), ['notebook', 'notebookLoading']], actions: [notebookLogic(props), ['loadNotebook']], })), - actions({ - setNotebookMode: (mode: NotebookMode) => ({ mode }), - }), - reducers({ - mode: [ - NotebookMode.View as NotebookMode, - { - setNotebookMode: (_, { mode }) => mode, - }, - ], - }), selectors(() => ({ notebookId: [() => [(_, props) => props], (props): string => props.shortId], @@ -48,26 +36,6 @@ export const notebookSceneLogic = kea([ ], ], })), - urlToAction(({ props, actions, values }) => ({ - [`/notebooks/${props.shortId}(/:mode)`]: ( - { mode } // url params - ) => { - const newMode = mode === 'edit' ? 
NotebookMode.Edit : NotebookMode.View - - if (newMode !== values.mode) { - actions.setNotebookMode(newMode) - } - }, - })), - actionToUrl(({ values, props }) => { - return { - setNotebookMode: () => { - return values.mode === NotebookMode.View - ? urls.notebook(props.shortId) - : urls.notebookEdit(props.shortId) - }, - } - }), afterMount(({ actions }) => { actions.loadNotebook() diff --git a/frontend/src/scenes/persons/Person.tsx b/frontend/src/scenes/persons/Person.tsx index 5e27636f47ac8..73c14145d77a0 100644 --- a/frontend/src/scenes/persons/Person.tsx +++ b/frontend/src/scenes/persons/Person.tsx @@ -33,7 +33,7 @@ import { defaultDataTableColumns } from '~/queries/nodes/DataTable/utils' import { IconInfo } from 'lib/lemon-ui/icons' import { LemonTabs } from 'lib/lemon-ui/LemonTabs' import { PersonDashboard } from './PersonDashboard' -import { NotebookAddButton } from 'scenes/notebooks/NotebookAddButton/NotebookAddButton' +import { NotebookSelectButton } from 'scenes/notebooks/NotebookSelectButton/NotebookSelectButton' export const scene: SceneExport = { component: Person, @@ -143,7 +143,7 @@ export function Person(): JSX.Element | null { } buttons={
- {display} - { + onChange={(newTimezone: string): void => { // This is a string for a single-mode select, but typing is poor if (!preflight?.available_timezones) { throw new Error('No timezones are available') diff --git a/frontend/src/scenes/project/Settings/WebhookIntegration.tsx b/frontend/src/scenes/project/Settings/WebhookIntegration.tsx index 83ba8c9a13114..23caab8f00533 100644 --- a/frontend/src/scenes/project/Settings/WebhookIntegration.tsx +++ b/frontend/src/scenes/project/Settings/WebhookIntegration.tsx @@ -2,13 +2,18 @@ import { useEffect, useState } from 'react' import { useActions, useValues } from 'kea' import { teamLogic } from 'scenes/teamLogic' import { webhookIntegrationLogic } from './webhookIntegrationLogic' -import { LemonButton, LemonInput } from '@posthog/lemon-ui' +import { LemonButton, LemonInput, Link } from '@posthog/lemon-ui' +import { featureFlagLogic } from 'lib/logic/featureFlagLogic' +import { FEATURE_FLAGS } from 'lib/constants' +import { supportLogic } from 'lib/components/Support/supportLogic' export function WebhookIntegration(): JSX.Element { const [webhook, setWebhook] = useState('') const { testWebhook, removeWebhook } = useActions(webhookIntegrationLogic) const { loading } = useValues(webhookIntegrationLogic) const { currentTeam } = useValues(teamLogic) + const { featureFlags } = useValues(featureFlagLogic) + const { openSupportForm } = useActions(supportLogic) useEffect(() => { if (currentTeam?.slack_incoming_webhook) { @@ -16,6 +21,18 @@ export function WebhookIntegration(): JSX.Element { } }, [currentTeam]) + const webhooks_blacklisted = featureFlags[FEATURE_FLAGS.WEBHOOKS_DENYLIST] + if (webhooks_blacklisted) { + return ( +
+

+ Webhooks are currently not available for your organization.{' '} + openSupportForm('support', 'apps')}>Contact support +

+
+ ) + } + return (

diff --git a/frontend/src/scenes/scenes.ts b/frontend/src/scenes/scenes.ts index 356d9a9a06cc6..ca37ff7676737 100644 --- a/frontend/src/scenes/scenes.ts +++ b/frontend/src/scenes/scenes.ts @@ -481,5 +481,4 @@ export const routes: Record = { [urls.feedback()]: Scene.Feedback, [urls.feedback() + '/*']: Scene.Feedback, [urls.notebook(':shortId')]: Scene.Notebook, - [urls.notebookEdit(':shortId')]: Scene.Notebook, } diff --git a/frontend/src/scenes/session-recordings/SessionsRecordings.stories.tsx b/frontend/src/scenes/session-recordings/SessionsRecordings-player-success.stories.tsx similarity index 93% rename from frontend/src/scenes/session-recordings/SessionsRecordings.stories.tsx rename to frontend/src/scenes/session-recordings/SessionsRecordings-player-success.stories.tsx index 1ac9395728811..1060246c67d27 100644 --- a/frontend/src/scenes/session-recordings/SessionsRecordings.stories.tsx +++ b/frontend/src/scenes/session-recordings/SessionsRecordings-player-success.stories.tsx @@ -9,7 +9,6 @@ import recordingSnapshotsJson from 'scenes/session-recordings/__mocks__/recordin import recordingMetaJson from 'scenes/session-recordings/__mocks__/recording_meta.json' import recordingEventsJson from 'scenes/session-recordings/__mocks__/recording_events_query' import recording_playlists from './__mocks__/recording_playlists.json' -import { ReplayTabs } from '~/types' const meta: Meta = { title: 'Scenes-App/Recordings', @@ -17,6 +16,7 @@ const meta: Meta = { layout: 'fullscreen', viewMode: 'story', mockDate: '2023-02-01', + waitForSelector: '.PlayerFrame__content .replayer-wrapper iframe', }, decorators: [ mswDecorator({ @@ -81,7 +81,7 @@ const meta: Meta = { }, ] }, - '/api/projects/:team_id/session_recording_playlists/:playlist_id/recordings?limit=100': (req) => { + '/api/projects/:team_id/session_recording_playlists/:playlist_id/recordings': (req) => { const playlistId = req.params.playlist_id const response = playlistId === '1234567' ? 
recordings : [] return [200, { has_next: false, results: response, version: 1 }] @@ -89,6 +89,12 @@ const meta: Meta = { // without the session-recording-blob-replay feature flag, we only load via ClickHouse '/api/projects/:team/session_recordings/:id/snapshots': recordingSnapshotsJson, '/api/projects/:team/session_recordings/:id': recordingMetaJson, + 'api/projects/:team/notebooks': { + count: 0, + next: null, + previous: null, + results: [], + }, }, post: { '/api/projects/:team/query': recordingEventsJson, @@ -97,16 +103,10 @@ const meta: Meta = { ], } export default meta -export function RecordingsList(): JSX.Element { - useEffect(() => { - router.actions.push(urls.replay()) - }, []) - return -} -export function RecordingsPlayLists(): JSX.Element { +export function RecentRecordings(): JSX.Element { useEffect(() => { - router.actions.push(urls.replay(ReplayTabs.Playlists)) + router.actions.push(urls.replay()) }, []) return } diff --git a/frontend/src/scenes/session-recordings/SessionsRecordings-playlist-listing.stories.tsx b/frontend/src/scenes/session-recordings/SessionsRecordings-playlist-listing.stories.tsx new file mode 100644 index 0000000000000..657fbccf4bc29 --- /dev/null +++ b/frontend/src/scenes/session-recordings/SessionsRecordings-playlist-listing.stories.tsx @@ -0,0 +1,48 @@ +import { Meta } from '@storybook/react' +import { useEffect } from 'react' +import { mswDecorator } from '~/mocks/browser' +import { router } from 'kea-router' +import { urls } from 'scenes/urls' +import { App } from 'scenes/App' +import recording_playlists from './__mocks__/recording_playlists.json' +import { ReplayTabs } from '~/types' +import recordings from 'scenes/session-recordings/__mocks__/recordings.json' +import recordingEventsJson from 'scenes/session-recordings/__mocks__/recording_events_query' + +const meta: Meta = { + title: 'Scenes-App/Recordings', + parameters: { + layout: 'fullscreen', + viewMode: 'story', + mockDate: '2023-02-01', + }, + decorators: [ + 
mswDecorator({ + get: { + '/api/projects/:team_id/session_recording_playlists': recording_playlists, + '/api/projects/:team_id/session_recordings': (req) => { + const version = req.url.searchParams.get('version') + return [ + 200, + { + has_next: false, + results: recordings, + version, + }, + ] + }, + }, + post: { + '/api/projects/:team/query': recordingEventsJson, + }, + }), + ], +} +export default meta + +export function RecordingsPlayLists(): JSX.Element { + useEffect(() => { + router.actions.push(urls.replay(ReplayTabs.Playlists)) + }, []) + return +} diff --git a/frontend/src/scenes/session-recordings/__mocks__/recording_events.json b/frontend/src/scenes/session-recordings/__mocks__/recording_events.json index f2db148045646..0afa00a98d244 100644 --- a/frontend/src/scenes/session-recordings/__mocks__/recording_events.json +++ b/frontend/src/scenes/session-recordings/__mocks__/recording_events.json @@ -1,6 +1,6 @@ [ { - "id": "$pageview", + "id": "$pageview1", "event": "$pageview", "name": "$event_before_recording_starts", "type": "events", @@ -14,7 +14,7 @@ "elements_hash": "" }, { - "id": "$pageview", + "id": "$pageview2", "name": "$pageview", "event": "$pageview", "type": "events", diff --git a/frontend/src/scenes/session-recordings/filters/AdvancedSessionRecordingsFilters.tsx b/frontend/src/scenes/session-recordings/filters/AdvancedSessionRecordingsFilters.tsx index 94c1e95eb0ba6..e277a3b4a997a 100644 --- a/frontend/src/scenes/session-recordings/filters/AdvancedSessionRecordingsFilters.tsx +++ b/frontend/src/scenes/session-recordings/filters/AdvancedSessionRecordingsFilters.tsx @@ -60,7 +60,8 @@ export const AdvancedSessionRecordingsFilters = ({ { key: 'Custom', values: [] }, { key: 'Last 24 hours', values: ['-24h'] }, { key: 'Last 7 days', values: ['-7d'] }, - { key: 'Last 21 days', values: ['-21d'] }, + { key: 'Last 30 days', values: ['-30d'] }, + { key: 'All time', values: ['-90d'] }, ]} dropdownPlacement="bottom-start" /> diff --git 
a/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx b/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx index fd023b710fc20..451f1cf616f8a 100644 --- a/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx +++ b/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx @@ -4,17 +4,18 @@ import { } from 'scenes/session-recordings/player/sessionRecordingPlayerLogic' import { useActions, useValues } from 'kea' import { LemonButton, LemonButtonProps } from 'lib/lemon-ui/LemonButton' -import { IconComment, IconDelete, IconLink } from 'lib/lemon-ui/icons' +import { IconComment, IconDelete, IconJournalPlus, IconLink } from 'lib/lemon-ui/icons' import { openPlayerShareDialog } from 'scenes/session-recordings/player/share/PlayerShare' import { PlaylistPopoverButton } from './playlist-popover/PlaylistPopover' import { LemonDialog } from 'lib/lemon-ui/LemonDialog' -import { NotebookAddButton } from 'scenes/notebooks/NotebookAddButton/NotebookAddButton' +import { NotebookSelectButton } from 'scenes/notebooks/NotebookSelectButton/NotebookSelectButton' import { NotebookNodeType } from '~/types' -import { dayjs } from 'lib/dayjs' +import { useNotebookNode } from 'scenes/notebooks/Nodes/notebookNodeLogic' export function PlayerMetaLinks(): JSX.Element { const { sessionRecordingId, logicProps } = useValues(sessionRecordingPlayerLogic) const { setPause, deleteRecording } = useActions(sessionRecordingPlayerLogic) + const nodeLogic = useNotebookNode() const getCurrentPlayerTime = (): number => { // NOTE: We pull this value at call time as otherwise it would trigger re-renders if pulled from the hook @@ -55,12 +56,11 @@ export function PlayerMetaLinks(): JSX.Element {

{![SessionRecordingPlayerMode.Sharing].includes(mode) ? ( <> - } resource={{ type: NotebookNodeType.Recording, attrs: { id: sessionRecordingId } }} onClick={() => setPause()} - newNotebookTitle={`Notes ${dayjs().format('DD/MM')}`} onNotebookOpened={(theNotebookLogic, theNodeLogic) => { const time = getCurrentPlayerTime() * 1000 @@ -74,15 +74,30 @@ export function PlayerMetaLinks(): JSX.Element { }} > Comment - + } onClick={onShare} {...commonProps}> Share - - Pin - + {nodeLogic ? ( + nodeLogic.props.nodeType !== NotebookNodeType.Recording ? ( + } + size="small" + onClick={() => { + nodeLogic.actions.insertAfter({ + type: NotebookNodeType.Recording, + attrs: { id: sessionRecordingId }, + }) + }} + /> + ) : null + ) : ( + + Pin + + )} {logicProps.playerKey !== 'modal' && ( ([ if (nextSourceToLoad) { actions.loadRecordingSnapshotsV2(nextSourceToLoad) - } else { - actions.reportUsageIfFullyLoaded() } }, loadRecordingSnapshotsV1Success: ({ sessionPlayerSnapshotData }) => { @@ -254,8 +252,6 @@ export const sessionRecordingDataLogic = kea([ if (values.sessionPlayerSnapshotData?.next) { actions.loadRecordingSnapshotsV1(values.sessionPlayerSnapshotData?.next) - } else { - actions.reportUsageIfFullyLoaded() } if (values.chunkPaginationIndex === 1 || values.loadedFromBlobStorage) { // Not always accurate that recording is playable after first chunk is loaded, but good guesstimate for now @@ -265,10 +261,12 @@ export const sessionRecordingDataLogic = kea([ size: (values.sessionPlayerSnapshotData?.snapshots ?? 
[]).length, duration: Math.round(performance.now() - cache.snapshotsStartTime), } - - actions.reportViewed() } }, + loadRecordingSnapshotsSuccess: () => { + actions.reportViewed() + actions.reportUsageIfFullyLoaded() + }, loadRecordingSnapshotsV1Failure: () => { actions.loadRecordingSnapshotsFailure() }, diff --git a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx index 04464fc9a87da..00508be3ab649 100644 --- a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx +++ b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx @@ -57,19 +57,32 @@ function UnusableEventsWarning(props: { unusableEventsInFilter: string[] }): JSX ) } +export type SessionRecordingsPlaylistProps = SessionRecordingListLogicProps & { + playlistShortId?: string + personUUID?: string + filters?: RecordingFilters + updateSearchParams?: boolean + onFiltersChange?: (filters: RecordingFilters) => void + autoPlay?: boolean + mode?: 'standard' | 'notebook' +} + export function RecordingsLists({ playlistShortId, personUUID, filters: defaultFilters, updateSearchParams, + ...props }: SessionRecordingsPlaylistProps): JSX.Element { - const logicProps = { + const logicProps: SessionRecordingListLogicProps = { + ...props, playlistShortId, personUUID, filters: defaultFilters, updateSearchParams, } const logic = sessionRecordingsListLogic(logicProps) + const { filters, hasNext, @@ -244,11 +257,11 @@ export function RecordingsLists({ data-attr={'expand-replay-listing-from-default-seven-days-to-twenty-one'} onClick={() => { setFilters({ - date_from: '-21d', + date_from: '-30d', }) }} > - Search over the last 21 days + Search over the last 30 days ) : ( @@ -285,33 +298,12 @@ export function RecordingsLists({ ) } -export type SessionRecordingsPlaylistProps = { - playlistShortId?: string - personUUID?: string - filters?: RecordingFilters - updateSearchParams?: 
boolean - onFiltersChange?: (filters: RecordingFilters) => void - autoPlay?: boolean - mode?: 'standard' | 'notebook' -} - export function SessionRecordingsPlaylist(props: SessionRecordingsPlaylistProps): JSX.Element { - const { - playlistShortId, - personUUID, - filters: defaultFilters, - updateSearchParams, - onFiltersChange, - autoPlay = true, - } = props + const { playlistShortId } = props const logicProps: SessionRecordingListLogicProps = { - playlistShortId, - personUUID, - filters: defaultFilters, - updateSearchParams, - autoPlay, - onFiltersChange, + ...props, + autoPlay: props.autoPlay ?? true, } const logic = sessionRecordingsListLogic(logicProps) const { diff --git a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsListLogic.ts b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsListLogic.ts index 99cb664cebc18..5d44e84618b36 100644 --- a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsListLogic.ts +++ b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsListLogic.ts @@ -157,12 +157,8 @@ export const defaultPageviewPropertyEntityFilter = ( } } -export function generateSessionRecordingListLogicKey(props: SessionRecordingListLogicProps): string { - return `${props.key}-${props.playlistShortId}-${props.personUUID}-${props.updateSearchParams ? '-with-search' : ''}` -} - export interface SessionRecordingListLogicProps { - key?: string + logicKey?: string playlistShortId?: string personUUID?: PersonUUID filters?: RecordingFilters @@ -174,7 +170,12 @@ export interface SessionRecordingListLogicProps { export const sessionRecordingsListLogic = kea([ path((key) => ['scenes', 'session-recordings', 'playlist', 'sessionRecordingsListLogic', key]), props({} as SessionRecordingListLogicProps), - key(generateSessionRecordingListLogicKey), + key( + (props: SessionRecordingListLogicProps) => + `${props.logicKey}-${props.playlistShortId}-${props.personUUID}-${ + props.updateSearchParams ? 
'-with-search' : '' + }` + ), connect({ actions: [ eventUsageLogic, diff --git a/frontend/src/scenes/surveys/Survey.tsx b/frontend/src/scenes/surveys/Survey.tsx index 6f72397bd4c54..d59ed4b674e69 100644 --- a/frontend/src/scenes/surveys/Survey.tsx +++ b/frontend/src/scenes/surveys/Survey.tsx @@ -60,7 +60,7 @@ export function SurveyComponent({ id }: { id?: string } = {}): JSX.Element { export function SurveyForm({ id }: { id: string }): JSX.Element { const { survey, surveyLoading, isEditingSurvey, hasTargetingFlag } = useValues(surveyLogic) - const { loadSurvey, editingSurvey, setHasTargetingFlag } = useActions(surveyLogic) + const { loadSurvey, editingSurvey, setSurveyValue } = useActions(surveyLogic) const { featureFlags } = useValues(enabledFeaturesLogic) return ( @@ -374,7 +374,9 @@ export function SurveyForm({ id }: { id: string }): JSX.Element { setHasTargetingFlag(true)} + onClick={() => { + setSurveyValue('targeting_flag_filters', { groups: [] }) + }} > Add user targeting @@ -389,7 +391,10 @@ export function SurveyForm({ id }: { id: string }): JSX.Element { type="secondary" status="danger" className="w-max" - onClick={() => setHasTargetingFlag(false)} + onClick={() => { + setSurveyValue('targeting_flag_filters', undefined) + setSurveyValue('targeting_flag', null) + }} > Remove all user properties diff --git a/frontend/src/scenes/surveys/SurveyView.tsx b/frontend/src/scenes/surveys/SurveyView.tsx index dfe7de4895a4b..381f766f9abae 100644 --- a/frontend/src/scenes/surveys/SurveyView.tsx +++ b/frontend/src/scenes/surveys/SurveyView.tsx @@ -10,27 +10,22 @@ import { capitalizeFirstLetter } from 'lib/utils' import { useState, useEffect } from 'react' import { pluginsLogic } from 'scenes/plugins/pluginsLogic' import { Query } from '~/queries/Query/Query' -import { defaultSurveyAppearance, surveyLogic } from './surveyLogic' +import { defaultSurveyAppearance, surveyEventName, surveyLogic } from './surveyLogic' import { surveysLogic } from './surveysLogic' import { 
PageHeader } from 'lib/components/PageHeader' import { SurveyReleaseSummary } from './Survey' import { SurveyAppearance } from './SurveyAppearance' -import { SurveyQuestionType, SurveyType } from '~/types' +import { PropertyFilterType, PropertyOperator, Survey, SurveyQuestionType, SurveyType } from '~/types' import { SurveyAPIEditor } from './SurveyAPIEditor' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' import { IconOpenInNew } from 'lib/lemon-ui/icons' import { NodeKind } from '~/queries/schema' +import { dayjs } from 'lib/dayjs' +import { FEATURE_FLAGS } from 'lib/constants' +import { featureFlagLogic } from 'lib/logic/featureFlagLogic' export function SurveyView({ id }: { id: string }): JSX.Element { - const { - survey, - dataTableQuery, - surveyLoading, - surveyPlugin, - surveyMetricsQueries, - surveyDataVizQuery, - showSurveyAppWarning, - } = useValues(surveyLogic) + const { survey, surveyLoading, surveyPlugin, showSurveyAppWarning } = useValues(surveyLogic) // TODO: survey results logic // const { surveyImpressionsCount, surveyStartedCount, surveyCompletedCount } = useValues(surveyResultsLogic) const { editingSurvey, updateSurvey, launchSurvey, stopSurvey, archiveSurvey, resumeSurvey } = @@ -134,48 +129,7 @@ export function SurveyView({ id }: { id: string }): JSX.Element { ? { content: (
- {surveyMetricsQueries && ( -
-
- -
-
- -
-
- )} - {survey.questions[0].type === SurveyQuestionType.Rating && ( -
- -
- )} - {(survey.questions[0].type === SurveyQuestionType.SingleChoice || - survey.questions[0].type === SurveyQuestionType.MultipleChoice) && ( -
- {survey.questions[0].type === SurveyQuestionType.SingleChoice ? ( - - ) : ( - - )} -
- )} - {surveyLoading ? : } +
), key: 'results', @@ -305,7 +259,124 @@ export function SurveyView({ id }: { id: string }): JSX.Element { ) } +export function SurveyResult({ disableEventsTable }: { disableEventsTable?: boolean }): JSX.Element { + const { + survey, + dataTableQuery, + surveyLoading, + surveyMetricsQueries, + surveyRatingQuery, + surveyMultipleChoiceQuery, + } = useValues(surveyLogic) + const { featureFlags } = useValues(featureFlagLogic) + + return ( + <> + {surveyMetricsQueries && ( +
+
+ +
+
+ +
+
+ )} + {survey.questions[0].type === SurveyQuestionType.Rating && ( +
+ + {featureFlags[FEATURE_FLAGS.SURVEY_NPS_RESULTS] && survey.questions[0].scale === 10 && ( + <> + +

NPS Score

+ + + )} +
+ )} + {(survey.questions[0].type === SurveyQuestionType.SingleChoice || + survey.questions[0].type === SurveyQuestionType.MultipleChoice) && ( +
+ +
+ )} + {!disableEventsTable && (surveyLoading ? : )} + + ) +} + const OPT_IN_SNIPPET = `posthog.init('YOUR_PROJECT_API_KEY', { api_host: 'YOUR API HOST', opt_in_site_apps: true // <--- Add this line })` + +function SurveyNPSResults({ survey }: { survey: Survey }): JSX.Element { + return ( + + ) +} diff --git a/frontend/src/scenes/surveys/Surveys.tsx b/frontend/src/scenes/surveys/Surveys.tsx index 3d5fc423f40c7..3f18b2df4e154 100644 --- a/frontend/src/scenes/surveys/Surveys.tsx +++ b/frontend/src/scenes/surveys/Surveys.tsx @@ -112,17 +112,7 @@ export function Surveys(): JSX.Element { title: 'Status', width: 100, render: function Render(_, survey: Survey) { - const statusColors = { - running: 'success', - draft: 'default', - complete: 'completion', - } as Record - const status = getSurveyStatus(survey) - return ( - - {status.toUpperCase()} - - ) + return }, }, { @@ -243,3 +233,17 @@ export function Surveys(): JSX.Element {
) } + +export function StatusTag({ survey }: { survey: Survey }): JSX.Element { + const statusColors = { + running: 'success', + draft: 'default', + complete: 'completion', + } as Record + const status = getSurveyStatus(survey) + return ( + + {status.toUpperCase()} + + ) +} diff --git a/frontend/src/scenes/surveys/surveyLogic.tsx b/frontend/src/scenes/surveys/surveyLogic.tsx index 4e935af2022b6..9c4fe305a8b79 100644 --- a/frontend/src/scenes/surveys/surveyLogic.tsx +++ b/frontend/src/scenes/surveys/surveyLogic.tsx @@ -27,7 +27,6 @@ import { featureFlagLogic } from 'scenes/feature-flags/featureFlagLogic' export interface NewSurvey extends Pick< Survey, - | 'id' | 'name' | 'description' | 'type' @@ -40,6 +39,7 @@ export interface NewSurvey | 'archived' | 'appearance' > { + id: 'new' linked_flag_id: number | undefined targeting_flag_filters: Pick | undefined } @@ -55,7 +55,7 @@ export const defaultSurveyAppearance = { thankYouMessageHeader: 'Thank you for your feedback!', } -const NEW_SURVEY: NewSurvey = { +export const NEW_SURVEY: NewSurvey = { id: 'new', name: '', description: '', @@ -76,73 +76,6 @@ export const surveyEventName = 'survey sent' const SURVEY_RESPONSE_PROPERTY = '$survey_response' -export const getSurveyDataQuery = (survey: Survey): DataTableNode => { - const surveyDataQuery: DataTableNode = { - kind: NodeKind.DataTableNode, - source: { - kind: NodeKind.EventsQuery, - select: ['*', `properties.${SURVEY_RESPONSE_PROPERTY}`, 'timestamp', 'person'], - orderBy: ['timestamp DESC'], - where: [`event == 'survey sent' or event == '${survey.name} survey sent'`], - after: survey.created_at, - properties: [ - { - type: PropertyFilterType.Event, - key: '$survey_id', - operator: PropertyOperator.Exact, - value: survey.id, - }, - ], - }, - propertiesViaUrl: true, - showExport: true, - showReload: true, - showEventFilter: true, - showPropertyFilter: true, - } - return surveyDataQuery -} - -export const getSurveyMetricsQueries = (surveyId: string): 
SurveyMetricsQueries => { - const surveysShownHogqlQuery = `select count(distinct person.id) as 'survey shown' from events where event == 'survey shown' and properties.$survey_id == '${surveyId}'` - const surveysDismissedHogqlQuery = `select count(distinct person.id) as 'survey dismissed' from events where event == 'survey dismissed' and properties.$survey_id == '${surveyId}'` - return { - surveysShown: { - kind: NodeKind.DataTableNode, - source: { kind: NodeKind.HogQLQuery, query: surveysShownHogqlQuery }, - }, - surveysDismissed: { - kind: NodeKind.DataTableNode, - source: { kind: NodeKind.HogQLQuery, query: surveysDismissedHogqlQuery }, - }, - } -} - -export const getSurveyDataVizQuery = (survey: Survey): InsightVizNode => { - return { - kind: NodeKind.InsightVizNode, - source: { - kind: NodeKind.TrendsQuery, - dateRange: { - date_from: dayjs(survey.created_at).format('YYYY-MM-DD'), - date_to: dayjs().format('YYYY-MM-DD'), - }, - properties: [ - { - type: PropertyFilterType.Event, - key: '$survey_id', - operator: PropertyOperator.Exact, - value: survey.id, - }, - ], - series: [{ event: surveyEventName, kind: NodeKind.EventsNode }], - trendsFilter: { display: ChartDisplayType.ActionsBarValue }, - breakdown: { breakdown: '$survey_response', breakdown_type: 'event' }, - }, - showTable: true, - } -} - export interface SurveyLogicProps { id: string | 'new' } @@ -153,9 +86,9 @@ export interface SurveyMetricsQueries { } export const surveyLogic = kea([ - path(['scenes', 'surveys', 'surveyLogic']), props({} as SurveyLogicProps), key(({ id }) => id), + path((key) => ['scenes', 'surveys', 'surveyLogic', key]), connect(() => ({ actions: [ surveysLogic, @@ -179,10 +112,6 @@ export const surveyLogic = kea([ stopSurvey: true, archiveSurvey: true, resumeSurvey: true, - setDataTableQuery: (query: DataTableNode) => ({ query }), - setSurveyMetricsQueries: (surveyMetricsQueries: SurveyMetricsQueries) => ({ surveyMetricsQueries }), - setSurveyDataVizQuery: (surveyDataVizQuery: 
InsightVizNode) => ({ surveyDataVizQuery }), - setHasTargetingFlag: (hasTargetingFlag: boolean) => ({ hasTargetingFlag }), }), loaders(({ props, actions }) => ({ survey: { @@ -213,16 +142,6 @@ export const surveyLogic = kea([ }, })), listeners(({ actions }) => ({ - loadSurveySuccess: ({ survey }) => { - if (survey.start_date && survey.id !== 'new') { - actions.setDataTableQuery(getSurveyDataQuery(survey as Survey)) - actions.setSurveyMetricsQueries(getSurveyMetricsQueries(survey.id)) - actions.setSurveyDataVizQuery(getSurveyDataVizQuery(survey as Survey)) - } - if (survey.targeting_flag) { - actions.setHasTargetingFlag(true) - } - }, createSurveySuccess: ({ survey }) => { lemonToast.success(<>Survey {survey.name} created) actions.loadSurveys() @@ -237,8 +156,6 @@ export const surveyLogic = kea([ }, launchSurveySuccess: ({ survey }) => { lemonToast.success(<>Survey {survey.name} launched) - actions.setSurveyMetricsQueries(getSurveyMetricsQueries(survey.id)) - actions.setDataTableQuery(getSurveyDataQuery(survey)) actions.loadSurveys() actions.reportSurveyLaunched(survey) }, @@ -261,30 +178,6 @@ export const surveyLogic = kea([ editingSurvey: (_, { editing }) => editing, }, ], - dataTableQuery: [ - null as DataTableNode | null, - { - setDataTableQuery: (_, { query }) => query, - }, - ], - surveyMetricsQueries: [ - null as SurveyMetricsQueries | null, - { - setSurveyMetricsQueries: (_, { surveyMetricsQueries }) => surveyMetricsQueries, - }, - ], - surveyDataVizQuery: [ - null as InsightVizNode | null, - { - setSurveyDataVizQuery: (_, { surveyDataVizQuery }) => surveyDataVizQuery, - }, - ], - hasTargetingFlag: [ - false, - { - setHasTargetingFlag: (_, { hasTargetingFlag }) => hasTargetingFlag, - }, - ], }), selectors({ isSurveyRunning: [ @@ -320,6 +213,142 @@ export const surveyLogic = kea([ ) }, ], + dataTableQuery: [ + (s) => [s.survey], + (survey): DataTableNode | null => { + if (survey.id === 'new') { + return null + } + const createdAt = (survey as 
Survey).created_at + + return { + kind: NodeKind.DataTableNode, + source: { + kind: NodeKind.EventsQuery, + select: ['*', `properties.${SURVEY_RESPONSE_PROPERTY}`, 'timestamp', 'person'], + orderBy: ['timestamp DESC'], + where: [`event == 'survey sent' or event == '${survey.name} survey sent'`], + after: createdAt, + properties: [ + { + type: PropertyFilterType.Event, + key: '$survey_id', + operator: PropertyOperator.Exact, + value: survey.id, + }, + ], + }, + propertiesViaUrl: true, + showExport: true, + showReload: true, + showEventFilter: true, + showPropertyFilter: true, + showTimings: false, + } + }, + ], + surveyMetricsQueries: [ + (s) => [s.survey], + (survey): SurveyMetricsQueries | null => { + const surveyId = survey.id + if (surveyId === 'new') { + return null + } + const startDate = dayjs((survey as Survey).created_at).format('YYYY-MM-DD') + const endDate = survey.end_date + ? dayjs(survey.end_date).format('YYYY-MM-DD') + : dayjs().add(1, 'day').format('YYYY-MM-DD') + + const surveysShownHogqlQuery = `select count(distinct person.id) as 'survey shown' from events where event == 'survey shown' and properties.$survey_id == '${surveyId}' and timestamp >= '${startDate}' and timestamp <= '${endDate}' ` + const surveysDismissedHogqlQuery = `select count(distinct person.id) as 'survey dismissed' from events where event == 'survey dismissed' and properties.$survey_id == '${surveyId}' and timestamp >= '${startDate}' and timestamp <= '${endDate}'` + return { + surveysShown: { + kind: NodeKind.DataTableNode, + source: { + kind: NodeKind.HogQLQuery, + query: surveysShownHogqlQuery, + }, + showTimings: false, + }, + surveysDismissed: { + kind: NodeKind.DataTableNode, + source: { + kind: NodeKind.HogQLQuery, + query: surveysDismissedHogqlQuery, + }, + showTimings: false, + }, + } + }, + ], + surveyRatingQuery: [ + (s) => [s.survey], + (survey): InsightVizNode | null => { + if (survey.id === 'new') { + return null + } + const startDate = dayjs((survey as 
Survey).created_at).format('YYYY-MM-DD') + const endDate = survey.end_date + ? dayjs(survey.end_date).format('YYYY-MM-DD') + : dayjs().add(1, 'day').format('YYYY-MM-DD') + + return { + kind: NodeKind.InsightVizNode, + source: { + kind: NodeKind.TrendsQuery, + dateRange: { + date_from: startDate, + date_to: endDate, + }, + properties: [ + { + type: PropertyFilterType.Event, + key: '$survey_id', + operator: PropertyOperator.Exact, + value: survey.id, + }, + ], + series: [{ event: surveyEventName, kind: NodeKind.EventsNode }], + trendsFilter: { display: ChartDisplayType.ActionsBarValue }, + breakdown: { breakdown: '$survey_response', breakdown_type: 'event' }, + }, + showTable: true, + } + }, + ], + surveyMultipleChoiceQuery: [ + (s) => [s.survey], + (survey): DataTableNode | null => { + if (survey.id === 'new') { + return null + } + + const startDate = dayjs((survey as Survey).created_at).format('YYYY-MM-DD') + const endDate = survey.end_date + ? dayjs(survey.end_date).format('YYYY-MM-DD') + : dayjs().add(1, 'day').format('YYYY-MM-DD') + + const singleChoiceQuery = `select count(), properties.$survey_response as choice from events where event == 'survey sent' and properties.$survey_id == '${survey.id}' and timestamp >= '${startDate}' and timestamp <= '${endDate}' group by choice order by count() desc` + const multipleChoiceQuery = `select count(), arrayJoin(JSONExtractArrayRaw(properties, '$survey_response')) as choice from events where event == 'survey sent' and properties.$survey_id == '${survey.id}' and timestamp >= '${startDate}' and timestamp <= '${endDate}' group by choice order by count() desc` + return { + kind: NodeKind.DataTableNode, + source: { + kind: NodeKind.HogQLQuery, + query: + survey.questions[0].type === SurveyQuestionType.SingleChoice + ? 
singleChoiceQuery + : multipleChoiceQuery, + }, + showTimings: false, + } + }, + ], + hasTargetingFlag: [ + (s) => [s.survey], + (survey): boolean => { + return !!survey.targeting_flag || !!(survey.id === 'new' && survey.targeting_flag_filters) + }, + ], }), forms(({ actions, props, values }) => ({ survey: { diff --git a/frontend/src/scenes/urls.ts b/frontend/src/scenes/urls.ts index 2450187f8303d..d933d06f4e196 100644 --- a/frontend/src/scenes/urls.ts +++ b/frontend/src/scenes/urls.ts @@ -185,5 +185,4 @@ export const urls = { tab: 'notebooks', }).url, notebook: (shortId: string): string => `/notebooks/${shortId}`, - notebookEdit: (shortId: string): string => `/notebooks/${shortId}/edit`, } diff --git a/frontend/src/styles/utilities.scss b/frontend/src/styles/utilities.scss index 126d981427e89..745375f1c3f57 100644 --- a/frontend/src/styles/utilities.scss +++ b/frontend/src/styles/utilities.scss @@ -919,6 +919,13 @@ $decorations: underline, overline, line-through, no-underline; } } +.list-inside { + list-style-position: inside; +} +.list-outside { + list-style-position: outside; +} + .shadow { box-shadow: var(--shadow-elevation); } diff --git a/frontend/src/toolbar/button/HedgehogButton.tsx b/frontend/src/toolbar/button/HedgehogButton.tsx index 634ea3a38732e..44f1b20ae2d0b 100644 --- a/frontend/src/toolbar/button/HedgehogButton.tsx +++ b/frontend/src/toolbar/button/HedgehogButton.tsx @@ -45,6 +45,7 @@ export function HedgehogButton(): JSX.Element { onPositionChange={(actor) => { saveDragPosition(actor.x + SPRITE_SIZE * 0.5, -actor.y - SPRITE_SIZE * 0.5) }} + isDarkModeOn={false} /> )} diff --git a/frontend/src/types.ts b/frontend/src/types.ts index cf091c4c88296..de8b30192d4a6 100644 --- a/frontend/src/types.ts +++ b/frontend/src/types.ts @@ -1793,7 +1793,6 @@ export enum RecordingWindowFilter { export interface EditorFilterProps { query: InsightQueryNode - setQuery: (node: InsightQueryNode) => void insightProps: InsightLogicProps } @@ -2055,6 +2054,7 @@ export 
interface InsightLogicProps { doNotLoad?: boolean /** query when used as ad-hoc insight */ query?: InsightVizNode + setQuery?: (node: InsightVizNode) => void } export interface SetInsightOptions { @@ -2191,6 +2191,7 @@ export interface FeatureFlagType extends Omit + type: NotebookNodeType +} + export enum NotebookTarget { Popover = 'popover', Auto = 'auto', @@ -3093,6 +3097,8 @@ export type BatchExportDestinationS3 = { aws_secret_access_key: string exclude_events: string[] compression: string | null + encryption: string | null + kms_key_id: string | null } } diff --git a/latest_migrations.manifest b/latest_migrations.manifest index 84d604bfc1357..233b3d446d5cb 100644 --- a/latest_migrations.manifest +++ b/latest_migrations.manifest @@ -5,7 +5,7 @@ contenttypes: 0002_remove_content_type_name ee: 0015_add_verified_properties otp_static: 0002_throttling otp_totp: 0002_auto_20190420_0723 -posthog: 0347_add_bigquery_export_type +posthog: 0350_add_notebook_text_content sessions: 0001_initial social_django: 0010_uid_db_index two_factor: 0007_auto_20201201_1019 diff --git a/package.json b/package.json index 1fee283b008d2..3f8131541f4c5 100644 --- a/package.json +++ b/package.json @@ -73,7 +73,7 @@ "@monaco-editor/react": "4.4.6", "@posthog/plugin-scaffold": "^1.3.2", "@react-hook/size": "^2.1.2", - "@rrweb/types": "^2.0.0-alpha.9", + "@rrweb/types": "^2.0.0-alpha.11", "@sentry/react": "7.22.0", "@testing-library/dom": ">=7.21.4", "@tiptap/core": "^2.1.0-rc.12", @@ -125,7 +125,8 @@ "kea-window-values": "^3.0.0", "md5": "^2.3.0", "monaco-editor": "^0.39.0", - "posthog-js": "1.78.1", + "papaparse": "^5.4.1", + "posthog-js": "1.78.5", "posthog-js-lite": "2.0.0-alpha5", "prettier": "^2.8.8", "prop-types": "^15.7.2", @@ -154,7 +155,7 @@ "react-virtualized": "^9.22.5", "require-from-string": "^2.0.2", "resize-observer-polyfill": "^1.5.1", - "rrweb": "^2.0.0-alpha.9", + "rrweb": "^2.0.0-alpha.11", "sass": "^1.26.2", "use-debounce": "^9.0.3", "use-resize-observer": "^8.0.0", @@ 
-206,6 +207,7 @@ "@types/jest-image-snapshot": "^6.1.0", "@types/md5": "^2.3.0", "@types/node": "^18.11.9", + "@types/papaparse": "^5.3.8", "@types/pixelmatch": "^5.2.4", "@types/pngjs": "^6.0.1", "@types/query-selector-shadow-dom": "^1.0.0", diff --git a/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Hidden-Mobile-1-chromium-linux.png b/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Hidden-Mobile-1-chromium-linux.png index 3b185216c6362..8b8203a70dcf5 100644 Binary files a/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Hidden-Mobile-1-chromium-linux.png and b/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Hidden-Mobile-1-chromium-linux.png differ diff --git a/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Shown-Mobile-1-chromium-linux.png b/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Shown-Mobile-1-chromium-linux.png index 2ce1d7971c1e1..8b8203a70dcf5 100644 Binary files a/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Shown-Mobile-1-chromium-linux.png and b/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Shown-Mobile-1-chromium-linux.png differ diff --git a/plugin-server/functional_tests/webhooks.test.ts b/plugin-server/functional_tests/webhooks.test.ts index 0fb7155790034..82f1bfe9bf186 100644 --- a/plugin-server/functional_tests/webhooks.test.ts +++ b/plugin-server/functional_tests/webhooks.test.ts @@ -199,6 +199,7 @@ test.concurrent(`webhooks: fires zapier REST webhook`, async () => { properties: { $creator_event_uuid: eventUuid, $initial_current_url: 'http://localhost:8000', + $current_url: 'http://localhost:8000', email: 't@t.com', }, uuid: expect.any(String), @@ -208,6 +209,7 @@ test.concurrent(`webhooks: fires zapier REST 
webhook`, async () => { $sent_at: expect.any(String), $set: { email: 't@t.com', + $current_url: 'http://localhost:8000', }, $set_once: { $initial_current_url: 'http://localhost:8000', diff --git a/plugin-server/package.json b/plugin-server/package.json index be9bebdd9b2cc..e2d766f344bba 100644 --- a/plugin-server/package.json +++ b/plugin-server/package.json @@ -11,6 +11,7 @@ "start": "pnpm start:dist", "start:dist": "BASE_DIR=.. node dist/index.js", "start:dev": "NODE_ENV=dev BASE_DIR=.. nodemon --watch src/ --exec node -r @swc-node/register src/index.ts", + "start:devNoWatch": "NODE_ENV=dev BASE_DIR=.. node -r @swc-node/register src/index.ts", "build": "pnpm clean && pnpm compile", "clean": "rm -rf dist/*", "typescript:compile": "tsc -b", diff --git a/plugin-server/src/config/config.ts b/plugin-server/src/config/config.ts index ef98937b81bff..f3245ce62232e 100644 --- a/plugin-server/src/config/config.ts +++ b/plugin-server/src/config/config.ts @@ -7,6 +7,8 @@ import { KAFKA_EVENTS_PLUGIN_INGESTION_OVERFLOW, } from './kafka-topics' +export const DEFAULT_HTTP_SERVER_PORT = 6738 + export const defaultConfig = overrideWithEnv(getDefaultConfig()) export function getDefaultConfig(): PluginsServerConfig { @@ -44,6 +46,7 @@ export function getDefaultConfig(): PluginsServerConfig { KAFKA_SASL_PASSWORD: undefined, KAFKA_CLIENT_RACK: undefined, KAFKA_CONSUMPTION_USE_RDKAFKA: false, // Transitional setting, ignored for consumers that only support one library + KAFKA_CONSUMPTION_RDKAFKA_COOPERATIVE_REBALANCE: true, // If true, use the cooperative rebalance strategy, otherwise uses the default ('range,roundrobin') KAFKA_CONSUMPTION_MAX_BYTES: 10_485_760, // Default value for kafkajs KAFKA_CONSUMPTION_MAX_BYTES_PER_PARTITION: 1_048_576, // Default value for kafkajs, must be bigger than message size KAFKA_CONSUMPTION_MAX_WAIT_MS: 1_000, // Down from the 5s default for kafkajs @@ -74,6 +77,7 @@ export function getDefaultConfig(): PluginsServerConfig { SENTRY_DSN: null, 
SENTRY_PLUGIN_SERVER_TRACING_SAMPLE_RATE: 0, SENTRY_PLUGIN_SERVER_PROFILING_SAMPLE_RATE: 0, + HTTP_SERVER_PORT: DEFAULT_HTTP_SERVER_PORT, STATSD_HOST: null, STATSD_PORT: 8125, STATSD_PREFIX: 'plugin-server.', @@ -116,6 +120,7 @@ export function getDefaultConfig(): PluginsServerConfig { OBJECT_STORAGE_SECRET_ACCESS_KEY: 'object_storage_root_password', OBJECT_STORAGE_BUCKET: 'posthog', PLUGIN_SERVER_MODE: null, + PLUGIN_LOAD_SEQUENTIALLY: false, KAFKAJS_LOG_LEVEL: 'WARN', HISTORICAL_EXPORTS_ENABLED: true, HISTORICAL_EXPORTS_MAX_RETRY_COUNT: 15, @@ -126,6 +131,12 @@ export function getDefaultConfig(): PluginsServerConfig { USE_KAFKA_FOR_SCHEDULED_TASKS: true, CLOUD_DEPLOYMENT: 'default', // Used as a Sentry tag + STARTUP_PROFILE_DURATION_SECONDS: 300, // 5 minutes + STARTUP_PROFILE_CPU: false, + STARTUP_PROFILE_HEAP: false, + STARTUP_PROFILE_HEAP_INTERVAL: 512 * 1024, // default v8 value + STARTUP_PROFILE_HEAP_DEPTH: 16, // default v8 value + SESSION_RECORDING_KAFKA_HOSTS: undefined, SESSION_RECORDING_KAFKA_SECURITY_PROTOCOL: undefined, SESSION_RECORDING_KAFKA_BATCH_SIZE: 500, diff --git a/plugin-server/src/kafka/batch-consumer.ts b/plugin-server/src/kafka/batch-consumer.ts index a82aed8861098..03c9e2de6db37 100644 --- a/plugin-server/src/kafka/batch-consumer.ts +++ b/plugin-server/src/kafka/batch-consumer.ts @@ -1,11 +1,12 @@ import { GlobalConfig, KafkaConsumer, Message } from 'node-rdkafka-acosom' -import { exponentialBuckets, Histogram } from 'prom-client' +import { exponentialBuckets, Gauge, Histogram } from 'prom-client' import { status } from '../utils/status' import { createAdminClient, ensureTopicExists } from './admin' import { commitOffsetsForMessages, consumeMessages, + countPartitionsPerTopic, createKafkaConsumer, disconnectConsumer, instrumentConsumerMetrics, @@ -32,6 +33,7 @@ export const startBatchConsumer = async ({ topicCreationTimeoutMs, eachBatch, autoCommit = true, + cooperativeRebalance = true, queuedMinMessages = 100000, }: { connectionConfig: 
GlobalConfig @@ -47,6 +49,7 @@ export const startBatchConsumer = async ({ topicCreationTimeoutMs: number eachBatch: (messages: Message[]) => Promise autoCommit?: boolean + cooperativeRebalance?: boolean queuedMinMessages?: number }): Promise => { // Starts consuming from `topic` in batches of `fetchBatchSize` messages, @@ -112,12 +115,12 @@ export const startBatchConsumer = async ({ // https://www.confluent.io/en-gb/blog/incremental-cooperative-rebalancing-in-kafka/ // for details on the advantages of this rebalancing strategy as well as // how it works. - 'partition.assignment.strategy': 'cooperative-sticky', + 'partition.assignment.strategy': cooperativeRebalance ? 'cooperative-sticky' : 'range,roundrobin', rebalance_cb: true, offset_commit_cb: true, }) - instrumentConsumerMetrics(consumer, groupId) + instrumentConsumerMetrics(consumer, groupId, cooperativeRebalance) let isShuttingDown = false let lastLoopTime = Date.now() @@ -181,6 +184,10 @@ export const startBatchConsumer = async ({ continue } + for (const [topic, count] of countPartitionsPerTopic(consumer.assignments())) { + kafkaAbsolutePartitionCount.labels({ topic }).set(count) + } + status.debug('🔁', 'main_loop_consumed', { messagesLength: messages.length }) if (!messages.length) { status.debug('🔁', 'main_loop_empty_batch', { cause: 'empty' }) @@ -278,3 +285,9 @@ const consumedMessageSizeBytes = new Histogram({ labelNames: ['topic', 'groupId', 'messageType'], buckets: exponentialBuckets(1, 8, 4).map((bucket) => bucket * 1024), }) + +const kafkaAbsolutePartitionCount = new Gauge({ + name: 'kafka_absolute_partition_count', + help: 'Number of partitions assigned to this consumer. 
(Absolute value from the consumer state.)', + labelNames: ['topic'], +}) diff --git a/plugin-server/src/kafka/consumer.ts b/plugin-server/src/kafka/consumer.ts index f3b3a91d2be44..62b8e951ebc9f 100644 --- a/plugin-server/src/kafka/consumer.ts +++ b/plugin-server/src/kafka/consumer.ts @@ -1,4 +1,5 @@ import { + Assignment, ClientMetrics, CODES, ConsumerGlobalConfig, @@ -9,7 +10,7 @@ import { TopicPartitionOffset, } from 'node-rdkafka-acosom' -import { latestOffsetTimestampGauge } from '../main/ingestion-queues/metrics' +import { kafkaRebalancePartitionCount, latestOffsetTimestampGauge } from '../main/ingestion-queues/metrics' import { status } from '../utils/status' export const createKafkaConsumer = async (config: ConsumerGlobalConfig) => { @@ -54,7 +55,24 @@ export const createKafkaConsumer = async (config: ConsumerGlobalConfig) => { }) }) } -export const instrumentConsumerMetrics = (consumer: RdKafkaConsumer, groupId: string) => { + +export function countPartitionsPerTopic(assignments: Assignment[]): Map { + const partitionsPerTopic = new Map() + for (const assignment of assignments) { + if (partitionsPerTopic.has(assignment.topic)) { + partitionsPerTopic.set(assignment.topic, partitionsPerTopic.get(assignment.topic) + 1) + } else { + partitionsPerTopic.set(assignment.topic, 1) + } + } + return partitionsPerTopic +} + +export const instrumentConsumerMetrics = ( + consumer: RdKafkaConsumer, + groupId: string, + cooperativeRebalance: boolean +) => { // For each message consumed, we record the latest timestamp processed for // each partition assigned to this consumer group member. This consumer // should only provide metrics for the partitions that are assigned to it, @@ -79,6 +97,7 @@ export const instrumentConsumerMetrics = (consumer: RdKafkaConsumer, groupId: st // // TODO: add other relevant metrics here // TODO: expose the internal librdkafka metrics as well. + const strategyString = cooperativeRebalance ? 
'cooperative' : 'eager' consumer.on('rebalance', (error: LibrdKafkaError, assignments: TopicPartition[]) => { /** * see https://github.com/Blizzard/node-rdkafka#rebalancing errors are used to signal @@ -88,9 +107,23 @@ export const instrumentConsumerMetrics = (consumer: RdKafkaConsumer, groupId: st * And when the balancing is completed the new assignments are received with ERR__ASSIGN_PARTITIONS */ if (error.code === CODES.ERRORS.ERR__ASSIGN_PARTITIONS) { - status.info('📝️', 'librdkafka rebalance, partitions assigned', { assignments }) + status.info('📝️', `librdkafka ${strategyString} rebalance, partitions assigned`, { assignments }) + for (const [topic, count] of countPartitionsPerTopic(assignments)) { + if (cooperativeRebalance) { + kafkaRebalancePartitionCount.labels({ topic: topic }).inc(count) + } else { + kafkaRebalancePartitionCount.labels({ topic: topic }).set(count) + } + } } else if (error.code === CODES.ERRORS.ERR__REVOKE_PARTITIONS) { - status.info('📝️', 'librdkafka rebalance started, partitions revoked', { assignments }) + status.info('📝️', `librdkafka ${strategyString} rebalance started, partitions revoked`, { assignments }) + for (const [topic, count] of countPartitionsPerTopic(assignments)) { + if (cooperativeRebalance) { + kafkaRebalancePartitionCount.labels({ topic: topic }).dec(count) + } else { + kafkaRebalancePartitionCount.labels({ topic: topic }).set(count) + } + } } else { // We had a "real" error status.error('⚠️', 'rebalance_error', { error }) diff --git a/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-onevent.ts b/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-onevent.ts index a97d034778ac4..4d12925f0ce6b 100644 --- a/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-onevent.ts +++ b/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-onevent.ts @@ -4,20 +4,18 @@ import { EachBatchPayload, KafkaMessage } from 'kafkajs' import { RawClickHouseEvent } from 
'../../../types' import { convertToIngestionEvent } from '../../../utils/event' import { status } from '../../../utils/status' -import { groupIntoBatches } from '../../../utils/utils' import { runInstrumentedFunction } from '../../utils' import { KafkaJSIngestionConsumer } from '../kafka-queue' import { eventDroppedCounter, latestOffsetTimestampGauge } from '../metrics' +import { eachBatchHandlerHelper } from './each-batch-webhooks' // Must require as `tsc` strips unused `import` statements and just requiring this seems to init some globals require('@sentry/tracing') export async function eachMessageAppsOnEventHandlers( - message: KafkaMessage, + clickHouseEvent: RawClickHouseEvent, queue: KafkaJSIngestionConsumer ): Promise { - const clickHouseEvent = JSON.parse(message.value!.toString()) as RawClickHouseEvent - const pluginConfigs = queue.pluginsServer.pluginConfigsPerTeam.get(clickHouseEvent.team_id) if (pluginConfigs) { // Elements parsing can be extremely slow, so we skip it for some plugins @@ -50,7 +48,14 @@ export async function eachBatchAppsOnEventHandlers( payload: EachBatchPayload, queue: KafkaJSIngestionConsumer ): Promise { - await eachBatch(payload, queue, eachMessageAppsOnEventHandlers, groupIntoBatches, 'async_handlers_on_event') + await eachBatchHandlerHelper( + payload, + (teamId) => queue.pluginsServer.pluginConfigsPerTeam.has(teamId), + (event) => eachMessageAppsOnEventHandlers(event, queue), + queue.pluginsServer.statsd, + queue.pluginsServer.WORKER_CONCURRENCY * queue.pluginsServer.TASKS_PER_WORKER, + 'on_event' + ) } export async function eachBatch( diff --git a/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-webhooks.ts b/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-webhooks.ts index 427297a613b1b..fb671f0cd9633 100644 --- a/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-webhooks.ts +++ b/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-webhooks.ts @@ -17,10 
+17,10 @@ import { eventDroppedCounter, latestOffsetTimestampGauge } from '../metrics' require('@sentry/tracing') // exporting only for testing -export function groupIntoBatchesWebhooks( +export function groupIntoBatchesByUsage( array: KafkaMessage[], batchSize: number, - actionMatcher: ActionMatcher + shouldProcess: (teamId: number) => boolean ): { eventBatch: RawClickHouseEvent[]; lastOffset: string; lastTimestamp: string }[] { // Most events will not trigger a webhook call, so we want to filter them out as soon as possible // to achieve the highest effective concurrency when executing the actual HTTP calls. @@ -32,7 +32,7 @@ export function groupIntoBatchesWebhooks( let currentCount = 0 array.forEach((message, index) => { const clickHouseEvent = JSON.parse(message.value!.toString()) as RawClickHouseEvent - if (actionMatcher.hasWebhooks(clickHouseEvent.team_id)) { + if (shouldProcess(clickHouseEvent.team_id)) { currentBatch.push(clickHouseEvent) currentCount++ } else { @@ -58,18 +58,36 @@ export async function eachBatchWebhooksHandlers( hookCannon: HookCommander, statsd: StatsD | undefined, concurrency: number +): Promise { + await eachBatchHandlerHelper( + payload, + (teamId) => actionMatcher.hasWebhooks(teamId), + (event) => eachMessageWebhooksHandlers(event, actionMatcher, hookCannon, statsd), + statsd, + concurrency, + 'webhooks' + ) +} + +export async function eachBatchHandlerHelper( + payload: EachBatchPayload, + shouldProcess: (teamId: number) => boolean, + eachMessageHandler: (event: RawClickHouseEvent) => Promise, + statsd: StatsD | undefined, + concurrency: number, + stats_key: string ): Promise { // similar to eachBatch function in each-batch.ts, but without the dependency on the KafkaJSIngestionConsumer // & handling the different batching return type - const key = 'async_handlers_webhooks' + const key = `async_handlers_${stats_key}` const batchStartTimer = new Date() const loggingKey = `each_batch_${key}` const { batch, resolveOffset, heartbeat, 
commitOffsetsIfNecessary, isRunning, isStale }: EachBatchPayload = payload - const transaction = Sentry.startTransaction({ name: `eachBatchWebhooks` }) + const transaction = Sentry.startTransaction({ name: `eachBatch${stats_key}` }) try { - const batchesWithOffsets = groupIntoBatchesWebhooks(batch.messages, concurrency, actionMatcher) + const batchesWithOffsets = groupIntoBatchesByUsage(batch.messages, concurrency, shouldProcess) statsd?.histogram('ingest_event_batching.input_length', batch.messages.length, { key: key }) statsd?.histogram('ingest_event_batching.batch_count', batchesWithOffsets.length, { key: key }) @@ -88,9 +106,7 @@ export async function eachBatchWebhooksHandlers( } await Promise.all( - eventBatch.map((event: RawClickHouseEvent) => - eachMessageWebhooksHandlers(event, actionMatcher, hookCannon, statsd).finally(() => heartbeat()) - ) + eventBatch.map((event: RawClickHouseEvent) => eachMessageHandler(event).finally(() => heartbeat())) ) resolveOffset(lastOffset) diff --git a/plugin-server/src/main/ingestion-queues/kafka-queue.ts b/plugin-server/src/main/ingestion-queues/kafka-queue.ts index da51173e0507f..7989efd4b356a 100644 --- a/plugin-server/src/main/ingestion-queues/kafka-queue.ts +++ b/plugin-server/src/main/ingestion-queues/kafka-queue.ts @@ -255,6 +255,7 @@ export class IngestionConsumer { consumerMaxWaitMs: this.pluginsServer.KAFKA_CONSUMPTION_MAX_WAIT_MS, fetchBatchSize: 500, topicCreationTimeoutMs: this.pluginsServer.KAFKA_TOPIC_CREATION_TIMEOUT_MS, + cooperativeRebalance: this.pluginsServer.KAFKA_CONSUMPTION_RDKAFKA_COOPERATIVE_REBALANCE, eachBatch: (payload) => this.eachBatchConsumer(payload), }) this.consumerReady = true diff --git a/plugin-server/src/main/ingestion-queues/metrics.ts b/plugin-server/src/main/ingestion-queues/metrics.ts index 97188247cbefa..099832e1ea14c 100644 --- a/plugin-server/src/main/ingestion-queues/metrics.ts +++ b/plugin-server/src/main/ingestion-queues/metrics.ts @@ -2,6 +2,12 @@ import { Counter, Gauge } from 
'prom-client' +export const kafkaRebalancePartitionCount = new Gauge({ + name: 'kafka_rebalance_partition_count', + help: 'Number of partitions assigned to this consumer. (Calculated during rebalance events.)', + labelNames: ['topic'], +}) + export const latestOffsetTimestampGauge = new Gauge({ name: 'latest_processed_timestamp_ms', help: 'Timestamp of the latest offset that has been committed.', diff --git a/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts b/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts index bf0a242496fd3..c9dacf1fabdef 100644 --- a/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts +++ b/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts @@ -105,16 +105,6 @@ export class ReplayEventsIngester { return drop('producer_not_ready') } - if (event.replayIngestionConsumer !== 'v2') { - eventDroppedCounter - .labels({ - event_type: 'session_recordings_replay_events', - drop_cause: 'not_target_consumer', - }) - .inc() - return - } - if ( await this.offsetHighWaterMarker.isBelowHighWaterMark( event.metadata, diff --git a/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v2.ts b/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v2.ts index 3a6e9b291c602..8e0473df357fe 100644 --- a/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v2.ts +++ b/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v2.ts @@ -279,7 +279,6 @@ export class SessionRecordingIngesterV2 { session_id: event.properties?.$session_id, window_id: event.properties?.$window_id, events: event.properties.$snapshot_items, - replayIngestionConsumer: event.properties?.$snapshot_consumer ?? 
'v1', } return recordingMessage diff --git a/plugin-server/src/main/ingestion-queues/session-recording/types.ts b/plugin-server/src/main/ingestion-queues/session-recording/types.ts index c29c1ad81f1db..6bff13bbde468 100644 --- a/plugin-server/src/main/ingestion-queues/session-recording/types.ts +++ b/plugin-server/src/main/ingestion-queues/session-recording/types.ts @@ -14,8 +14,6 @@ export type IncomingRecordingMessage = { session_id: string window_id?: string events: RRWebEvent[] - // NOTE: This is only for migrating from one consumer to the other - replayIngestionConsumer: 'v1' | 'v2' } // This is the incoming message from Kafka diff --git a/plugin-server/src/main/pluginsServer.ts b/plugin-server/src/main/pluginsServer.ts index a89355cb51c98..08fc4c6ed0e66 100644 --- a/plugin-server/src/main/pluginsServer.ts +++ b/plugin-server/src/main/pluginsServer.ts @@ -1,10 +1,12 @@ import * as Sentry from '@sentry/node' +import fs from 'fs' import { Server } from 'http' import { CompressionCodecs, CompressionTypes, Consumer, KafkaJSProtocolError } from 'kafkajs' // @ts-expect-error no type definitions import SnappyCodec from 'kafkajs-snappy' import * as schedule from 'node-schedule' import { Counter } from 'prom-client' +import v8Profiler from 'v8-profiler-next' import { getPluginServerCapabilities } from '../capabilities' import { defaultConfig, sessionRecordingConsumerConfig } from '../config/config' @@ -63,6 +65,7 @@ export async function startPluginsServer( status.updatePrompt(serverConfig.PLUGIN_SERVER_MODE) status.info('ℹ️', `${serverConfig.WORKER_CONCURRENCY} workers, ${serverConfig.TASKS_PER_WORKER} tasks per worker`) + runStartupProfiles(serverConfig) // Structure containing initialized clients for Postgres, Kafka, Redis, etc. 
let hub: Hub | undefined @@ -444,7 +447,7 @@ export async function startPluginsServer( } if (capabilities.http) { - httpServer = createHttpServer(healthChecks, analyticsEventsIngestionConsumer) + httpServer = createHttpServer(serverConfig.HTTP_SERVER_PORT, healthChecks, analyticsEventsIngestionConsumer) } // If session recordings consumer is defined, then join it. If join @@ -508,3 +511,26 @@ const kafkaProtocolErrors = new Counter({ help: 'Kafka protocol errors encountered, by type', labelNames: ['type', 'code'], }) + +function runStartupProfiles(config: PluginsServerConfig) { + if (config.STARTUP_PROFILE_CPU) { + status.info('🩺', `Collecting cpu profile...`) + v8Profiler.setGenerateType(1) + v8Profiler.startProfiling('startup', true) + setTimeout(() => { + const profile = v8Profiler.stopProfiling('startup') + fs.writeFileSync('./startup.cpuprofile', JSON.stringify(profile)) + status.info('🩺', `Wrote cpu profile to disk`) + profile.delete() + }, config.STARTUP_PROFILE_DURATION_SECONDS * 1000) + } + if (config.STARTUP_PROFILE_HEAP) { + status.info('🩺', `Collecting heap profile...`) + v8Profiler.startSamplingHeapProfiling(config.STARTUP_PROFILE_HEAP_INTERVAL, config.STARTUP_PROFILE_HEAP_DEPTH) + setTimeout(() => { + const profile = v8Profiler.stopSamplingHeapProfiling() + fs.writeFileSync('./startup.heapprofile', JSON.stringify(profile)) + status.info('🩺', `Wrote heap profile to disk`) + }, config.STARTUP_PROFILE_DURATION_SECONDS * 1000) + } +} diff --git a/plugin-server/src/main/services/http-server.ts b/plugin-server/src/main/services/http-server.ts index bccee47d21e2f..89716d23366eb 100644 --- a/plugin-server/src/main/services/http-server.ts +++ b/plugin-server/src/main/services/http-server.ts @@ -5,13 +5,12 @@ import * as prometheus from 'prom-client' import { status } from '../../utils/status' -export const HTTP_SERVER_PORT = 6738 - prometheus.collectDefaultMetrics() const v8Profiler = require('v8-profiler-next') v8Profiler.setGenerateType(1) export function 
createHttpServer( + port: number, healthChecks: { [service: string]: () => Promise | boolean }, analyticsEventsIngestionConsumer?: KafkaJSIngestionConsumer | IngestionConsumer ): Server { @@ -47,7 +46,7 @@ export function createHttpServer( // } // } const checkResults = await Promise.all( - // Note that we do not ues `Promise.allSettled` here so we can + // Note that we do not use `Promise.allSettled` here so we can // assume that all promises have resolved. If there was a // rejected promise, the http server should catch it and return // a 500 status code. @@ -118,8 +117,8 @@ export function createHttpServer( } }) - server.listen(HTTP_SERVER_PORT, () => { - status.info('🩺', `Status server listening on port ${HTTP_SERVER_PORT}`) + server.listen(port, () => { + status.info('🩺', `Status server listening on port ${port}`) }) return server @@ -155,8 +154,13 @@ function exportProfile(req: IncomingMessage, res: ServerResponse) { }, durationSeconds * 1000) break case 'heap': + // Additional params for sampling heap profile, higher precision means bigger profile. + // Defaults are taken from https://v8.github.io/api/head/classv8_1_1HeapProfiler.html + const interval = url.searchParams.get('interval') ? parseInt(url.searchParams.get('interval')!) : 512 * 1024 + const depth = url.searchParams.get('depth') ? parseInt(url.searchParams.get('depth')!) 
: 16 + sendHeaders('heapprofile') - v8Profiler.startSamplingHeapProfiling() + v8Profiler.startSamplingHeapProfiling(interval, depth) setTimeout(() => { outputProfileResult(res, type, v8Profiler.stopSamplingHeapProfiling()) }, durationSeconds * 1000) diff --git a/plugin-server/src/types.ts b/plugin-server/src/types.ts index 62463957ad249..9cc7fbfa21687 100644 --- a/plugin-server/src/types.ts +++ b/plugin-server/src/types.ts @@ -21,7 +21,7 @@ import { VM } from 'vm2' import { ObjectStorage } from './main/services/object_storage' import { DB } from './utils/db/db' import { KafkaProducerWrapper } from './utils/db/kafka-producer-wrapper' -import { PostgresRouter } from './utils/db/postgres' /** Re-export Element from scaffolding, for backwards compat. */ +import { PostgresRouter } from './utils/db/postgres' import { UUID } from './utils/utils' import { AppMetrics } from './worker/ingestion/app-metrics' import { EventPipelineResult } from './worker/ingestion/event-pipeline/runner' @@ -33,8 +33,7 @@ import { RootAccessManager } from './worker/vm/extensions/helpers/root-acess-man import { LazyPluginVM } from './worker/vm/lazy' import { PromiseManager } from './worker/vm/promise-manager' -/** Re-export Element from scaffolding, for backwards compat. */ -export { Element } from '@posthog/plugin-scaffold' +export { Element } from '@posthog/plugin-scaffold' // Re-export Element from scaffolding, for backwards compat. 
type Brand = K & { __brand: T } @@ -130,6 +129,7 @@ export interface PluginsServerConfig { KAFKA_SASL_PASSWORD: string | undefined KAFKA_CLIENT_RACK: string | undefined KAFKA_CONSUMPTION_USE_RDKAFKA: boolean + KAFKA_CONSUMPTION_RDKAFKA_COOPERATIVE_REBALANCE: boolean KAFKA_CONSUMPTION_MAX_BYTES: number KAFKA_CONSUMPTION_MAX_BYTES_PER_PARTITION: number KAFKA_CONSUMPTION_MAX_WAIT_MS: number // fetch.wait.max.ms rdkafka parameter @@ -151,6 +151,7 @@ export interface PluginsServerConfig { SENTRY_DSN: string | null SENTRY_PLUGIN_SERVER_TRACING_SAMPLE_RATE: number // Rate of tracing in plugin server (between 0 and 1) SENTRY_PLUGIN_SERVER_PROFILING_SAMPLE_RATE: number // Rate of profiling in plugin server (between 0 and 1) + HTTP_SERVER_PORT: number STATSD_HOST: string | null STATSD_PORT: number STATSD_PREFIX: string @@ -189,6 +190,7 @@ export interface PluginsServerConfig { OBJECT_STORAGE_SECRET_ACCESS_KEY: string OBJECT_STORAGE_BUCKET: string // the object storage bucket name PLUGIN_SERVER_MODE: PluginServerMode | null + PLUGIN_LOAD_SEQUENTIALLY: boolean // could help with reducing memory usage spikes on startup KAFKAJS_LOG_LEVEL: 'NOTHING' | 'DEBUG' | 'INFO' | 'WARN' | 'ERROR' HISTORICAL_EXPORTS_ENABLED: boolean // enables historical exports for export apps HISTORICAL_EXPORTS_MAX_RETRY_COUNT: number @@ -201,6 +203,13 @@ export interface PluginsServerConfig { EVENT_OVERFLOW_BUCKET_REPLENISH_RATE: number CLOUD_DEPLOYMENT: string + // dump profiles to disk, covering the first N seconds of runtime + STARTUP_PROFILE_DURATION_SECONDS: number + STARTUP_PROFILE_CPU: boolean + STARTUP_PROFILE_HEAP: boolean + STARTUP_PROFILE_HEAP_INTERVAL: number + STARTUP_PROFILE_HEAP_DEPTH: number + // local directory might be a volume mount or a directory on disk (e.g. 
in local dev) SESSION_RECORDING_LOCAL_DIRECTORY: string SESSION_RECORDING_MAX_BUFFER_AGE_SECONDS: number diff --git a/plugin-server/src/utils/db/hub.ts b/plugin-server/src/utils/db/hub.ts index 710a163752a6b..4e37d8a5cd715 100644 --- a/plugin-server/src/utils/db/hub.ts +++ b/plugin-server/src/utils/db/hub.ts @@ -91,7 +91,6 @@ export async function createHub( : undefined, rejectUnauthorized: serverConfig.CLICKHOUSE_CA ? false : undefined, }) - await clickhouse.querying('SELECT 1') // test that the connection works status.info('👍', `ClickHouse ready`) status.info('🤔', `Connecting to Kafka...`) diff --git a/plugin-server/src/utils/db/utils.ts b/plugin-server/src/utils/db/utils.ts index 49db8914194f6..9e4eb0a3c11b7 100644 --- a/plugin-server/src/utils/db/utils.ts +++ b/plugin-server/src/utils/db/utils.ts @@ -39,7 +39,22 @@ export function timeoutGuard( }, timeout) } -const campaignParams = new Set([ +const eventToPersonProperties = new Set([ + // mobile params + '$app_build', + '$app_name', + '$app_namespace', + '$app_version', + // web params + '$browser', + '$browser_version', + '$device_type', + '$current_url', + '$pathname', + '$os', + '$referring_domain', + '$referrer', + // campaign params 'utm_source', 'utm_medium', 'utm_campaign', @@ -50,31 +65,29 @@ const campaignParams = new Set([ 'fbclid', 'msclkid', ]) -const initialParams = new Set([ - '$browser', - '$browser_version', - '$device_type', - '$current_url', - '$pathname', - '$os', - '$referring_domain', - '$referrer', -]) -const combinedParams = new Set([...campaignParams, ...initialParams]) /** If we get new UTM params, make sure we set those **/ export function personInitialAndUTMProperties(properties: Properties): Properties { const propertiesCopy = { ...properties } - const maybeSet = Object.entries(properties).filter(([key]) => campaignParams.has(key)) - const maybeSetInitial = Object.entries(properties) - .filter(([key]) => combinedParams.has(key)) - .map(([key, value]) => [`$initial_${key.replace('$', 
'')}`, value]) - if (Object.keys(maybeSet).length > 0) { + const propertiesForPerson: [string, any][] = Object.entries(properties).filter(([key]) => + eventToPersonProperties.has(key) + ) + + // all potential params are checked for $initial_ values and added to $set_once + const maybeSetOnce: [string, any][] = propertiesForPerson.map(([key, value]) => [ + `$initial_${key.replace('$', '')}`, + value, + ]) + + // all found are also then added to $set + const maybeSet: [string, any][] = propertiesForPerson + + if (maybeSet.length > 0) { propertiesCopy.$set = { ...(properties.$set || {}), ...Object.fromEntries(maybeSet) } } - if (Object.keys(maybeSetInitial).length > 0) { - propertiesCopy.$set_once = { ...(properties.$set_once || {}), ...Object.fromEntries(maybeSetInitial) } + if (maybeSetOnce.length > 0) { + propertiesCopy.$set_once = { ...(properties.$set_once || {}), ...Object.fromEntries(maybeSetOnce) } } return propertiesCopy } diff --git a/plugin-server/src/utils/utils.ts b/plugin-server/src/utils/utils.ts index 69c56640bf886..aace016721449 100644 --- a/plugin-server/src/utils/utils.ts +++ b/plugin-server/src/utils/utils.ts @@ -312,14 +312,6 @@ export function escapeClickHouseString(string: string): string { return string.replace(/\\/g, '\\\\').replace(/'/g, "\\'") } -export function groupIntoBatches(array: T[], batchSize: number): T[][] { - const batches = [] - for (let i = 0; i < array.length; i += batchSize) { - batches.push(array.slice(i, i + batchSize)) - } - return batches -} - /** Standardize JS code used internally to form without extraneous indentation. Template literal function. 
*/ export function code(strings: TemplateStringsArray): string { const stringsConcat = strings.join('…') diff --git a/plugin-server/src/worker/ingestion/person-state.ts b/plugin-server/src/worker/ingestion/person-state.ts index e5f1327895cfa..72a82a07d0aec 100644 --- a/plugin-server/src/worker/ingestion/person-state.ts +++ b/plugin-server/src/worker/ingestion/person-state.ts @@ -17,9 +17,17 @@ import { castTimestampOrNow, UUIDT } from '../../utils/utils' import { captureIngestionWarning } from './utils' const MAX_FAILED_PERSON_MERGE_ATTEMPTS = 3 + +export const mergeFinalFailuresCounter = new Counter({ + name: 'person_merge_final_failure_total', + help: 'Number of person merge final failures.', +}) + +// used to prevent identify from being used with generic IDs +// that we can safely assume stem from a bug or mistake // used to prevent identify from being used with generic IDs // that we can safely assume stem from a bug or mistake -const CASE_INSENSITIVE_ILLEGAL_IDS = new Set([ +const BARE_CASE_INSENSITIVE_ILLEGAL_IDS = [ 'anonymous', 'guest', 'distinctid', @@ -30,17 +38,34 @@ const CASE_INSENSITIVE_ILLEGAL_IDS = new Set([ 'undefined', 'true', 'false', -]) - -export const mergeFinalFailuresCounter = new Counter({ - name: 'person_merge_final_failure_total', - help: 'Number of person merge final failures.', -}) - -const CASE_SENSITIVE_ILLEGAL_IDS = new Set(['[object Object]', 'NaN', 'None', 'none', 'null', '0', 'undefined']) +] + +const BARE_CASE_SENSITIVE_ILLEGAL_IDS = ['[object Object]', 'NaN', 'None', 'none', 'null', '0', 'undefined'] + +// we have seen illegal ids received but wrapped in double quotes +// to protect ourselves from this we'll add the single- and double-quoted versions of the illegal ids +const singleQuoteIds = (ids: string[]) => ids.map((id) => `'${id}'`) +const doubleQuoteIds = (ids: string[]) => ids.map((id) => `"${id}"`) + +// some ids are illegal regardless of casing +// while others are illegal only when cased +// so, for example, we want to 
forbid `NaN` but not `nan` +// but, we will forbid `uNdEfInEd` and `undefined` +const CASE_INSENSITIVE_ILLEGAL_IDS = new Set( + BARE_CASE_INSENSITIVE_ILLEGAL_IDS.concat(singleQuoteIds(BARE_CASE_INSENSITIVE_ILLEGAL_IDS)).concat( + doubleQuoteIds(BARE_CASE_INSENSITIVE_ILLEGAL_IDS) + ) +) + +const CASE_SENSITIVE_ILLEGAL_IDS = new Set( + BARE_CASE_SENSITIVE_ILLEGAL_IDS.concat(singleQuoteIds(BARE_CASE_SENSITIVE_ILLEGAL_IDS)).concat( + doubleQuoteIds(BARE_CASE_SENSITIVE_ILLEGAL_IDS) + ) +) const isDistinctIdIllegal = (id: string): boolean => { - return id.trim() === '' || CASE_INSENSITIVE_ILLEGAL_IDS.has(id.toLowerCase()) || CASE_SENSITIVE_ILLEGAL_IDS.has(id) + const trimmed = id.trim() + return trimmed === '' || CASE_INSENSITIVE_ILLEGAL_IDS.has(id.toLowerCase()) || CASE_SENSITIVE_ILLEGAL_IDS.has(id) } // This class is responsible for creating/updating a single person through the process-event pipeline @@ -245,7 +270,7 @@ export class PersonState { this.teamId, this.timestamp ) - } else if (this.event.event === '$identify' && this.eventProperties['$anon_distinct_id']) { + } else if (this.event.event === '$identify' && '$anon_distinct_id' in this.eventProperties) { return await this.merge( String(this.eventProperties['$anon_distinct_id']), this.distinctId, diff --git a/plugin-server/src/worker/ingestion/process-event.ts b/plugin-server/src/worker/ingestion/process-event.ts index 82de215adf5e2..44327a6a8bfd5 100644 --- a/plugin-server/src/worker/ingestion/process-event.ts +++ b/plugin-server/src/worker/ingestion/process-event.ts @@ -272,7 +272,7 @@ export interface SummarizedSessionRecordingEvent { team_id: number distinct_id: string session_id: string - first_url: string | undefined + first_url: string | null click_count: number keypress_count: number mouse_activity_count: number @@ -281,6 +281,8 @@ export interface SummarizedSessionRecordingEvent { console_warn_count: number console_error_count: number size: number + event_count: number + message_count: number } export 
const createSessionReplayEvent = ( @@ -311,7 +313,7 @@ export const createSessionReplayEvent = ( let consoleLogCount = 0 let consoleWarnCount = 0 let consoleErrorCount = 0 - let url: string | undefined = undefined + let url: string | null = null events.forEach((event) => { if (event.type === 3) { mouseActivity += 1 @@ -322,7 +324,7 @@ export const createSessionReplayEvent = ( keypressCount += 1 } } - if (!!event.data?.href?.trim().length && url === undefined) { + if (url === null && !!event.data?.href?.trim().length) { url = event.data.href } if (event.type === 6 && event.data?.plugin === 'rrweb/console@1') { @@ -339,22 +341,26 @@ export const createSessionReplayEvent = ( const activeTime = activeMilliseconds(events) + // NB forces types to be correct e.g. by truncating or rounding + // to ensure we don't send floats when we should send an integer const data: SummarizedSessionRecordingEvent = { uuid, team_id: team_id, - distinct_id: distinct_id, + distinct_id: String(distinct_id), session_id: session_id, first_timestamp: timestamps[0], last_timestamp: timestamps[timestamps.length - 1], - click_count: clickCount, - keypress_count: keypressCount, - mouse_activity_count: mouseActivity, + click_count: Math.trunc(clickCount), + keypress_count: Math.trunc(keypressCount), + mouse_activity_count: Math.trunc(mouseActivity), first_url: url, - active_milliseconds: activeTime, - console_log_count: consoleLogCount, - console_warn_count: consoleWarnCount, - console_error_count: consoleErrorCount, - size: Buffer.byteLength(JSON.stringify(events), 'utf8'), + active_milliseconds: Math.round(activeTime), + console_log_count: Math.trunc(consoleLogCount), + console_warn_count: Math.trunc(consoleWarnCount), + console_error_count: Math.trunc(consoleErrorCount), + size: Math.trunc(Buffer.byteLength(JSON.stringify(events), 'utf8')), + event_count: Math.trunc(events.length), + message_count: 1, } return data diff --git a/plugin-server/src/worker/plugins/setup.ts 
b/plugin-server/src/worker/plugins/setup.ts index 2ff72c9a899aa..4d2d2e33e8807 100644 --- a/plugin-server/src/worker/plugins/setup.ts +++ b/plugin-server/src/worker/plugins/setup.ts @@ -26,8 +26,11 @@ export async function setupPlugins(hub: Hub): Promise { pluginConfig.vm = statelessVms[plugin.id] } else { pluginConfig.vm = new LazyPluginVM(hub, pluginConfig) - pluginVMLoadPromises.push(loadPlugin(hub, pluginConfig)) - + if (hub.PLUGIN_LOAD_SEQUENTIALLY) { + await loadPlugin(hub, pluginConfig) + } else { + pluginVMLoadPromises.push(loadPlugin(hub, pluginConfig)) + } if (prevConfig) { void teardownPlugins(hub, prevConfig) } diff --git a/plugin-server/tests/http-server.test.ts b/plugin-server/tests/http-server.test.ts index eed0dd1907ffc..3900168cd2039 100644 --- a/plugin-server/tests/http-server.test.ts +++ b/plugin-server/tests/http-server.test.ts @@ -1,7 +1,7 @@ import http from 'http' +import { DEFAULT_HTTP_SERVER_PORT } from '../src/config/config' import { startPluginsServer } from '../src/main/pluginsServer' -import { HTTP_SERVER_PORT } from '../src/main/services/http-server' import { makePiscina } from '../src/worker/piscina' import { resetTestDatabase } from './helpers/sql' @@ -40,7 +40,7 @@ describe('http server', () => { ) await new Promise((resolve) => - http.get(`http://localhost:${HTTP_SERVER_PORT}/_health`, (res) => { + http.get(`http://localhost:${DEFAULT_HTTP_SERVER_PORT}/_health`, (res) => { const { statusCode } = res expect(statusCode).toEqual(200) resolve(null) @@ -68,7 +68,7 @@ describe('http server', () => { ) await new Promise((resolve) => - http.get(`http://localhost:${HTTP_SERVER_PORT}/_ready`, (res) => { + http.get(`http://localhost:${DEFAULT_HTTP_SERVER_PORT}/_ready`, (res) => { const { statusCode } = res expect(statusCode).toEqual(200) resolve(null) diff --git a/plugin-server/tests/main/ingestion-queues/each-batch.test.ts b/plugin-server/tests/main/ingestion-queues/each-batch.test.ts index 617978884fe29..0580f53d2724b 100644 --- 
a/plugin-server/tests/main/ingestion-queues/each-batch.test.ts +++ b/plugin-server/tests/main/ingestion-queues/each-batch.test.ts @@ -9,13 +9,10 @@ import { eachBatchLegacyIngestion, splitKafkaJSIngestionBatch, } from '../../../src/main/ingestion-queues/batch-processing/each-batch-ingestion-kafkajs' -import { - eachBatch, - eachBatchAppsOnEventHandlers, -} from '../../../src/main/ingestion-queues/batch-processing/each-batch-onevent' +import { eachBatchAppsOnEventHandlers } from '../../../src/main/ingestion-queues/batch-processing/each-batch-onevent' import { eachBatchWebhooksHandlers, - groupIntoBatchesWebhooks, + groupIntoBatchesByUsage, } from '../../../src/main/ingestion-queues/batch-processing/each-batch-webhooks' import { ClickHouseTimestamp, @@ -24,7 +21,6 @@ import { PostIngestionEvent, RawClickHouseEvent, } from '../../../src/types' -import { groupIntoBatches } from '../../../src/utils/utils' import { ActionManager } from '../../../src/worker/ingestion/action-manager' import { ActionMatcher } from '../../../src/worker/ingestion/action-matcher' import { HookCommander } from '../../../src/worker/ingestion/hooks' @@ -150,26 +146,6 @@ describe('eachBatchX', () => { } }) - describe('eachBatch', () => { - it('calls eachMessage with the correct arguments', async () => { - const eachMessage = jest.fn(() => Promise.resolve()) - const batch = createKafkaJSBatch(event) - await eachBatch(batch, queue, eachMessage, groupIntoBatches, 'key') - - expect(eachMessage).toHaveBeenCalledWith({ value: JSON.stringify(event) }, queue) - }) - - it('tracks metrics based on the key', async () => { - const eachMessage = jest.fn(() => Promise.resolve()) - await eachBatch(createKafkaJSBatch(event), queue, eachMessage, groupIntoBatches, 'my_key') - - expect(queue.pluginsServer.statsd.timing).toHaveBeenCalledWith( - 'kafka_queue.each_batch_my_key', - expect.any(Date) - ) - }) - }) - describe('eachBatchAppsOnEventHandlers', () => { it('calls runAppsOnEventPipeline when useful', async () => 
{ queue.pluginsServer.pluginConfigsPerTeam.set(2, [pluginConfig39]) @@ -333,11 +309,9 @@ describe('eachBatchX', () => { kafkaTimestamp: '2020-02-23 00:10:00.00' as ClickHouseTimestamp, }, ]) - const actionManager = new ActionManager(queue.pluginsServer.postgres) - const actionMatcher = new ActionMatcher(queue.pluginsServer.postgres, actionManager) - // mock hasWebhooks 10 calls, 1,3,10 should return false, others true - actionMatcher.hasWebhooks = jest.fn((teamId) => teamId !== 1 && teamId !== 3 && teamId !== 10) - const result = groupIntoBatchesWebhooks(batch.batch.messages, 5, actionMatcher) + // teamIDs 1,3,10 should return false, others true + const toProcess = jest.fn((teamId) => teamId !== 1 && teamId !== 3 && teamId !== 10) + const result = groupIntoBatchesByUsage(batch.batch.messages, 5, toProcess) expect(result).toEqual([ { eventBatch: expect.arrayContaining([ @@ -375,8 +349,7 @@ describe('eachBatchX', () => { ]) // make sure that if the last message would be a new batch and if it's going to be excluded we // still get the last batch as empty with the right offsite and timestamp - actionMatcher.hasWebhooks = jest.fn((teamId) => teamId !== 1 && teamId !== 3 && teamId !== 10) - const result2 = groupIntoBatchesWebhooks(batch.batch.messages, 7, actionMatcher) + const result2 = groupIntoBatchesByUsage(batch.batch.messages, 7, toProcess) expect(result2).toEqual([ { eventBatch: expect.arrayContaining([ diff --git a/plugin-server/tests/main/ingestion-queues/kafka-queue.test.ts b/plugin-server/tests/main/ingestion-queues/kafka-queue.test.ts index c0912a2ca499b..31dc19d000f3b 100644 --- a/plugin-server/tests/main/ingestion-queues/kafka-queue.test.ts +++ b/plugin-server/tests/main/ingestion-queues/kafka-queue.test.ts @@ -1,4 +1,7 @@ +import { Assignment } from 'node-rdkafka-acosom' + import { KAFKA_EVENTS_PLUGIN_INGESTION } from '../../../src/config/kafka-topics' +import { countPartitionsPerTopic } from '../../../src/kafka/consumer' import { ServerInstance, 
startPluginsServer } from '../../../src/main/pluginsServer' import { LogLevel, PluginsServerConfig } from '../../../src/types' import { Hub } from '../../../src/types' @@ -79,3 +82,22 @@ describe.skip('IngestionConsumer', () => { expect(bufferCalls.length).toEqual(1) }) }) + +describe('countPartitionsPerTopic', () => { + it('should correctly count the number of partitions per topic', () => { + const assignments: Assignment[] = [ + { topic: 'topic1', partition: 0 }, + { topic: 'topic1', partition: 1 }, + { topic: 'topic2', partition: 0 }, + { topic: 'topic2', partition: 1 }, + { topic: 'topic2', partition: 2 }, + { topic: 'topic3', partition: 0 }, + ] + + const result = countPartitionsPerTopic(assignments) + expect(result.get('topic1')).toBe(2) + expect(result.get('topic2')).toBe(3) + expect(result.get('topic3')).toBe(1) + expect(result.size).toBe(3) + }) +}) diff --git a/plugin-server/tests/main/ingestion-queues/session-recording/fixtures.ts b/plugin-server/tests/main/ingestion-queues/session-recording/fixtures.ts index 7bdb5d749e778..105ebf249e2ba 100644 --- a/plugin-server/tests/main/ingestion-queues/session-recording/fixtures.ts +++ b/plugin-server/tests/main/ingestion-queues/session-recording/fixtures.ts @@ -16,7 +16,6 @@ export function createIncomingRecordingMessage( session_id: 'session_id_1', window_id: 'window_id_1', events: [{ ...jsonFullSnapshot }], - replayIngestionConsumer: 'v2', ...partialIncomingMessage, metadata: { diff --git a/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer-v2.test.ts b/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer-v2.test.ts index 87b66a7210fc1..c792c9b1947e5 100644 --- a/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer-v2.test.ts +++ b/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer-v2.test.ts @@ -211,7 +211,6 @@ describe('ingester', () => { timestamp: 1, topic: 
'the_topic', }, - replayIngestionConsumer: 'v2', session_id: '018a47c2-2f4a-70a8-b480-5e51d8b8d070', team_id: 1, window_id: '018a47c2-2f4a-70a8-b480-5e52f5480448', diff --git a/plugin-server/tests/main/process-event.test.ts b/plugin-server/tests/main/process-event.test.ts index 25251874e2086..94505831b8452 100644 --- a/plugin-server/tests/main/process-event.test.ts +++ b/plugin-server/tests/main/process-event.test.ts @@ -315,7 +315,7 @@ test('capture new person', async () => { let persons = await hub.db.fetchPersons() expect(persons[0].version).toEqual(0) expect(persons[0].created_at).toEqual(now) - let expectedProps = { + let expectedProps: Record = { $creator_event_uuid: uuid, $initial_browser: 'Chrome', $initial_browser_version: '95', @@ -329,6 +329,12 @@ test('capture new person', async () => { msclkid: 'BING ADS ID', $initial_referrer: 'https://google.com/?q=posthog', $initial_referring_domain: 'https://google.com', + $browser: 'Chrome', + $browser_version: '95', + $current_url: 'https://test.com', + $os: 'Mac OS X', + $referrer: 'https://google.com/?q=posthog', + $referring_domain: 'https://google.com', } expect(persons[0].properties).toEqual(expectedProps) @@ -343,7 +349,17 @@ test('capture new person', async () => { expect(events[0].properties).toEqual({ $ip: '127.0.0.1', $os: 'Mac OS X', - $set: { utm_medium: 'twitter', gclid: 'GOOGLE ADS ID', msclkid: 'BING ADS ID' }, + $set: { + utm_medium: 'twitter', + gclid: 'GOOGLE ADS ID', + msclkid: 'BING ADS ID', + $browser: 'Chrome', + $browser_version: '95', + $current_url: 'https://test.com', + $os: 'Mac OS X', + $referrer: 'https://google.com/?q=posthog', + $referring_domain: 'https://google.com', + }, token: 'THIS IS NOT A TOKEN FOR TEAM 2', $browser: 'Chrome', $set_once: { @@ -412,6 +428,12 @@ test('capture new person', async () => { msclkid: 'BING ADS ID', $initial_referrer: 'https://google.com/?q=posthog', $initial_referring_domain: 'https://google.com', + $browser: 'Firefox', + $browser_version: 80, + 
$current_url: 'https://test.com/pricing', + $os: 'Mac OS X', + $referrer: 'https://google.com/?q=posthog', + $referring_domain: 'https://google.com', } expect(persons[0].properties).toEqual(expectedProps) @@ -425,6 +447,9 @@ test('capture new person', async () => { expect(events[1].properties.$set).toEqual({ utm_medium: 'instagram', + $browser: 'Firefox', + $browser_version: 80, + $current_url: 'https://test.com/pricing', }) expect(events[1].properties.$set_once).toEqual({ $initial_browser: 'Firefox', @@ -481,6 +506,9 @@ test('capture new person', async () => { expect(persons[0].version).toEqual(1) expect(events[2].properties.$set).toEqual({ + $browser: 'Firefox', + $current_url: 'https://test.com/pricing', + utm_medium: 'instagram', }) expect(events[2].properties.$set_once).toEqual({ @@ -1236,6 +1264,8 @@ const sessionReplayEventTestCases: { | 'console_warn_count' | 'console_error_count' | 'size' + | 'event_count' + | 'message_count' > }[] = [ { @@ -1244,7 +1274,7 @@ const sessionReplayEventTestCases: { click_count: 1, keypress_count: 0, mouse_activity_count: 1, - first_url: undefined, + first_url: null, first_timestamp: '2023-04-25 18:58:13.469', last_timestamp: '2023-04-25 18:58:13.469', active_milliseconds: 1, // one event, but it's active, so active time is 1ms not 0 @@ -1252,6 +1282,8 @@ const sessionReplayEventTestCases: { console_warn_count: 0, console_error_count: 0, size: 73, + event_count: 1, + message_count: 1, }, }, { @@ -1260,7 +1292,7 @@ const sessionReplayEventTestCases: { click_count: 0, keypress_count: 1, mouse_activity_count: 1, - first_url: undefined, + first_url: null, first_timestamp: '2023-04-25 18:58:13.469', last_timestamp: '2023-04-25 18:58:13.469', active_milliseconds: 1, // one event, but it's active, so active time is 1ms not 0 @@ -1268,6 +1300,8 @@ const sessionReplayEventTestCases: { console_warn_count: 0, console_error_count: 0, size: 73, + event_count: 1, + message_count: 1, }, }, { @@ -1316,7 +1350,7 @@ const 
sessionReplayEventTestCases: { click_count: 0, keypress_count: 1, mouse_activity_count: 1, - first_url: undefined, + first_url: null, first_timestamp: '2023-04-25 18:58:13.469', last_timestamp: '2023-04-25 18:58:13.469', active_milliseconds: 1, // one event, but it's active, so active time is 1ms not 0 @@ -1324,6 +1358,8 @@ const sessionReplayEventTestCases: { console_warn_count: 3, console_error_count: 1, size: 762, + event_count: 7, + message_count: 1, }, }, { @@ -1362,6 +1398,8 @@ const sessionReplayEventTestCases: { console_warn_count: 0, console_error_count: 0, size: 213, + event_count: 2, + message_count: 1, }, }, { @@ -1381,7 +1419,7 @@ const sessionReplayEventTestCases: { click_count: 6, keypress_count: 0, mouse_activity_count: 6, - first_url: undefined, + first_url: null, first_timestamp: '2023-04-25 18:58:13.000', last_timestamp: '2023-04-25 18:58:19.000', active_milliseconds: 6000, // can sum up the activity across windows @@ -1389,6 +1427,8 @@ const sessionReplayEventTestCases: { console_warn_count: 0, console_error_count: 0, size: 433, + event_count: 6, + message_count: 1, }, }, ] diff --git a/plugin-server/tests/utils/db/utils.test.ts b/plugin-server/tests/utils/db/utils.test.ts index 5201b8e60b803..420c645472ff3 100644 --- a/plugin-server/tests/utils/db/utils.test.ts +++ b/plugin-server/tests/utils/db/utils.test.ts @@ -17,41 +17,74 @@ describe('personInitialAndUTMProperties()', () => { { tag_name: 'a', nth_child: 1, nth_of_type: 2, attr__class: 'btn btn-sm' }, { tag_name: 'div', nth_child: 1, nth_of_type: 2, $el_text: '💻' }, ], + $app_build: 2, + $app_name: 'my app', + $app_namespace: 'com.posthog.myapp', + $app_version: '1.2.3', } - expect(personInitialAndUTMProperties(properties)).toEqual({ - distinct_id: 2, - $browser: 'Chrome', - $current_url: 'https://test.com', - $os: 'Mac OS X', - $browser_version: '95', - $referring_domain: 'https://google.com', - $referrer: 'https://google.com/?q=posthog', - utm_medium: 'twitter', - gclid: 'GOOGLE ADS ID', - 
msclkid: 'BING ADS ID', - $elements: [ - { - tag_name: 'a', - nth_child: 1, - nth_of_type: 2, - attr__class: 'btn btn-sm', + expect(personInitialAndUTMProperties(properties)).toMatchInlineSnapshot(` + Object { + "$app_build": 2, + "$app_name": "my app", + "$app_namespace": "com.posthog.myapp", + "$app_version": "1.2.3", + "$browser": "Chrome", + "$browser_version": "95", + "$current_url": "https://test.com", + "$elements": Array [ + Object { + "attr__class": "btn btn-sm", + "nth_child": 1, + "nth_of_type": 2, + "tag_name": "a", }, - { tag_name: 'div', nth_child: 1, nth_of_type: 2, $el_text: '💻' }, - ], - $set: { utm_medium: 'twitter', gclid: 'GOOGLE ADS ID', msclkid: 'BING ADS ID' }, - $set_once: { - $initial_browser: 'Chrome', - $initial_current_url: 'https://test.com', - $initial_os: 'Mac OS X', - $initial_browser_version: '95', - $initial_utm_medium: 'twitter', - $initial_gclid: 'GOOGLE ADS ID', - $initial_msclkid: 'BING ADS ID', - $initial_referring_domain: 'https://google.com', - $initial_referrer: 'https://google.com/?q=posthog', - }, - }) + Object { + "$el_text": "💻", + "nth_child": 1, + "nth_of_type": 2, + "tag_name": "div", + }, + ], + "$os": "Mac OS X", + "$referrer": "https://google.com/?q=posthog", + "$referring_domain": "https://google.com", + "$set": Object { + "$app_build": 2, + "$app_name": "my app", + "$app_namespace": "com.posthog.myapp", + "$app_version": "1.2.3", + "$browser": "Chrome", + "$browser_version": "95", + "$current_url": "https://test.com", + "$os": "Mac OS X", + "$referrer": "https://google.com/?q=posthog", + "$referring_domain": "https://google.com", + "gclid": "GOOGLE ADS ID", + "msclkid": "BING ADS ID", + "utm_medium": "twitter", + }, + "$set_once": Object { + "$initial_app_build": 2, + "$initial_app_name": "my app", + "$initial_app_namespace": "com.posthog.myapp", + "$initial_app_version": "1.2.3", + "$initial_browser": "Chrome", + "$initial_browser_version": "95", + "$initial_current_url": "https://test.com", + "$initial_gclid": 
"GOOGLE ADS ID", + "$initial_msclkid": "BING ADS ID", + "$initial_os": "Mac OS X", + "$initial_referrer": "https://google.com/?q=posthog", + "$initial_referring_domain": "https://google.com", + "$initial_utm_medium": "twitter", + }, + "distinct_id": 2, + "gclid": "GOOGLE ADS ID", + "msclkid": "BING ADS ID", + "utm_medium": "twitter", + } + `) }) it('initial current domain regression test', () => { @@ -62,6 +95,7 @@ describe('personInitialAndUTMProperties()', () => { expect(personInitialAndUTMProperties(properties)).toEqual({ $current_url: 'https://test.com', $set_once: { $initial_current_url: 'https://test.com' }, + $set: { $current_url: 'https://test.com' }, }) }) }) diff --git a/plugin-server/tests/worker/ingestion/event-pipeline/event-pipeline-integration.test.ts b/plugin-server/tests/worker/ingestion/event-pipeline/event-pipeline-integration.test.ts index 837079da765eb..343826d81a4f2 100644 --- a/plugin-server/tests/worker/ingestion/event-pipeline/event-pipeline-integration.test.ts +++ b/plugin-server/tests/worker/ingestion/event-pipeline/event-pipeline-integration.test.ts @@ -105,6 +105,7 @@ describe('Event Pipeline integration test', () => { $set: { personProp: 'value', anotherValue: 2, + $browser: 'Chrome', }, $set_once: { $initial_browser: 'Chrome', @@ -118,6 +119,7 @@ describe('Event Pipeline integration test', () => { expect(persons[0].properties).toEqual({ $creator_event_uuid: event.uuid, $initial_browser: 'Chrome', + $browser: 'Chrome', personProp: 'value', anotherValue: 2, }) diff --git a/plugin-server/tests/worker/ingestion/event-pipeline/processPersonsStep.test.ts b/plugin-server/tests/worker/ingestion/event-pipeline/processPersonsStep.test.ts index 71d495bcf9bce..d2ce3aa76e383 100644 --- a/plugin-server/tests/worker/ingestion/event-pipeline/processPersonsStep.test.ts +++ b/plugin-server/tests/worker/ingestion/event-pipeline/processPersonsStep.test.ts @@ -85,6 +85,7 @@ describe.each([[true], [false]])('processPersonsStep()', (poEEmbraceJoin) => { 
$browser: 'Chrome', $set: { someProp: 'value', + $browser: 'Chrome', }, $set_once: { $initial_browser: 'Chrome', @@ -95,7 +96,12 @@ describe.each([[true], [false]])('processPersonsStep()', (poEEmbraceJoin) => { expect.objectContaining({ id: expect.any(Number), uuid: expect.any(String), - properties: { $initial_browser: 'Chrome', someProp: 'value', $creator_event_uuid: expect.any(String) }, + properties: { + $initial_browser: 'Chrome', + someProp: 'value', + $creator_event_uuid: expect.any(String), + $browser: 'Chrome', + }, version: 0, is_identified: false, }) diff --git a/plugin-server/tests/worker/ingestion/person-state.test.ts b/plugin-server/tests/worker/ingestion/person-state.test.ts index b44f60e8d2dda..66fa35976d274 100644 --- a/plugin-server/tests/worker/ingestion/person-state.test.ts +++ b/plugin-server/tests/worker/ingestion/person-state.test.ts @@ -25,17 +25,20 @@ describe('PersonState.update()', () => { let uuid2: UUIDT let teamId: number let poEEmbraceJoin: boolean + let organizationId: string beforeAll(async () => { ;[hub, closeHub] = await createHub({}) await hub.db.clickhouseQuery('SYSTEM STOP MERGES') + + organizationId = await createOrganization(hub.db.postgres) }) beforeEach(async () => { poEEmbraceJoin = false uuid = new UUIDT() uuid2 = new UUIDT() - const organizationId = await createOrganization(hub.db.postgres) + teamId = await createTeam(hub.db.postgres, organizationId) jest.spyOn(hub.db, 'fetchPerson') @@ -1078,10 +1081,11 @@ describe('PersonState.update()', () => { hub.statsd = { increment: jest.fn() } as any }) - it('stops $identify if current distinct_id is illegal', async () => { + const illegalIds = ['', ' ', 'null', 'undefined', '"undefined"', '[object Object]', '"[object Object]"'] + it.each(illegalIds)('stops $identify if current distinct_id is illegal: `%s`', async (illegalId: string) => { const person = await personState({ event: '$identify', - distinct_id: '[object Object]', + distinct_id: illegalId, properties: { 
$anon_distinct_id: 'anonymous_id', }, @@ -1092,16 +1096,16 @@ describe('PersonState.update()', () => { expect(persons.length).toEqual(0) expect(hub.statsd!.increment).toHaveBeenCalledWith('illegal_distinct_ids.total', { - distinctId: '[object Object]', + distinctId: illegalId, }) }) - it('stops $identify if $anon_distinct_id is illegal', async () => { + it.each(illegalIds)('stops $identify if $anon_distinct_id is illegal: `%s`', async (illegalId: string) => { const person = await personState({ event: '$identify', distinct_id: 'some_distinct_id', properties: { - $anon_distinct_id: 'undefined', + $anon_distinct_id: illegalId, }, }).handleIdentifyOrAlias() @@ -1110,7 +1114,7 @@ describe('PersonState.update()', () => { expect(persons.length).toEqual(0) expect(hub.statsd!.increment).toHaveBeenCalledWith('illegal_distinct_ids.total', { - distinctId: 'undefined', + distinctId: illegalId, }) }) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 52699fa9c3fb3..301601dff1e49 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1,4 +1,4 @@ -lockfileVersion: '6.1' +lockfileVersion: '6.0' settings: autoInstallPeers: true @@ -39,8 +39,8 @@ dependencies: specifier: ^2.1.2 version: 2.1.2(react@16.14.0) '@rrweb/types': - specifier: ^2.0.0-alpha.9 - version: 2.0.0-alpha.9 + specifier: ^2.0.0-alpha.11 + version: 2.0.0-alpha.11 '@sentry/react': specifier: 7.22.0 version: 7.22.0(react@16.14.0) @@ -194,9 +194,12 @@ dependencies: monaco-editor: specifier: ^0.39.0 version: 0.39.0 + papaparse: + specifier: ^5.4.1 + version: 5.4.1 posthog-js: - specifier: 1.78.1 - version: 1.78.1 + specifier: 1.78.5 + version: 1.78.5 posthog-js-lite: specifier: 2.0.0-alpha5 version: 2.0.0-alpha5 @@ -282,8 +285,8 @@ dependencies: specifier: ^1.5.1 version: 1.5.1 rrweb: - specifier: ^2.0.0-alpha.9 - version: 2.0.0-alpha.9 + specifier: ^2.0.0-alpha.11 + version: 2.0.0-alpha.11 sass: specifier: ^1.26.2 version: 1.56.0 @@ -432,6 +435,9 @@ devDependencies: '@types/node': specifier: ^18.11.9 version: 18.11.9 + 
'@types/papaparse': + specifier: ^5.3.8 + version: 5.3.8 '@types/pixelmatch': specifier: ^5.2.4 version: 5.2.4 @@ -608,7 +614,7 @@ devDependencies: version: 7.3.1 storybook-addon-pseudo-states: specifier: 2.1.0 - version: 2.1.0(@storybook/components@7.3.1)(@storybook/core-events@7.3.1)(@storybook/manager-api@7.4.0)(@storybook/preview-api@7.4.0)(@storybook/theming@7.3.1)(react-dom@16.14.0)(react@16.14.0) + version: 2.1.0(@storybook/components@7.3.1)(@storybook/core-events@7.3.1)(@storybook/manager-api@7.4.1)(@storybook/preview-api@7.4.1)(@storybook/theming@7.3.1)(react-dom@16.14.0)(react@16.14.0) style-loader: specifier: ^2.0.0 version: 2.0.0(webpack@5.88.2) @@ -978,7 +984,7 @@ packages: engines: {node: '>=6.0.0'} hasBin: true dependencies: - '@babel/types': 7.22.15 + '@babel/types': 7.22.17 dev: true /@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@7.22.5(@babel/core@7.22.10): @@ -2083,8 +2089,8 @@ packages: '@babel/helper-validator-identifier': 7.22.5 to-fast-properties: 2.0.0 - /@babel/types@7.22.15: - resolution: {integrity: sha512-X+NLXr0N8XXmN5ZsaQdm9U2SSC3UbIYq/doL++sueHOTisgZHoKaQtZxGuV2cUPQHMfjKEfg/g6oy7Hm6SKFtA==} + /@babel/types@7.22.17: + resolution: {integrity: sha512-YSQPHLFtQNE5xN9tHuZnzu8vPr61wVTBZdfv1meex1NBosa4iT05k/Jw06ddJugi4bk7The/oSwQGFcksmEJQg==} engines: {node: '>=6.9.0'} dependencies: '@babel/helper-string-parser': 7.22.5 @@ -3928,10 +3934,10 @@ packages: type-fest: 2.19.0 dev: false - /@rrweb/types@2.0.0-alpha.9: - resolution: {integrity: sha512-yS2KghLSmSSxo6H7tHrJ6u+nWJA9zCXaKFyc79rUSX8RHHSImRqocTqJ8jz794kCIWA90rvaQayRONdHO+vB0Q==} + /@rrweb/types@2.0.0-alpha.11: + resolution: {integrity: sha512-8ccocIkT5J/bfNRQY85qR/g6p5YQFpgFO2cMt4+Ex7w31Lq0yqZBRaoYEsawQKpLrn5KOHkdn2UTUrna7WMQuA==} dependencies: - rrweb-snapshot: 2.0.0-alpha.9 + rrweb-snapshot: 2.0.0-alpha.11 dev: false /@sentry/browser@7.22.0: @@ -4519,11 +4525,11 @@ packages: tiny-invariant: 1.3.1 dev: true - /@storybook/channels@7.4.0: - resolution: 
{integrity: sha512-/1CU0s3npFumzVHLGeubSyPs21O3jNqtSppOjSB9iDTyV2GtQrjh5ntVwebfKpCkUSitx3x7TkCb9dylpEZ8+w==} + /@storybook/channels@7.4.1: + resolution: {integrity: sha512-gnE1mNrRF+9oCVRMq6MS/tLXJbYmf9P02PCC3KpMLcSsABdH5jcrACejzJVo/kE223knFH7NJc4BBj7+5h0uXA==} dependencies: - '@storybook/client-logger': 7.4.0 - '@storybook/core-events': 7.4.0 + '@storybook/client-logger': 7.4.1 + '@storybook/core-events': 7.4.1 '@storybook/global': 5.0.0 qs: 6.11.2 telejson: 7.2.0 @@ -4587,8 +4593,8 @@ packages: '@storybook/global': 5.0.0 dev: true - /@storybook/client-logger@7.4.0: - resolution: {integrity: sha512-4pBnf7+df1wXEVcF1civqxbrtccGGHQkfWQkJo49s53RXvF7SRTcif6XTx0V3cQV0v7I1C5mmLm0LNlmjPRP1Q==} + /@storybook/client-logger@7.4.1: + resolution: {integrity: sha512-2j0DQlKlPNY8XAaEZv+mUYEUm4dOWg6/Q92UNbvYPRK5qbXUvbMiQco5nmvg4LvMT6y99LhRSW2xrwEx5xKAKw==} dependencies: '@storybook/global': 5.0.0 dev: true @@ -4679,8 +4685,8 @@ packages: resolution: {integrity: sha512-7Pkgwmj/9B7Z3NNSn2swnviBrg9L1VeYSFw6JJKxtQskt8QoY8LxAsPzVMlHjqRmO6sO7lHo9FgpzIFxdmFaAA==} dev: true - /@storybook/core-events@7.4.0: - resolution: {integrity: sha512-JavEo4dw7TQdF5pSKjk4RtqLgsG2R/eWRI8vZ3ANKa0ploGAnQR/eMTfSxf6TUH3ElBWLJhi+lvUCkKXPQD+dw==} + /@storybook/core-events@7.4.1: + resolution: {integrity: sha512-F1tGb32XZ4FRfbtXdi4b+zdzWUjFz5rn3TF18mSuBGGXvxKU+4tywgjGQ3dKGdvuP754czn3poSdz2ZW08bLsQ==} dependencies: ts-dedent: 2.2.0 dev: true @@ -4845,20 +4851,20 @@ packages: ts-dedent: 2.2.0 dev: true - /@storybook/manager-api@7.4.0(react-dom@16.14.0)(react@16.14.0): - resolution: {integrity: sha512-sBfkkt0eZGTozeKrbzMtWLEOQrgqdk24OUJlkc2IDaucR1CBNjoCMjNeYg7cLDw0rXE8W3W3AdWtJnfsUbLMAQ==} + /@storybook/manager-api@7.4.1(react-dom@16.14.0)(react@16.14.0): + resolution: {integrity: sha512-nzYasETW20uDWpfST6JFf6c/GSFB/dj7xVtg5EpvAYF8GkErCk9TvNKdLNroRrIYm5VJxHWC2V+CJ07RuX3Glw==} peerDependencies: react: ^16.8.0 || ^17.0.0 || ^18.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 dependencies: - '@storybook/channels': 
7.4.0 - '@storybook/client-logger': 7.4.0 - '@storybook/core-events': 7.4.0 + '@storybook/channels': 7.4.1 + '@storybook/client-logger': 7.4.1 + '@storybook/core-events': 7.4.1 '@storybook/csf': 0.1.1 '@storybook/global': 5.0.0 - '@storybook/router': 7.4.0(react-dom@16.14.0)(react@16.14.0) - '@storybook/theming': 7.4.0(react-dom@16.14.0)(react@16.14.0) - '@storybook/types': 7.4.0 + '@storybook/router': 7.4.1(react-dom@16.14.0)(react@16.14.0) + '@storybook/theming': 7.4.1(react-dom@16.14.0)(react@16.14.0) + '@storybook/types': 7.4.1 dequal: 2.0.3 lodash: 4.17.21 memoizerific: 1.11.3 @@ -4954,15 +4960,15 @@ packages: util-deprecate: 1.0.2 dev: true - /@storybook/preview-api@7.4.0: - resolution: {integrity: sha512-ndXO0Nx+eE7ktVE4EqHpQZ0guX7yYBdruDdJ7B739C0+OoPWsJN7jAzUqq0NXaBcYrdaU5gTy+KnWJUt8R+OyA==} + /@storybook/preview-api@7.4.1: + resolution: {integrity: sha512-swmosWK73lP0CXDKMOwYIaaId28+muPDYX2V/0JmIOA+45HFXimeXZs3XsgVgQMutVF51QqnDA0pfrNgRofHgQ==} dependencies: - '@storybook/channels': 7.4.0 - '@storybook/client-logger': 7.4.0 - '@storybook/core-events': 7.4.0 + '@storybook/channels': 7.4.1 + '@storybook/client-logger': 7.4.1 + '@storybook/core-events': 7.4.1 '@storybook/csf': 0.1.1 '@storybook/global': 5.0.0 - '@storybook/types': 7.4.0 + '@storybook/types': 7.4.1 '@types/qs': 6.9.8 dequal: 2.0.3 lodash: 4.17.21 @@ -5097,13 +5103,13 @@ packages: react-dom: 16.14.0(react@16.14.0) dev: true - /@storybook/router@7.4.0(react-dom@16.14.0)(react@16.14.0): - resolution: {integrity: sha512-IATdtFL5C3ryjNQSwaQfrmiOZiVFoVNMevMoBGDC++g0laSW40TGiNK6fUjUDBKuOgbuDt4Svfbl29k21GefEg==} + /@storybook/router@7.4.1(react-dom@16.14.0)(react@16.14.0): + resolution: {integrity: sha512-7tE1B18jb+5+ujXd3BHcub85QnytIVBNA0iAo+o8MNwArISyodqp12y2D3w+QpXkg0GtPhAp/CMhzpyxotPhRQ==} peerDependencies: react: ^16.8.0 || ^17.0.0 || ^18.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 dependencies: - '@storybook/client-logger': 7.4.0 + '@storybook/client-logger': 7.4.1 memoizerific: 1.11.3 qs: 
6.11.2 react: 16.14.0 @@ -5196,14 +5202,14 @@ packages: react-dom: 16.14.0(react@16.14.0) dev: true - /@storybook/theming@7.4.0(react-dom@16.14.0)(react@16.14.0): - resolution: {integrity: sha512-eLjEf6G3cqlegfutF/iUrec9LrUjKDj7K4ZhGdACWrf7bQcODs99EK62e9/d8GNKr4b+QMSEuM6XNGaqdPnuzQ==} + /@storybook/theming@7.4.1(react-dom@16.14.0)(react@16.14.0): + resolution: {integrity: sha512-a4QajZbnYumq8ovtn7nW7BeNrk/TaWyKmUrIz4w08I6ghzESJA4aCWZ6394awbrruiIOzCCKOUq4mfWEsc8W6A==} peerDependencies: react: ^16.8.0 || ^17.0.0 || ^18.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 dependencies: '@emotion/use-insertion-effect-with-fallbacks': 1.0.1(react@16.14.0) - '@storybook/client-logger': 7.4.0 + '@storybook/client-logger': 7.4.1 '@storybook/global': 5.0.0 memoizerific: 1.11.3 react: 16.14.0 @@ -5219,13 +5225,12 @@ packages: file-system-cache: 2.3.0 dev: true - /@storybook/types@7.4.0: - resolution: {integrity: sha512-XyzYkmeklywxvElPrIWLczi/PWtEdgTL6ToT3++FVxptsC2LZKS3Ue+sBcQ9xRZhkRemw4HQHwed5EW3dO8yUg==} + /@storybook/types@7.4.1: + resolution: {integrity: sha512-bjt1YDG9AocFBhIFRvGGbYZPlD223p+qAFcFgYdezU16fFE4ZGFUzUuq2ERkOofL7a2+OzLTCQ/SKe1jFkXCxQ==} dependencies: - '@storybook/channels': 7.4.0 + '@storybook/channels': 7.4.1 '@types/babel__core': 7.20.1 '@types/express': 4.17.17 - '@types/react': 16.14.34 file-system-cache: 2.3.0 dev: true @@ -5721,7 +5726,7 @@ packages: resolution: {integrity: sha512-aACu/U/omhdk15O4Nfb+fHgH/z3QsfQzpnvRZhYhThms83ZnAOZz7zZAWO7mn2yyNQaA4xTO8GLK3uqFU4bYYw==} dependencies: '@babel/parser': 7.22.16 - '@babel/types': 7.22.15 + '@babel/types': 7.22.17 '@types/babel__generator': 7.6.4 '@types/babel__template': 7.4.1 '@types/babel__traverse': 7.20.1 @@ -5749,7 +5754,7 @@ packages: /@types/babel__traverse@7.20.1: resolution: {integrity: sha512-MitHFXnhtgwsGZWtT68URpOvLN4EREih1u3QtQiN4VdAxWKRVvGCSvw/Qth0M0Qq3pJpnGOu5JaM/ydK7OGbqg==} dependencies: - '@babel/types': 7.22.15 + '@babel/types': 7.22.17 dev: true /@types/body-parser@1.19.2: @@ -6213,6 +6218,12 
@@ packages: resolution: {integrity: sha512-sn7L+qQ6RLPdXRoiaE7bZ/Ek+o4uICma/lBFPyJEKDTPTBP1W8u0c4baj3EiS4DiqLs+Hk+KUGvMVJtAw3ePJg==} dev: false + /@types/papaparse@5.3.8: + resolution: {integrity: sha512-ArKIEOOWULbhi53wkAiRy1ze4wvrTfhpAj7Yfzva+EkmX2sV8PpFB+xqzJfzXNzK4me95FJH9QZt5NXFVGzOoQ==} + dependencies: + '@types/node': 18.11.9 + dev: true + /@types/parse-json@4.0.0: resolution: {integrity: sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==} dev: true @@ -12994,7 +13005,7 @@ packages: dependencies: universalify: 2.0.0 optionalDependencies: - graceful-fs: 4.2.10 + graceful-fs: 4.2.11 /jsprim@2.0.2: resolution: {integrity: sha512-gqXddjPqQ6G40VdnI6T6yObEC+pDNvyP95wdQhkWkg7crHH3km5qP1FsOXEkzEQwnz6gz5qGTn1c2Y52wP3OyQ==} @@ -14266,6 +14277,10 @@ packages: resolution: {integrity: sha512-NUcwaKxUxWrZLpDG+z/xZaCgQITkA/Dv4V/T6bw7VON6l1Xz/VnrBqrYjZQ12TamKHzITTfOEIYUj48y2KXImA==} dev: true + /papaparse@5.4.1: + resolution: {integrity: sha512-HipMsgJkZu8br23pW15uvo6sib6wne/4woLZPlFf3rpDyMe9ywEXUsuD7+6K9PRkJlVT51j/sCOYDKGGS3ZJrw==} + dev: false + /param-case@3.0.4: resolution: {integrity: sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==} dependencies: @@ -14909,8 +14924,8 @@ packages: resolution: {integrity: sha512-tlkBdypJuvK/s00n4EiQjwYVfuuZv6vt8BF3g1ooIQa2Gz9Vz80p8q3qsPLZ0V5ErGRy6i3Q4fWC9TDzR7GNRQ==} dev: false - /posthog-js@1.78.1: - resolution: {integrity: sha512-5tJoF56gGg4B4CSlLbWHuTpi7Ch7wksjCkPonHlQAc61ZZRymTB63tRheCvkcf+Omf8PBkO+2NJ0XEgrkRHE0A==} + /posthog-js@1.78.5: + resolution: {integrity: sha512-UUipML52LEyks7Pbx/3dpBJc2iPJrW+Ss6Y0BiIygn+QZoBjIe1WjE4Ep+Fnz7+cX1axex/ZiYholBnW7E4Aug==} dependencies: fflate: 0.4.8 dev: false @@ -16608,27 +16623,27 @@ packages: resolution: {integrity: sha512-85aZYCxweiD5J8yTEbw+E6A27zSnLPNDL0WfPdw3YYodq7WjnTKo0q4dtyQ2gz23iPT8Q9CUyJtAaUNcTxRf5Q==} dev: false - /rrdom@2.0.0-alpha.9: - resolution: {integrity: 
sha512-jfaZ8tHi098P4GpPEtkOwnkucyKA5eGanAVHGPklzCqAeEq1Yx+9/y8AeOtF3yiobqKKkW8lLvFH2KrBH1CZlQ==} + /rrdom@2.0.0-alpha.11: + resolution: {integrity: sha512-U37m0t4jTz63wnVRcOQ5qFzSTrI5RdNgeXnHAha2Fmh9+1K+XuCx421a8D1wZk3WcDc2sFz/04FVdM0OD2caHg==} dependencies: - rrweb-snapshot: 2.0.0-alpha.9 + rrweb-snapshot: 2.0.0-alpha.11 dev: false - /rrweb-snapshot@2.0.0-alpha.9: - resolution: {integrity: sha512-mHg1uUE2iUf0MXLE//4r5cMynkbduwmaOEis4gC7EuqkUAC1pYoLpcYYVt9lD6dgYIF6BmK6dgLLzMpD/tTyyA==} + /rrweb-snapshot@2.0.0-alpha.11: + resolution: {integrity: sha512-N0dzeJA2VhrlSOadkKwCVmV/DuNOwBH+Lhx89hAf9PQK4lCS8AP4AaylhqUdZOYHqwVjqsYel/uZ4hN79vuLhw==} dev: false - /rrweb@2.0.0-alpha.9: - resolution: {integrity: sha512-8E2yiLY7IrFjDcVUZ7AcQtdBNFuTIsBrlCMpbyLua6X64dGRhOZ+IUDXLnAbNj5oymZgFtZu2UERG9rmV2VAng==} + /rrweb@2.0.0-alpha.11: + resolution: {integrity: sha512-vJ2gNvF+pUG9C2aaau7iSNqhWBSc4BwtUO4FpegOtDObuH4PIaxNJOlgHz82+WxKr9XPm93ER0LqmNpy0KYdKg==} dependencies: - '@rrweb/types': 2.0.0-alpha.9 + '@rrweb/types': 2.0.0-alpha.11 '@types/css-font-loading-module': 0.0.7 '@xstate/fsm': 1.6.5 base64-arraybuffer: 1.0.2 fflate: 0.4.8 mitt: 3.0.0 - rrdom: 2.0.0-alpha.9 - rrweb-snapshot: 2.0.0-alpha.9 + rrdom: 2.0.0-alpha.11 + rrweb-snapshot: 2.0.0-alpha.11 dev: false /rtl-css-js@1.16.0: @@ -17135,7 +17150,7 @@ packages: resolution: {integrity: sha512-siT1RiqlfQnGqgT/YzXVUNsom9S0H1OX+dpdGN1xkyYATo4I6sep5NmsRD/40s3IIOvlCq6akxkqG82urIZW1w==} dev: true - /storybook-addon-pseudo-states@2.1.0(@storybook/components@7.3.1)(@storybook/core-events@7.3.1)(@storybook/manager-api@7.4.0)(@storybook/preview-api@7.4.0)(@storybook/theming@7.3.1)(react-dom@16.14.0)(react@16.14.0): + /storybook-addon-pseudo-states@2.1.0(@storybook/components@7.3.1)(@storybook/core-events@7.3.1)(@storybook/manager-api@7.4.1)(@storybook/preview-api@7.4.1)(@storybook/theming@7.3.1)(react-dom@16.14.0)(react@16.14.0): resolution: {integrity: 
sha512-AwbCL1OiZ16aIeXSP/IOovkMwXy7NTZqmjkz+UM2guSGjvogHNA95NhuVyWoqieE+QWUpGO48+MrBGMeeJcHOQ==} peerDependencies: '@storybook/components': ^7.0.0 @@ -17153,8 +17168,8 @@ packages: dependencies: '@storybook/components': 7.3.1(@types/react-dom@16.9.17)(@types/react@16.14.34)(react-dom@16.14.0)(react@16.14.0) '@storybook/core-events': 7.3.1 - '@storybook/manager-api': 7.4.0(react-dom@16.14.0)(react@16.14.0) - '@storybook/preview-api': 7.4.0 + '@storybook/manager-api': 7.4.1(react-dom@16.14.0)(react@16.14.0) + '@storybook/preview-api': 7.4.1 '@storybook/theming': 7.3.1(react-dom@16.14.0)(react@16.14.0) react: 16.14.0 react-dom: 16.14.0(react@16.14.0) diff --git a/posthog/api/feature_flag.py b/posthog/api/feature_flag.py index bdd8ecf3ed555..f61543e14f5cb 100644 --- a/posthog/api/feature_flag.py +++ b/posthog/api/feature_flag.py @@ -1,9 +1,8 @@ import json from typing import Any, Dict, List, Optional, cast -from django.db.models import QuerySet +from django.db.models import QuerySet, Q from django.conf import settings -from django.db.models.query_utils import Q from rest_framework import authentication, exceptions, request, serializers, status, viewsets from rest_framework.decorators import action from rest_framework.permissions import SAFE_METHODS, BasePermission, IsAuthenticated @@ -70,6 +69,7 @@ class FeatureFlagSerializer(TaggedItemSerializerMixin, serializers.HyperlinkedMo rollout_percentage = serializers.SerializerMethodField() experiment_set: serializers.PrimaryKeyRelatedField = serializers.PrimaryKeyRelatedField(many=True, read_only=True) + surveys: serializers.SerializerMethodField = serializers.SerializerMethodField() features: serializers.SerializerMethodField = serializers.SerializerMethodField() usage_dashboard: serializers.PrimaryKeyRelatedField = serializers.PrimaryKeyRelatedField(read_only=True) analytics_dashboards = serializers.PrimaryKeyRelatedField( @@ -100,6 +100,7 @@ class Meta: "rollout_percentage", "ensure_experience_continuity", 
"experiment_set", + "surveys", "features", "rollback_conditions", "performed_rollback", @@ -129,6 +130,12 @@ def get_features(self, feature_flag: FeatureFlag) -> Dict: return MinimalEarlyAccessFeatureSerializer(feature_flag.features, many=True).data + def get_surveys(self, feature_flag: FeatureFlag) -> Dict: + from posthog.api.survey import SurveyAPISerializer + + return SurveyAPISerializer(feature_flag.surveys_linked_flag, many=True).data # type: ignore + # ignoring type because mypy doesn't know about the surveys_linked_flag `related_name` relationship + def get_rollout_percentage(self, feature_flag: FeatureFlag) -> Optional[int]: if self.get_is_simple_flag(feature_flag): return feature_flag.conditions[0].get("rollout_percentage") @@ -343,7 +350,9 @@ def get_queryset(self) -> QuerySet: .prefetch_related("experiment_set") .prefetch_related("features") .prefetch_related("analytics_dashboards") + .prefetch_related("surveys_linked_flag") ) + survey_targeting_flags = Survey.objects.filter(team=self.team, targeting_flag__isnull=False).values_list( "targeting_flag_id", flat=True ) @@ -434,6 +443,7 @@ def my_flags(self, request: request.Request, **kwargs): .prefetch_related("experiment_set") .prefetch_related("features") .prefetch_related("analytics_dashboards") + .prefetch_related("surveys_linked_flag") .select_related("created_by") .order_by("-created_at") ) diff --git a/posthog/api/notebook.py b/posthog/api/notebook.py index 7f3cfae9be957..5c25efe42815d 100644 --- a/posthog/api/notebook.py +++ b/posthog/api/notebook.py @@ -1,5 +1,5 @@ from typing import Dict, List, Optional, Any - +from django.db.models import Q import structlog from django.db import transaction from django.db.models import QuerySet @@ -74,6 +74,7 @@ class Meta: "short_id", "title", "content", + "text_content", "version", "deleted", "created_at", @@ -250,8 +251,13 @@ def _filter_request(self, request: request.Request, queryset: QuerySet) -> Query queryset = queryset.filter( 
last_modified_at__lt=relative_date_parse(request.GET["date_to"], self.team.timezone_info) ) - elif key == "s": - queryset = queryset.filter(title__icontains=request.GET["s"]) + elif key == "search": + queryset = queryset.filter( + # some notebooks have no text_content until next saved, so we need to check the title too + # TODO this can be removed once all/most notebooks have text_content + Q(title__search=request.GET["search"]) + | Q(text_content__search=request.GET["search"]) + ) elif key == "contains": contains = request.GET["contains"] match_pairs = contains.replace(",", " ").split(" ") diff --git a/posthog/api/query.py b/posthog/api/query.py index f6c9e871d0c6d..385f14d2f7905 100644 --- a/posthog/api/query.py +++ b/posthog/api/query.py @@ -25,6 +25,8 @@ from posthog.hogql.errors import HogQLException from posthog.hogql.metadata import get_hogql_metadata from posthog.hogql.query import execute_hogql_query + +from posthog.hogql_queries.lifecycle_query_runner import LifecycleQueryRunner from posthog.models import Team from posthog.models.event.events_query import run_events_query from posthog.models.user import User @@ -203,22 +205,25 @@ def process_query(team: Team, query_json: Dict, default_limit: Optional[int] = N if query_kind == "EventsQuery": events_query = EventsQuery.parse_obj(query_json) - response = run_events_query(query=events_query, team=team, default_limit=default_limit) - return _unwrap_pydantic_dict(response) + events_response = run_events_query(query=events_query, team=team, default_limit=default_limit) + return _unwrap_pydantic_dict(events_response) elif query_kind == "HogQLQuery": hogql_query = HogQLQuery.parse_obj(query_json) - response = execute_hogql_query( + hogql_response = execute_hogql_query( query_type="HogQLQuery", query=hogql_query.query, team=team, filters=hogql_query.filters, default_limit=default_limit, ) - return _unwrap_pydantic_dict(response) + return _unwrap_pydantic_dict(hogql_response) elif query_kind == "HogQLMetadata": 
metadata_query = HogQLMetadata.parse_obj(query_json) - response = get_hogql_metadata(query=metadata_query, team=team) - return _unwrap_pydantic_dict(response) + metadata_response = get_hogql_metadata(query=metadata_query, team=team) + return _unwrap_pydantic_dict(metadata_response) + elif query_kind == "LifecycleQuery": + lifecycle_query_runner = LifecycleQueryRunner(query_json, team) + return _unwrap_pydantic_dict(lifecycle_query_runner.run()) elif query_kind == "DatabaseSchemaQuery": database = create_hogql_database(team.pk) return serialize_database(database) diff --git a/posthog/api/test/__snapshots__/test_session_recordings.ambr b/posthog/api/test/__snapshots__/test_session_recordings.ambr index 11f6e55469f01..e97965ce54b8f 100644 --- a/posthog/api/test/__snapshots__/test_session_recordings.ambr +++ b/posthog/api/test/__snapshots__/test_session_recordings.ambr @@ -83,72 +83,6 @@ ' --- # name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.100 - ' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ' ---- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.101 - ' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ' ---- -# name: 
TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.102 - ' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ' ---- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.103 - ' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ' ---- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.104 - ' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ' ---- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.105 - ' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 
/*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ' ---- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.106 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -159,7 +93,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.107 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.101 ' SELECT "posthog_sessionrecording"."id", "posthog_sessionrecording"."session_id", @@ -193,7 +127,7 @@ GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.108 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.102 ' SELECT "posthog_sessionrecordingviewed"."session_id" FROM "posthog_sessionrecordingviewed" @@ -201,7 +135,7 @@ AND "posthog_sessionrecordingviewed"."user_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.109 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.103 ' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -228,18 +162,7 @@ AND "posthog_persondistinctid"."team_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: 
TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.11 - ' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ' ---- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.110 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.104 ' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -254,7 +177,7 @@ 5 /* ... */) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.111 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.105 ' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -305,7 +228,7 @@ LIMIT 21 ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.112 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.106 ' SELECT "posthog_user"."id", "posthog_user"."password", @@ -334,7 +257,7 @@ LIMIT 21 /**/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.113 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.107 ' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -378,7 +301,7 @@ LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.114 +# name: 
TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.108 ' SELECT "posthog_organizationmembership"."id", "posthog_organizationmembership"."organization_id", @@ -407,7 +330,7 @@ WHERE "posthog_organizationmembership"."user_id" = 2 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.115 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.109 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -418,62 +341,62 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.116 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.11 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' + WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS' ORDER BY "posthog_instancesetting"."id" ASC LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.117 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.110 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED' + WHERE "posthog_instancesetting"."key" = 
'constance:posthog:PERSON_ON_EVENTS_ENABLED' ORDER BY "posthog_instancesetting"."id" ASC LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.118 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.111 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' + WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED' ORDER BY "posthog_instancesetting"."id" ASC LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.119 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.112 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED' + WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' ORDER BY "posthog_instancesetting"."id" ASC LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.12 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.113 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 
'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS' + WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED' ORDER BY "posthog_instancesetting"."id" ASC LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.120 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.114 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -484,7 +407,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.121 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.115 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -495,7 +418,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.122 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.116 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -506,7 +429,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.123 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.117 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -517,18 +440,7 @@ LIMIT 1 
/*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.124 - ' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ' ---- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.125 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.118 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -539,7 +451,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.126 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.119 ' SELECT "posthog_sessionrecording"."id", "posthog_sessionrecording"."session_id", @@ -574,7 +486,18 @@ GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.127 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.12 + ' + SELECT "posthog_instancesetting"."id", + "posthog_instancesetting"."key", + "posthog_instancesetting"."raw_value" + FROM "posthog_instancesetting" + WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS' 
+ ORDER BY "posthog_instancesetting"."id" ASC + LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ + ' +--- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.120 ' SELECT "posthog_sessionrecordingviewed"."session_id" FROM "posthog_sessionrecordingviewed" @@ -582,7 +505,7 @@ AND "posthog_sessionrecordingviewed"."user_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.128 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.121 ' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -610,7 +533,7 @@ AND "posthog_persondistinctid"."team_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.129 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.122 ' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -625,18 +548,7 @@ 5 /* ... 
*/) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.13 - ' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ' ---- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.130 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.123 ' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -687,7 +599,7 @@ LIMIT 21 ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.131 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.124 ' SELECT "posthog_user"."id", "posthog_user"."password", @@ -716,7 +628,7 @@ LIMIT 21 /**/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.132 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.125 ' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -760,7 +672,7 @@ LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.133 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.126 ' SELECT "posthog_organizationmembership"."id", "posthog_organizationmembership"."organization_id", @@ -789,7 +701,7 @@ WHERE "posthog_organizationmembership"."user_id" = 2 
/*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.134 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.127 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -800,7 +712,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.135 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.128 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -811,7 +723,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.136 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.129 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -822,51 +734,51 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.137 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.13 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' + WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS' ORDER BY "posthog_instancesetting"."id" ASC LIMIT 1 
/*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.138 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.130 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED' + WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' ORDER BY "posthog_instancesetting"."id" ASC LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.139 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.131 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' + WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED' ORDER BY "posthog_instancesetting"."id" ASC LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.14 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.132 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS' + WHERE "posthog_instancesetting"."key" = 
'constance:posthog:PERSON_ON_EVENTS_ENABLED' ORDER BY "posthog_instancesetting"."id" ASC LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.140 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.133 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -877,7 +789,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.141 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.134 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -888,7 +800,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.142 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.135 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -899,18 +811,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.143 - ' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 
/*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ' ---- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.144 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.136 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -921,7 +822,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.145 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.137 ' SELECT "posthog_sessionrecording"."id", "posthog_sessionrecording"."session_id", @@ -957,7 +858,7 @@ GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.146 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.138 ' SELECT "posthog_sessionrecordingviewed"."session_id" FROM "posthog_sessionrecordingviewed" @@ -965,7 +866,7 @@ AND "posthog_sessionrecordingviewed"."user_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.147 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.139 ' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -994,7 +895,15 @@ AND "posthog_persondistinctid"."team_id" = 2) 
/*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.148 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.14 + ' + SELECT "posthog_sessionrecordingviewed"."session_id" + FROM "posthog_sessionrecordingviewed" + WHERE ("posthog_sessionrecordingviewed"."team_id" = 2 + AND "posthog_sessionrecordingviewed"."user_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ + ' +--- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.140 ' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -1009,7 +918,7 @@ 5 /* ... */) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.149 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.141 ' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -1060,15 +969,7 @@ LIMIT 21 ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.15 - ' - SELECT "posthog_sessionrecordingviewed"."session_id" - FROM "posthog_sessionrecordingviewed" - WHERE ("posthog_sessionrecordingviewed"."team_id" = 2 - AND "posthog_sessionrecordingviewed"."user_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ' ---- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.150 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.142 ' SELECT "posthog_user"."id", "posthog_user"."password", @@ -1097,7 +998,7 @@ 
LIMIT 21 /**/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.151 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.143 ' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -1141,7 +1042,7 @@ LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.152 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.144 ' SELECT "posthog_organizationmembership"."id", "posthog_organizationmembership"."organization_id", @@ -1170,7 +1071,7 @@ WHERE "posthog_organizationmembership"."user_id" = 2 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.153 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.145 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -1181,7 +1082,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.154 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.146 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -1192,7 +1093,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.155 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.147 ' SELECT "posthog_instancesetting"."id", 
"posthog_instancesetting"."key", @@ -1203,7 +1104,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.156 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.148 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -1214,7 +1115,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.157 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.149 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -1225,29 +1126,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.158 - ' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ' ---- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.159 - ' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 
/*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ' ---- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.16 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.15 ' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -1298,18 +1177,18 @@ LIMIT 21 ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.160 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.150 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS' + WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' ORDER BY "posthog_instancesetting"."id" ASC LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.161 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.151 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -1320,7 +1199,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.162 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.152 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -1331,7 +1210,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: 
TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.163 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.153 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -1342,7 +1221,18 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.164 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.154 + ' + SELECT "posthog_instancesetting"."id", + "posthog_instancesetting"."key", + "posthog_instancesetting"."raw_value" + FROM "posthog_instancesetting" + WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS' + ORDER BY "posthog_instancesetting"."id" ASC + LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ + ' +--- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.155 ' SELECT "posthog_sessionrecording"."id", "posthog_sessionrecording"."session_id", @@ -1379,7 +1269,7 @@ GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.165 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.156 ' SELECT "posthog_sessionrecordingviewed"."session_id" FROM "posthog_sessionrecordingviewed" @@ -1387,7 +1277,7 @@ AND "posthog_sessionrecordingviewed"."user_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: 
TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.166 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.157 ' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -1417,7 +1307,7 @@ AND "posthog_persondistinctid"."team_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.167 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.158 ' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -1432,7 +1322,7 @@ 5 /* ... */) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.168 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.159 ' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -1483,7 +1373,7 @@ LIMIT 21 ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.169 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.16 ' SELECT "posthog_user"."id", "posthog_user"."password", @@ -1512,7 +1402,7 @@ LIMIT 21 /**/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.17 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.160 ' SELECT "posthog_user"."id", "posthog_user"."password", @@ -1541,7 +1431,7 @@ LIMIT 21 /**/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.170 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.161 ' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -1585,7 +1475,7 @@ LIMIT 21 
/*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.171 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.162 ' SELECT "posthog_organizationmembership"."id", "posthog_organizationmembership"."organization_id", @@ -1614,7 +1504,7 @@ WHERE "posthog_organizationmembership"."user_id" = 2 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.172 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.163 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -1625,7 +1515,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.173 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.164 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -1636,7 +1526,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.174 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.165 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -1647,7 +1537,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: 
TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.175 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.166 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -1658,7 +1548,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.176 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.167 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -1669,7 +1559,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.177 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.168 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -1680,7 +1570,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.178 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.169 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -1691,18 +1581,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.179 - ' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 
'constance:posthog:RECORDINGS_TTL_WEEKS' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ' ---- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.18 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.17 ' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -1746,29 +1625,29 @@ LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.180 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.170 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS' + WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS' ORDER BY "posthog_instancesetting"."id" ASC LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.181 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.171 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS' + WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS' ORDER BY "posthog_instancesetting"."id" ASC LIMIT 1 
/*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.182 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.172 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -1779,7 +1658,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.183 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.173 ' SELECT "posthog_sessionrecording"."id", "posthog_sessionrecording"."session_id", @@ -1817,7 +1696,7 @@ GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.184 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.174 ' SELECT "posthog_sessionrecordingviewed"."session_id" FROM "posthog_sessionrecordingviewed" @@ -1825,7 +1704,7 @@ AND "posthog_sessionrecordingviewed"."user_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.185 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.175 ' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -1856,7 +1735,7 @@ AND "posthog_persondistinctid"."team_id" = 2) 
/*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.186 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.176 ' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -1871,7 +1750,7 @@ 5 /* ... */) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.187 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.177 ' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -1922,7 +1801,7 @@ LIMIT 21 ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.188 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.178 ' SELECT "posthog_user"."id", "posthog_user"."password", @@ -1951,7 +1830,7 @@ LIMIT 21 /**/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.189 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.179 ' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -1995,7 +1874,7 @@ LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.19 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.18 ' SELECT "posthog_organizationmembership"."id", "posthog_organizationmembership"."organization_id", @@ -2024,7 +1903,7 @@ WHERE "posthog_organizationmembership"."user_id" = 2 
/*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.190 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.180 ' SELECT "posthog_organizationmembership"."id", "posthog_organizationmembership"."organization_id", @@ -2053,7 +1932,7 @@ WHERE "posthog_organizationmembership"."user_id" = 2 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.191 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.181 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -2064,7 +1943,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.192 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.182 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -2075,7 +1954,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.193 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.183 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -2086,7 +1965,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: 
TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.194 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.184 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -2097,6 +1976,177 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.185 + ' + SELECT "posthog_instancesetting"."id", + "posthog_instancesetting"."key", + "posthog_instancesetting"."raw_value" + FROM "posthog_instancesetting" + WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED' + ORDER BY "posthog_instancesetting"."id" ASC + LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ + ' +--- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.186 + ' + SELECT "posthog_instancesetting"."id", + "posthog_instancesetting"."key", + "posthog_instancesetting"."raw_value" + FROM "posthog_instancesetting" + WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' + ORDER BY "posthog_instancesetting"."id" ASC + LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ + ' +--- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.187 + ' + SELECT "posthog_instancesetting"."id", + "posthog_instancesetting"."key", + "posthog_instancesetting"."raw_value" + FROM "posthog_instancesetting" + WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS' + ORDER BY "posthog_instancesetting"."id" ASC + LIMIT 1 
/*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ + ' +--- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.188 + ' + SELECT "posthog_instancesetting"."id", + "posthog_instancesetting"."key", + "posthog_instancesetting"."raw_value" + FROM "posthog_instancesetting" + WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS' + ORDER BY "posthog_instancesetting"."id" ASC + LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ + ' +--- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.189 + ' + SELECT "posthog_instancesetting"."id", + "posthog_instancesetting"."key", + "posthog_instancesetting"."raw_value" + FROM "posthog_instancesetting" + WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS' + ORDER BY "posthog_instancesetting"."id" ASC + LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ + ' +--- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.19 + ' + SELECT "posthog_instancesetting"."id", + "posthog_instancesetting"."key", + "posthog_instancesetting"."raw_value" + FROM "posthog_instancesetting" + WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED' + ORDER BY "posthog_instancesetting"."id" ASC + LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ + ' +--- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.190 + ' + SELECT "posthog_instancesetting"."id", + "posthog_instancesetting"."key", + "posthog_instancesetting"."raw_value" + FROM 
"posthog_instancesetting" + WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS' + ORDER BY "posthog_instancesetting"."id" ASC + LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ + ' +--- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.191 + ' + SELECT "posthog_sessionrecording"."id", + "posthog_sessionrecording"."session_id", + "posthog_sessionrecording"."team_id", + "posthog_sessionrecording"."created_at", + "posthog_sessionrecording"."deleted", + "posthog_sessionrecording"."object_storage_path", + "posthog_sessionrecording"."distinct_id", + "posthog_sessionrecording"."duration", + "posthog_sessionrecording"."active_seconds", + "posthog_sessionrecording"."inactive_seconds", + "posthog_sessionrecording"."start_time", + "posthog_sessionrecording"."end_time", + "posthog_sessionrecording"."click_count", + "posthog_sessionrecording"."keypress_count", + "posthog_sessionrecording"."mouse_activity_count", + "posthog_sessionrecording"."console_log_count", + "posthog_sessionrecording"."console_warn_count", + "posthog_sessionrecording"."console_error_count", + "posthog_sessionrecording"."start_url", + "posthog_sessionrecording"."storage_version", + COUNT("posthog_sessionrecordingplaylistitem"."id") AS "pinned_count" + FROM "posthog_sessionrecording" + LEFT OUTER JOIN "posthog_sessionrecordingplaylistitem" ON ("posthog_sessionrecording"."session_id" = "posthog_sessionrecordingplaylistitem"."recording_id") + WHERE ("posthog_sessionrecording"."session_id" IN ('5', + '2', + '3', + '4', + '7', + '10', + '6', + '1', + '8', + '9') + AND "posthog_sessionrecording"."team_id" = 2) + GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ + ' +--- +# name: 
TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.192 + ' + SELECT "posthog_sessionrecordingviewed"."session_id" + FROM "posthog_sessionrecordingviewed" + WHERE ("posthog_sessionrecordingviewed"."team_id" = 2 + AND "posthog_sessionrecordingviewed"."user_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ + ' +--- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.193 + ' + SELECT "posthog_persondistinctid"."id", + "posthog_persondistinctid"."team_id", + "posthog_persondistinctid"."person_id", + "posthog_persondistinctid"."distinct_id", + "posthog_persondistinctid"."version", + "posthog_person"."id", + "posthog_person"."created_at", + "posthog_person"."properties_last_updated_at", + "posthog_person"."properties_last_operation", + "posthog_person"."team_id", + "posthog_person"."properties", + "posthog_person"."is_user_id", + "posthog_person"."is_identified", + "posthog_person"."uuid", + "posthog_person"."version" + FROM "posthog_persondistinctid" + INNER JOIN "posthog_person" ON ("posthog_persondistinctid"."person_id" = "posthog_person"."id") + WHERE ("posthog_persondistinctid"."distinct_id" IN ('user1', + 'user10', + 'user2', + 'user3', + 'user4', + 'user5', + 'user6', + 'user7', + 'user8', + 'user9') + AND "posthog_persondistinctid"."team_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ + ' +--- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.194 + ' + SELECT "posthog_persondistinctid"."id", + "posthog_persondistinctid"."team_id", + "posthog_persondistinctid"."person_id", + "posthog_persondistinctid"."distinct_id", + "posthog_persondistinctid"."version" + FROM "posthog_persondistinctid" + WHERE "posthog_persondistinctid"."person_id" IN (1, + 2, + 3, + 4, + 5 /* ... 
*/) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ + ' +--- # name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.195 ' SELECT "posthog_instancesetting"."id", @@ -2187,7 +2237,7 @@ "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED' + WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' ORDER BY "posthog_instancesetting"."id" ASC LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' @@ -2314,7 +2364,7 @@ "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' + WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED' ORDER BY "posthog_instancesetting"."id" ASC LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' @@ -2325,7 +2375,7 @@ "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED' + WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' ORDER BY "posthog_instancesetting"."id" ASC LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' @@ -2336,7 +2386,7 @@ "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 
'constance:posthog:PERSON_ON_EVENTS_ENABLED' + WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED' ORDER BY "posthog_instancesetting"."id" ASC LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' @@ -2347,7 +2397,7 @@ "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED' + WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' ORDER BY "posthog_instancesetting"."id" ASC LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' @@ -2358,7 +2408,7 @@ "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' + WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS' ORDER BY "posthog_instancesetting"."id" ASC LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' @@ -2369,7 +2419,7 @@ "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS' + WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS' ORDER BY "posthog_instancesetting"."id" ASC LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' @@ -2380,7 +2430,7 @@ "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM "posthog_instancesetting" - WHERE 
"posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS' + WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS' ORDER BY "posthog_instancesetting"."id" ASC LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' @@ -2397,39 +2447,6 @@ ' --- # name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.29 - ' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ' ---- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.3 - ' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:RATE_LIMIT_ENABLED' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ' ---- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.30 - ' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - 
' ---- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.31 ' SELECT "posthog_sessionrecording"."id", "posthog_sessionrecording"."session_id", @@ -2459,7 +2476,18 @@ GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.32 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.3 + ' + SELECT "posthog_instancesetting"."id", + "posthog_instancesetting"."key", + "posthog_instancesetting"."raw_value" + FROM "posthog_instancesetting" + WHERE "posthog_instancesetting"."key" = 'constance:posthog:RATE_LIMIT_ENABLED' + ORDER BY "posthog_instancesetting"."id" ASC + LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ + ' +--- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.30 ' SELECT "posthog_sessionrecordingviewed"."session_id" FROM "posthog_sessionrecordingviewed" @@ -2467,7 +2495,7 @@ AND "posthog_sessionrecordingviewed"."user_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.33 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.31 ' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -2490,7 +2518,7 @@ AND "posthog_persondistinctid"."team_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.34 +# name: 
TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.32 ' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -2505,7 +2533,7 @@ 5 /* ... */) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.35 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.33 ' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -2556,7 +2584,7 @@ LIMIT 21 ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.36 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.34 ' SELECT "posthog_user"."id", "posthog_user"."password", @@ -2585,7 +2613,7 @@ LIMIT 21 /**/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.37 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.35 ' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -2629,7 +2657,7 @@ LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.38 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.36 ' SELECT "posthog_organizationmembership"."id", "posthog_organizationmembership"."organization_id", @@ -2658,18 +2686,7 @@ WHERE "posthog_organizationmembership"."user_id" = 2 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.39 - ' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM 
"posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ' ---- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.4 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.37 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -2680,7 +2697,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.40 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.38 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -2691,7 +2708,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.41 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.39 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -2702,18 +2719,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.42 - ' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 
/*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ' ---- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.43 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.4 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -2724,7 +2730,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.44 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.40 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -2735,29 +2741,29 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.45 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.41 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS' + WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED' ORDER BY "posthog_instancesetting"."id" ASC LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.46 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.42 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM 
"posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS' + WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' ORDER BY "posthog_instancesetting"."id" ASC LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.47 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.43 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -2768,7 +2774,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.48 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.44 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -2779,7 +2785,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.49 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.45 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -2790,18 +2796,18 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.5 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.46 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM "posthog_instancesetting" - WHERE 
"posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' + WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS' ORDER BY "posthog_instancesetting"."id" ASC LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.50 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.47 ' SELECT "posthog_sessionrecording"."id", "posthog_sessionrecording"."session_id", @@ -2832,7 +2838,7 @@ GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.51 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.48 ' SELECT "posthog_sessionrecordingviewed"."session_id" FROM "posthog_sessionrecordingviewed" @@ -2840,7 +2846,7 @@ AND "posthog_sessionrecordingviewed"."user_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.52 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.49 ' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -2864,7 +2870,18 @@ AND "posthog_persondistinctid"."team_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.53 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.5 + ' + SELECT 
"posthog_instancesetting"."id", + "posthog_instancesetting"."key", + "posthog_instancesetting"."raw_value" + FROM "posthog_instancesetting" + WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' + ORDER BY "posthog_instancesetting"."id" ASC + LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ + ' +--- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.50 ' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -2879,7 +2896,7 @@ 5 /* ... */) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.54 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.51 ' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -2930,7 +2947,7 @@ LIMIT 21 ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.55 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.52 ' SELECT "posthog_user"."id", "posthog_user"."password", @@ -2959,7 +2976,7 @@ LIMIT 21 /**/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.56 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.53 ' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -3003,7 +3020,7 @@ LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.57 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.54 ' SELECT "posthog_organizationmembership"."id", "posthog_organizationmembership"."organization_id", @@ 
-3032,7 +3049,7 @@ WHERE "posthog_organizationmembership"."user_id" = 2 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.58 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.55 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -3043,7 +3060,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.59 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.56 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -3054,7 +3071,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.6 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.57 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -3065,29 +3082,29 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.60 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.58 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED' + WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' ORDER 
BY "posthog_instancesetting"."id" ASC LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.61 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.59 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' + WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED' ORDER BY "posthog_instancesetting"."id" ASC LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.62 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.6 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -3098,7 +3115,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.63 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.60 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -3109,7 +3126,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.64 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.61 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -3120,7 
+3137,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.65 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.62 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -3131,7 +3148,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.66 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.63 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -3142,18 +3159,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.67 - ' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ' ---- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.68 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.64 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -3164,7 +3170,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: 
TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.69 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.65 ' SELECT "posthog_sessionrecording"."id", "posthog_sessionrecording"."session_id", @@ -3196,18 +3202,7 @@ GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.7 - ' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ' ---- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.70 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.66 ' SELECT "posthog_sessionrecordingviewed"."session_id" FROM "posthog_sessionrecordingviewed" @@ -3215,7 +3210,7 @@ AND "posthog_sessionrecordingviewed"."user_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.71 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.67 ' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -3240,7 +3235,7 @@ AND "posthog_persondistinctid"."team_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: 
TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.72 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.68 ' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -3255,7 +3250,7 @@ 5 /* ... */) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.73 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.69 ' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -3306,7 +3301,18 @@ LIMIT 21 ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.74 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.7 + ' + SELECT "posthog_instancesetting"."id", + "posthog_instancesetting"."key", + "posthog_instancesetting"."raw_value" + FROM "posthog_instancesetting" + WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' + ORDER BY "posthog_instancesetting"."id" ASC + LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ + ' +--- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.70 ' SELECT "posthog_user"."id", "posthog_user"."password", @@ -3335,7 +3341,7 @@ LIMIT 21 /**/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.75 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.71 ' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -3379,7 +3385,7 @@ LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.76 +# name: 
TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.72 ' SELECT "posthog_organizationmembership"."id", "posthog_organizationmembership"."organization_id", @@ -3408,7 +3414,7 @@ WHERE "posthog_organizationmembership"."user_id" = 2 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.77 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.73 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -3419,7 +3425,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.78 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.74 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -3430,18 +3436,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.79 - ' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ' ---- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.8 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.75 ' SELECT 
"posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -3452,7 +3447,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.80 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.76 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -3463,7 +3458,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.81 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.77 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -3474,7 +3469,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.82 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.78 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -3485,7 +3480,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.83 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.79 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -3496,40 +3491,40 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: 
TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.84 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.8 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS' + WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED' ORDER BY "posthog_instancesetting"."id" ASC LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.85 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.80 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS' + WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS' ORDER BY "posthog_instancesetting"."id" ASC LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.86 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.81 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", "posthog_instancesetting"."raw_value" FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS' + WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS' ORDER BY "posthog_instancesetting"."id" ASC LIMIT 1 
/*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.87 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.82 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -3540,7 +3535,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.88 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.83 ' SELECT "posthog_sessionrecording"."id", "posthog_sessionrecording"."session_id", @@ -3573,7 +3568,7 @@ GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.89 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.84 ' SELECT "posthog_sessionrecordingviewed"."session_id" FROM "posthog_sessionrecordingviewed" @@ -3581,18 +3576,7 @@ AND "posthog_sessionrecordingviewed"."user_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.9 - ' - SELECT "posthog_instancesetting"."id", - "posthog_instancesetting"."key", - "posthog_instancesetting"."raw_value" - FROM "posthog_instancesetting" - WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' - ORDER BY "posthog_instancesetting"."id" ASC - LIMIT 1 
/*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ - ' ---- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.90 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.85 ' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -3618,7 +3602,7 @@ AND "posthog_persondistinctid"."team_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.91 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.86 ' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -3633,7 +3617,7 @@ 5 /* ... */) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.92 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.87 ' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -3684,7 +3668,7 @@ LIMIT 21 ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.93 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.88 ' SELECT "posthog_user"."id", "posthog_user"."password", @@ -3713,7 +3697,7 @@ LIMIT 21 /**/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.94 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.89 ' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -3757,7 +3741,18 @@ LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# 
name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.95 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.9 + ' + SELECT "posthog_instancesetting"."id", + "posthog_instancesetting"."key", + "posthog_instancesetting"."raw_value" + FROM "posthog_instancesetting" + WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' + ORDER BY "posthog_instancesetting"."id" ASC + LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ + ' +--- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.90 ' SELECT "posthog_organizationmembership"."id", "posthog_organizationmembership"."organization_id", @@ -3786,7 +3781,7 @@ WHERE "posthog_organizationmembership"."user_id" = 2 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.96 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.91 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -3797,7 +3792,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.97 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.92 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -3808,7 +3803,7 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.98 +# name: 
TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.93 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -3819,7 +3814,29 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- -# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.99 +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.94 + ' + SELECT "posthog_instancesetting"."id", + "posthog_instancesetting"."key", + "posthog_instancesetting"."raw_value" + FROM "posthog_instancesetting" + WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED' + ORDER BY "posthog_instancesetting"."id" ASC + LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ + ' +--- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.95 + ' + SELECT "posthog_instancesetting"."id", + "posthog_instancesetting"."key", + "posthog_instancesetting"."raw_value" + FROM "posthog_instancesetting" + WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED' + ORDER BY "posthog_instancesetting"."id" ASC + LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ + ' +--- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.96 ' SELECT "posthog_instancesetting"."id", "posthog_instancesetting"."key", @@ -3830,3 +3847,36 @@ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ ' --- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.97 + ' + SELECT "posthog_instancesetting"."id", + 
"posthog_instancesetting"."key", + "posthog_instancesetting"."raw_value" + FROM "posthog_instancesetting" + WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS' + ORDER BY "posthog_instancesetting"."id" ASC + LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ + ' +--- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.98 + ' + SELECT "posthog_instancesetting"."id", + "posthog_instancesetting"."key", + "posthog_instancesetting"."raw_value" + FROM "posthog_instancesetting" + WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS' + ORDER BY "posthog_instancesetting"."id" ASC + LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ + ' +--- +# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.99 + ' + SELECT "posthog_instancesetting"."id", + "posthog_instancesetting"."key", + "posthog_instancesetting"."raw_value" + FROM "posthog_instancesetting" + WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS' + ORDER BY "posthog_instancesetting"."id" ASC + LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/ + ' +--- diff --git a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr index af0efd4023fe7..245b0ceb08720 100644 --- a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr +++ b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr @@ -40,6 +40,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", 
"posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -51,6 +52,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -157,6 +159,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -168,6 +171,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -267,6 +271,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -278,6 +283,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -468,6 +474,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -479,6 +486,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", 
"posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -626,6 +634,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -637,6 +646,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -795,6 +805,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -806,6 +817,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -951,6 +963,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -962,6 +975,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -1181,6 +1195,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + 
"posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -1192,6 +1207,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -1230,6 +1246,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -1241,6 +1258,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -1376,6 +1394,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -1387,6 +1406,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -1478,6 +1498,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -1489,6 +1510,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + 
"posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -1527,6 +1549,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -1538,6 +1561,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -1671,6 +1695,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -1682,6 +1707,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -1789,6 +1815,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -1800,6 +1827,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -2040,6 +2068,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", 
"posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -2051,6 +2080,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -2272,6 +2302,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -2283,6 +2314,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -2390,6 +2422,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -2401,6 +2434,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -2501,6 +2535,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -2512,6 +2547,7 @@ "posthog_team"."signup_token", 
"posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -2612,6 +2648,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -2623,6 +2660,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -2703,6 +2741,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -2714,6 +2753,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -2845,6 +2885,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -2856,6 +2897,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -2933,6 +2975,7 @@ "posthog_team"."updated_at", 
"posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -2944,6 +2987,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -3048,6 +3092,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -3059,6 +3104,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -3163,6 +3209,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -3174,6 +3221,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -3289,6 +3337,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -3300,6 +3349,7 @@ 
"posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -3600,6 +3650,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -3611,6 +3662,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -3750,6 +3802,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -3761,6 +3814,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -3874,6 +3928,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -3885,6 +3940,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -3951,6 +4007,7 @@ 
"posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -3962,6 +4019,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -4104,6 +4162,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -4115,6 +4174,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -4153,6 +4213,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -4164,6 +4225,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -4268,6 +4330,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ 
-4279,6 +4342,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -4409,6 +4473,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -4420,6 +4485,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -4825,6 +4891,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -4836,6 +4903,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -4956,6 +5024,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -4967,6 +5036,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ 
-5033,6 +5103,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -5044,6 +5115,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -5148,6 +5220,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -5159,6 +5232,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -5224,6 +5298,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -5235,6 +5310,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -5273,6 +5349,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", 
"posthog_team"."autocapture_exceptions_opt_in", @@ -5284,6 +5361,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -5388,6 +5466,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -5399,6 +5478,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -5520,6 +5600,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -5531,6 +5612,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -5674,6 +5756,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -5685,6 +5768,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", 
"posthog_team"."test_account_filters_default_checked", @@ -6072,6 +6156,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -6083,6 +6168,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -6211,6 +6297,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -6222,6 +6309,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -6383,6 +6471,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -6394,6 +6483,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -6541,6 +6631,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", 
"posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -6552,6 +6643,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -6671,6 +6763,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -6682,6 +6775,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -6752,6 +6846,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -6763,6 +6858,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -6908,6 +7004,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -6919,6 +7016,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", 
"posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -7538,6 +7636,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -7549,6 +7648,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -7780,6 +7880,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -7791,6 +7892,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -7933,6 +8035,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -7944,6 +8047,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -7982,6 +8086,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", 
"posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -7993,6 +8098,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -8097,6 +8203,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -8108,6 +8215,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -8238,6 +8346,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -8249,6 +8358,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -8353,6 +8463,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -8364,6 +8475,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", 
"posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -8480,6 +8592,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -8491,6 +8604,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -8612,6 +8726,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -8623,6 +8738,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -8912,6 +9028,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -8923,6 +9040,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -9058,6 +9176,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + 
"posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -9069,6 +9188,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -9153,6 +9273,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -9164,6 +9285,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -9274,6 +9396,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -9285,6 +9408,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -9392,6 +9516,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -9403,6 +9528,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + 
"posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -9513,6 +9639,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -9524,6 +9651,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -9685,6 +9813,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -9696,6 +9825,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -9834,6 +9964,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -9845,6 +9976,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -9929,6 +10061,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", 
"posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -9940,6 +10073,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -10081,6 +10215,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -10092,6 +10227,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -10248,6 +10384,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -10259,6 +10396,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -10350,6 +10488,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -10361,6 +10500,7 @@ "posthog_team"."signup_token", 
"posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -10502,6 +10642,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -10513,6 +10654,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -10631,6 +10773,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -10642,6 +10785,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -10831,6 +10975,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -10842,6 +10987,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", diff --git 
a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr index 396f5103c7ec3..299074ec3d44b 100644 --- a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr +++ b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr @@ -40,6 +40,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -51,6 +52,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -76,6 +78,7 @@ "posthog_notebook"."team_id", "posthog_notebook"."title", "posthog_notebook"."content", + "posthog_notebook"."text_content", "posthog_notebook"."deleted", "posthog_notebook"."version", "posthog_notebook"."created_at", @@ -94,6 +97,7 @@ "posthog_notebook"."team_id", "posthog_notebook"."title", "posthog_notebook"."content", + "posthog_notebook"."text_content", "posthog_notebook"."deleted", "posthog_notebook"."version", "posthog_notebook"."created_at", @@ -120,6 +124,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -131,6 +136,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -169,6 +175,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", 
"posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -180,6 +187,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -277,6 +285,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -288,6 +297,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -479,6 +489,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -490,6 +501,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -555,6 +567,7 @@ "posthog_notebook"."team_id", "posthog_notebook"."title", "posthog_notebook"."content", + "posthog_notebook"."text_content", "posthog_notebook"."deleted", "posthog_notebook"."version", "posthog_notebook"."created_at", @@ -572,6 +585,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + 
"posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -583,6 +597,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -671,6 +686,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -682,6 +698,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", diff --git a/posthog/api/test/notebooks/test_notebook.py b/posthog/api/test/notebooks/test_notebook.py index 3f49024d708e9..1b7f36ae54ce3 100644 --- a/posthog/api/test/notebooks/test_notebook.py +++ b/posthog/api/test/notebooks/test_notebook.py @@ -1,4 +1,4 @@ -from typing import List, Dict, Optional +from typing import List, Dict from unittest import mock from freezegun import freeze_time @@ -67,17 +67,20 @@ def test_cannot_list_deleted_notebook(self) -> None: @parameterized.expand( [ - ("without_content", None), - ("with_content", {"some": "kind", "of": "tip", "tap": "content"}), + ("without_content", None, None), + ("with_content", {"some": "kind", "of": "tip", "tap": "content"}, "some kind of tip tap content"), ] ) - def test_create_a_notebook(self, _, content: Optional[Dict]) -> None: - response = self.client.post(f"/api/projects/{self.team.id}/notebooks", data={"content": content}) + def test_create_a_notebook(self, _, content: Dict | None, text_content: str | None) -> 
None: + response = self.client.post( + f"/api/projects/{self.team.id}/notebooks", data={"content": content, "text_content": text_content} + ) assert response.status_code == status.HTTP_201_CREATED assert response.json() == { "id": response.json()["id"], "short_id": response.json()["short_id"], "content": content, + "text_content": text_content, "title": None, "version": 0, "created_at": mock.ANY, diff --git a/posthog/api/test/notebooks/test_notebook_filtering.py b/posthog/api/test/notebooks/test_notebook_filtering.py index 4e9f9370c178d..5f634de548fc7 100644 --- a/posthog/api/test/notebooks/test_notebook_filtering.py +++ b/posthog/api/test/notebooks/test_notebook_filtering.py @@ -42,7 +42,7 @@ }, } -BASIC_TEXT = lambda text: {"type": "paragraph", "content": [{"text": text, "type": "text"}]} +BASIC_TEXT = lambda text: {"type": "paragraph", "content": [{"text": text, "type": "text"}], "text_content": text} class TestNotebooksFiltering(APIBaseTest, QueryMatchingTest): @@ -62,20 +62,22 @@ def _create_notebook_with_content(self, inner_content: List[Dict[str, Any]], tit @parameterized.expand( [ - ["some text", [0]], - ["other text", [1]], - ["text", [0, 1]], + ["i ride", [0]], + ["pony", [0]], + ["ponies", [0]], + ["my hobby", [1]], + ["around", [0, 1]], ["random", []], ] ) def test_filters_based_on_title(self, search_text: str, expected_match_indexes: List[int]) -> None: notebook_ids = [ - self._create_notebook_with_content([BASIC_TEXT("my important notes")], title="some text"), - self._create_notebook_with_content([BASIC_TEXT("my important notes")], title="other text"), + self._create_notebook_with_content([BASIC_TEXT("my important notes")], title="i ride around on a pony"), + self._create_notebook_with_content([BASIC_TEXT("my important notes")], title="my hobby is to fish around"), ] response = self.client.get( - f"/api/projects/{self.team.id}/notebooks?s={search_text}", + f"/api/projects/{self.team.id}/notebooks?search={search_text}", ) assert response.status_code == 
status.HTTP_200_OK @@ -83,6 +85,32 @@ def test_filters_based_on_title(self, search_text: str, expected_match_indexes: assert len(results) == len(expected_match_indexes) assert sorted([r["id"] for r in results]) == sorted([notebook_ids[i] for i in expected_match_indexes]) + @parameterized.expand( + [ + ["pony", [0]], + ["pOnY", [0]], + ["ponies", [0]], + ["goat", [1]], + ["ride", [0, 1]], + ["neither", []], + ] + ) + def test_filters_based_on_text_content(self, search_text: str, expected_match_indexes: List[int]) -> None: + notebook_ids = [ + # will match both pony and ponies + self._create_notebook_with_content([BASIC_TEXT("you may ride a pony")], title="never matches"), + self._create_notebook_with_content([BASIC_TEXT("but may not ride a goat")], title="never matches"), + ] + + response = self.client.get( + f"/api/projects/{self.team.id}/notebooks?search={search_text}", + ) + assert response.status_code == status.HTTP_200_OK + + results = response.json()["results"] + assert len(results) == len(expected_match_indexes) + assert sorted([r["id"] for r in results]) == sorted([notebook_ids[i] for i in expected_match_indexes]) + def test_filters_based_on_params(self) -> None: other_user = User.objects.create_and_join(self.organization, "other@posthog.com", "password") notebook_one = Notebook.objects.create(team=self.team, created_by=self.user) diff --git a/posthog/api/test/test_annotation.py b/posthog/api/test/test_annotation.py index 82089a5c7ea6b..c559411f607d5 100644 --- a/posthog/api/test/test_annotation.py +++ b/posthog/api/test/test_annotation.py @@ -1,7 +1,7 @@ from datetime import datetime from unittest.mock import patch -import pytz +from zoneinfo import ZoneInfo from django.utils.timezone import now from rest_framework import status @@ -111,7 +111,7 @@ def test_creating_annotation(self, mock_capture): "team": team2.pk, # make sure this is set automatically }, ) - date_marker: datetime = datetime(2020, 1, 1, 0, 0, 0).replace(tzinfo=pytz.UTC) + date_marker: 
datetime = datetime(2020, 1, 1, 0, 0, 0).replace(tzinfo=ZoneInfo("UTC")) self.assertEqual(response.status_code, status.HTTP_201_CREATED) instance = Annotation.objects.get(pk=response.json()["id"]) self.assertEqual(instance.content, "Marketing campaign") diff --git a/posthog/api/test/test_event.py b/posthog/api/test/test_event.py index 8679485030bf6..b7f746c84a473 100644 --- a/posthog/api/test/test_event.py +++ b/posthog/api/test/test_event.py @@ -3,7 +3,7 @@ from unittest.mock import patch from urllib.parse import unquote, urlencode -import pytz +from zoneinfo import ZoneInfo from dateutil import parser from dateutil.relativedelta import relativedelta from django.utils import timezone @@ -168,7 +168,6 @@ def test_custom_event_values(self): @also_test_with_materialized_columns(["random_prop"]) @snapshot_clickhouse_queries def test_event_property_values(self): - with freeze_time("2020-01-10"): _create_event( distinct_id="bla", @@ -346,8 +345,8 @@ def test_pagination_bounded_date_range(self): with freeze_time("2021-10-10T12:03:03.829294Z"): _create_person(team=self.team, distinct_ids=["1"]) now = timezone.now() - relativedelta(months=11) - after = (now).astimezone(pytz.utc).isoformat() - before = (now + relativedelta(days=23)).astimezone(pytz.utc).isoformat() + after = (now).astimezone(ZoneInfo("UTC")).isoformat() + before = (now + relativedelta(days=23)).astimezone(ZoneInfo("UTC")).isoformat() params = {"distinct_id": "1", "after": after, "before": before, "limit": 10} params_string = urlencode(params) for idx in range(0, 25): diff --git a/posthog/api/test/test_feature_flag.py b/posthog/api/test/test_feature_flag.py index 9efebf97b878b..b0d6f73c87ebb 100644 --- a/posthog/api/test/test_feature_flag.py +++ b/posthog/api/test/test_feature_flag.py @@ -939,7 +939,7 @@ def test_my_flags_is_not_nplus1(self) -> None: format="json", ).json() - with self.assertNumQueries(9): + with self.assertNumQueries(10): response = 
self.client.get(f"/api/projects/{self.team.id}/feature_flags/my_flags") self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -950,7 +950,7 @@ def test_my_flags_is_not_nplus1(self) -> None: format="json", ).json() - with self.assertNumQueries(9): + with self.assertNumQueries(10): response = self.client.get(f"/api/projects/{self.team.id}/feature_flags/my_flags") self.assertEqual(response.status_code, status.HTTP_200_OK) diff --git a/posthog/api/test/test_insight.py b/posthog/api/test/test_insight.py index a9697252901f4..8becdf0ac7e60 100644 --- a/posthog/api/test/test_insight.py +++ b/posthog/api/test/test_insight.py @@ -5,7 +5,7 @@ from unittest.case import skip from unittest.mock import patch -import pytz +from zoneinfo import ZoneInfo from django.test import override_settings from django.utils import timezone from freezegun import freeze_time @@ -1860,7 +1860,7 @@ def test_create_insight_viewed(self) -> None: self.assertEqual(created_insight_viewed.user, self.user) self.assertEqual( created_insight_viewed.last_viewed_at, - datetime(2022, 3, 22, 0, 0, tzinfo=pytz.UTC), + datetime(2022, 3, 22, 0, 0, tzinfo=ZoneInfo("UTC")), ) def test_update_insight_viewed(self) -> None: @@ -1882,7 +1882,7 @@ def test_update_insight_viewed(self) -> None: updated_insight_viewed = InsightViewed.objects.all()[0] self.assertEqual( updated_insight_viewed.last_viewed_at, - datetime(2022, 3, 23, 0, 0, tzinfo=pytz.UTC), + datetime(2022, 3, 23, 0, 0, tzinfo=ZoneInfo("UTC")), ) def test_cant_view_insight_viewed_for_insight_in_another_team(self) -> None: diff --git a/posthog/api/test/test_organization_domain.py b/posthog/api/test/test_organization_domain.py index fe4a4e5afa12a..2615880cbff20 100644 --- a/posthog/api/test/test_organization_domain.py +++ b/posthog/api/test/test_organization_domain.py @@ -4,7 +4,7 @@ import dns.resolver import dns.rrset import pytest -import pytz +from zoneinfo import ZoneInfo from django.utils import timezone from freezegun import freeze_time from 
rest_framework import status @@ -133,7 +133,7 @@ def test_creating_domain_on_self_hosted_is_automatically_verified(self): instance = OrganizationDomain.objects.get(id=response_data["id"]) self.assertEqual(instance.domain, "the.posthog.com") - self.assertEqual(instance.verified_at, datetime.datetime(2021, 8, 8, 20, 20, 8, tzinfo=pytz.UTC)) + self.assertEqual(instance.verified_at, datetime.datetime(2021, 8, 8, 20, 20, 8, tzinfo=ZoneInfo("UTC"))) self.assertEqual(instance.last_verification_retry, None) self.assertEqual(instance.sso_enforcement, "") @@ -200,7 +200,7 @@ def test_can_request_verification_for_unverified_domains(self, mock_dns_query): self.assertEqual(response_data["verified_at"], self.domain.verified_at.strftime("%Y-%m-%dT%H:%M:%SZ")) self.assertEqual(response_data["is_verified"], True) - self.assertEqual(self.domain.verified_at, datetime.datetime(2021, 8, 8, 20, 20, 8, tzinfo=pytz.UTC)) + self.assertEqual(self.domain.verified_at, datetime.datetime(2021, 8, 8, 20, 20, 8, tzinfo=ZoneInfo("UTC"))) self.assertEqual(self.domain.is_verified, True) @patch("posthog.models.organization_domain.dns.resolver.resolve") @@ -220,7 +220,7 @@ def test_domain_is_not_verified_with_missing_challenge(self, mock_dns_query): self.assertEqual(response_data["verified_at"], None) self.assertEqual(self.domain.verified_at, None) self.assertEqual( - self.domain.last_verification_retry, datetime.datetime(2021, 10, 10, 10, 10, 10, tzinfo=pytz.UTC) + self.domain.last_verification_retry, datetime.datetime(2021, 10, 10, 10, 10, 10, tzinfo=ZoneInfo("UTC")) ) @patch("posthog.models.organization_domain.dns.resolver.resolve") @@ -240,7 +240,7 @@ def test_domain_is_not_verified_with_missing_domain(self, mock_dns_query): self.assertEqual(response_data["verified_at"], None) self.assertEqual(self.domain.verified_at, None) self.assertEqual( - self.domain.last_verification_retry, datetime.datetime(2021, 10, 10, 10, 10, 10, tzinfo=pytz.UTC) + self.domain.last_verification_retry, 
datetime.datetime(2021, 10, 10, 10, 10, 10, tzinfo=ZoneInfo("UTC")) ) @patch("posthog.models.organization_domain.dns.resolver.resolve") @@ -262,7 +262,7 @@ def test_domain_is_not_verified_with_incorrect_challenge(self, mock_dns_query): self.assertEqual(response_data["verified_at"], None) self.assertEqual(self.domain.verified_at, None) self.assertEqual( - self.domain.last_verification_retry, datetime.datetime(2021, 10, 10, 10, 10, 10, tzinfo=pytz.UTC) + self.domain.last_verification_retry, datetime.datetime(2021, 10, 10, 10, 10, 10, tzinfo=ZoneInfo("UTC")) ) def test_cannot_request_verification_for_verified_domains(self): diff --git a/posthog/api/test/test_plugin.py b/posthog/api/test/test_plugin.py index d393b00910ab3..c9ae3b26c359a 100644 --- a/posthog/api/test/test_plugin.py +++ b/posthog/api/test/test_plugin.py @@ -5,7 +5,7 @@ from unittest import mock from unittest.mock import ANY, patch -import pytz +from zoneinfo import ZoneInfo from django.core.files.uploadedfile import SimpleUploadedFile from freezegun import freeze_time from rest_framework import status @@ -269,7 +269,7 @@ def test_update_plugin_auth(self, mock_sync_from_plugin_archive, mock_get, mock_ plugin = Plugin.objects.get(id=response.json()["id"]) - fake_date = datetime(2022, 1, 1, 0, 0).replace(tzinfo=pytz.UTC) + fake_date = datetime(2022, 1, 1, 0, 0).replace(tzinfo=ZoneInfo("UTC")) self.assertNotEqual(plugin.updated_at, fake_date) with freeze_time(fake_date.isoformat()): @@ -715,7 +715,7 @@ def test_install_plugin_on_multiple_orgs(self, mock_get, mock_reload): name="FooBar2", plugins_access_level=Organization.PluginsAccessLevel.INSTALL ) - fake_date = datetime(2022, 1, 1, 0, 0).replace(tzinfo=pytz.UTC) + fake_date = datetime(2022, 1, 1, 0, 0).replace(tzinfo=ZoneInfo("UTC")) with freeze_time(fake_date.isoformat()): response = self.client.post( f"/api/organizations/{my_org.id}/plugins/", {"url": "https://github.com/PostHog/helloworldplugin"} @@ -1281,7 +1281,7 @@ def 
test_check_for_updates_plugins_reload_not_called(self, _, mock_reload): plugin_id = response.json()["id"] plugin = Plugin.objects.get(id=plugin_id) - fake_date = datetime(2022, 1, 1, 0, 0).replace(tzinfo=pytz.UTC) + fake_date = datetime(2022, 1, 1, 0, 0).replace(tzinfo=ZoneInfo("UTC")) self.assertNotEqual(plugin.latest_tag_checked_at, fake_date) with freeze_time(fake_date.isoformat()): diff --git a/posthog/api/test/test_signup.py b/posthog/api/test/test_signup.py index c4439a8913b31..82e89c7805c5c 100644 --- a/posthog/api/test/test_signup.py +++ b/posthog/api/test/test_signup.py @@ -5,7 +5,7 @@ from unittest.mock import ANY, patch import pytest -import pytz +from zoneinfo import ZoneInfo from django.core import mail from django.urls.base import reverse from django.utils import timezone @@ -733,7 +733,7 @@ def test_api_invite_sign_up_prevalidate_expired_invite(self): invite: OrganizationInvite = OrganizationInvite.objects.create( target_email="test+59@posthog.com", organization=self.organization ) - invite.created_at = datetime.datetime(2020, 12, 1, tzinfo=pytz.UTC) + invite.created_at = datetime.datetime(2020, 12, 1, tzinfo=ZoneInfo("UTC")) invite.save() response = self.client.get(f"/api/signup/{invite.id}/") @@ -1132,7 +1132,7 @@ def test_cant_claim_expired_invite(self): invite: OrganizationInvite = OrganizationInvite.objects.create( target_email="test+799@posthog.com", organization=self.organization ) - invite.created_at = datetime.datetime(2020, 3, 3, tzinfo=pytz.UTC) + invite.created_at = datetime.datetime(2020, 3, 3, tzinfo=ZoneInfo("UTC")) invite.save() response = self.client.post(f"/api/signup/{invite.id}/", {"first_name": "Charlie", "password": "test_password"}) diff --git a/posthog/api/test/test_survey.py b/posthog/api/test/test_survey.py index 45e13024c1a0b..f393e5cec4379 100644 --- a/posthog/api/test/test_survey.py +++ b/posthog/api/test/test_survey.py @@ -77,6 +77,80 @@ def test_can_create_survey_with_linked_flag_and_targeting(self): {"type": "open", 
"question": "What would you want to improve from notebooks?"} ] + def test_used_in_survey_is_populated_correctly_for_feature_flag_list(self) -> None: + self.maxDiff = None + + ff_key = "notebooks" + notebooks_flag = FeatureFlag.objects.create(team=self.team, key=ff_key, created_by=self.user) + + response = self.client.post( + f"/api/projects/{self.team.id}/surveys/", + data={ + "name": "Notebooks power users survey", + "type": "popover", + "questions": [{"type": "open", "question": "What would you want to improve from notebooks?"}], + "linked_flag_id": notebooks_flag.id, + "targeting_flag_filters": { + "groups": [ + { + "variant": None, + "rollout_percentage": None, + "properties": [ + {"key": "billing_plan", "value": ["cloud"], "operator": "exact", "type": "person"} + ], + } + ] + }, + "conditions": {"url": "https://app.posthog.com/notebooks"}, + }, + format="json", + ) + + response_data = response.json() + assert response.status_code == status.HTTP_201_CREATED, response_data + assert response_data["linked_flag"]["id"] == notebooks_flag.id + assert FeatureFlag.objects.filter(id=response_data["targeting_flag"]["id"]).exists() + + created_survey1 = response.json()["id"] + + response = self.client.post( + f"/api/projects/{self.team.id}/surveys/", + data={ + "name": "Notebooks random survey", + "type": "popover", + "questions": [{"type": "open", "question": "What would you want to improve from notebooks?"}], + "linked_flag_id": notebooks_flag.id, + "conditions": {"url": "https://app.posthog.com/notebooks"}, + }, + format="json", + ) + + response_data = response.json() + assert response.status_code == status.HTTP_201_CREATED, response_data + assert response_data["linked_flag"]["id"] == notebooks_flag.id + assert response_data["targeting_flag"] is None + + created_survey2 = response.json()["id"] + + # add another random feature flag + self.client.post( + f"/api/projects/{self.team.id}/feature_flags/", + data={"name": f"flag", "key": f"flag_0", "filters": {"groups": 
[{"rollout_percentage": 5}]}}, + format="json", + ).json() + + with self.assertNumQueries(12): + response = self.client.get(f"/api/projects/{self.team.id}/feature_flags") + self.assertEqual(response.status_code, status.HTTP_200_OK) + result = response.json() + + self.assertEqual(result["count"], 2) + + self.assertEqual( + [(res["key"], [survey["id"] for survey in res["surveys"]]) for res in result["results"]], + [("flag_0", []), (ff_key, [created_survey1, created_survey2])], + ) + def test_updating_survey_with_targeting_creates_or_updates_targeting_flag(self): survey_with_targeting = self.client.post( f"/api/projects/{self.team.id}/surveys/", diff --git a/posthog/batch_exports/service.py b/posthog/batch_exports/service.py index 5aa0fa7d18e22..b5eb182e68a70 100644 --- a/posthog/batch_exports/service.py +++ b/posthog/batch_exports/service.py @@ -52,6 +52,8 @@ class S3BatchExportInputs: data_interval_end: str | None = None compression: str | None = None exclude_events: list[str] | None = None + encryption: str | None = None + kms_key_id: str | None = None @dataclass diff --git a/posthog/caching/test/test_should_refresh_insight.py b/posthog/caching/test/test_should_refresh_insight.py index 26fcfaf01531a..12fb385ef2926 100644 --- a/posthog/caching/test/test_should_refresh_insight.py +++ b/posthog/caching/test/test_should_refresh_insight.py @@ -1,9 +1,9 @@ from datetime import datetime, timedelta from time import sleep from unittest.mock import patch +from zoneinfo import ZoneInfo from django.http import HttpRequest -import pytz from freezegun import freeze_time from rest_framework.request import Request from posthog.caching.calculate_results import CLICKHOUSE_MAX_EXECUTION_TIME @@ -25,7 +25,7 @@ def __init__(self, *args, **kwargs) -> None: def test_should_return_true_if_refresh_not_requested(self): insight, _, _ = _create_insight(self.team, {"events": [{"id": "$autocapture"}], "interval": "month"}, {}) InsightCachingState.objects.filter(team=self.team, 
insight_id=insight.pk).update( - last_refresh=datetime.now(tz=pytz.timezone("UTC")) - timedelta(days=1) + last_refresh=datetime.now(tz=ZoneInfo("UTC")) - timedelta(days=1) ) # .GET["refresh"] is absent in the request below! @@ -47,7 +47,7 @@ def test_should_return_true_if_refresh_not_requested(self): def test_should_return_true_if_refresh_requested(self): insight, _, _ = _create_insight(self.team, {"events": [{"id": "$autocapture"}], "interval": "month"}, {}) InsightCachingState.objects.filter(team=self.team, insight_id=insight.pk).update( - last_refresh=datetime.now(tz=pytz.timezone("UTC")) - timedelta(days=1) + last_refresh=datetime.now(tz=ZoneInfo("UTC")) - timedelta(days=1) ) should_refresh_now, refresh_frequency = should_refresh_insight(insight, None, request=self.refresh_request) @@ -67,7 +67,7 @@ def test_should_return_true_if_insight_does_not_have_last_refresh(self): def test_shared_insights_can_be_refreshed_less_often(self): insight, _, _ = _create_insight(self.team, {"events": [{"id": "$autocapture"}], "interval": "month"}, {}) InsightCachingState.objects.filter(team=self.team, insight_id=insight.pk).update( - last_refresh=datetime.now(tz=pytz.timezone("UTC")) - timedelta(days=1) + last_refresh=datetime.now(tz=ZoneInfo("UTC")) - timedelta(days=1) ) should_refresh_now, refresh_frequency = should_refresh_insight( @@ -130,7 +130,7 @@ def test_dashboard_filters_should_override_insight_filters_when_deciding_on_refr def test_should_return_true_if_was_recently_refreshed(self): insight, _, _ = _create_insight(self.team, {"events": [{"id": "$autocapture"}], "interval": "month"}, {}) InsightCachingState.objects.filter(team=self.team, insight_id=insight.pk).update( - last_refresh=datetime.now(tz=pytz.timezone("UTC")) + last_refresh=datetime.now(tz=ZoneInfo("UTC")) ) request = HttpRequest() @@ -143,10 +143,10 @@ def test_should_return_true_if_was_recently_refreshed(self): def test_should_return_true_if_refresh_just_about_to_time_out_elsewhere(self, mock_sleep): 
insight, _, _ = _create_insight(self.team, {"events": [{"id": "$autocapture"}], "interval": "month"}, {}) InsightCachingState.objects.filter(team=self.team, insight_id=insight.pk).update( - last_refresh=datetime.now(tz=pytz.timezone("UTC")) - timedelta(days=1), + last_refresh=datetime.now(tz=ZoneInfo("UTC")) - timedelta(days=1), # This insight is being calculated _somewhere_, since it was last refreshed # earlier than the recent refresh has been queued - last_refresh_queued_at=datetime.now(tz=pytz.timezone("UTC")) + last_refresh_queued_at=datetime.now(tz=ZoneInfo("UTC")) - timedelta(seconds=CLICKHOUSE_MAX_EXECUTION_TIME - 0.5), # Half a second before timeout ) @@ -161,10 +161,10 @@ def test_should_return_true_if_refresh_just_about_to_time_out_elsewhere(self, mo def test_should_return_true_if_refresh_timed_out_elsewhere_before(self): insight, _, _ = _create_insight(self.team, {"events": [{"id": "$autocapture"}], "interval": "month"}, {}) InsightCachingState.objects.filter(team=self.team, insight_id=insight.pk).update( - last_refresh=datetime.now(tz=pytz.timezone("UTC")) - timedelta(days=1), + last_refresh=datetime.now(tz=ZoneInfo("UTC")) - timedelta(days=1), # last_refresh is earlier than last_refresh_queued_at BUT last_refresh_queued_at is more than # CLICKHOUSE_MAX_EXECUTION_TIME seconds ago. This means the query CANNOT be running at this time. 
- last_refresh_queued_at=datetime.now(tz=pytz.timezone("UTC")) - timedelta(seconds=500), + last_refresh_queued_at=datetime.now(tz=ZoneInfo("UTC")) - timedelta(seconds=500), ) should_refresh_now, _ = should_refresh_insight(insight, None, request=self.refresh_request) diff --git a/posthog/clickhouse/migrations/0048_session_replay_events_count.py b/posthog/clickhouse/migrations/0048_session_replay_events_count.py new file mode 100644 index 0000000000000..d4676e2794884 --- /dev/null +++ b/posthog/clickhouse/migrations/0048_session_replay_events_count.py @@ -0,0 +1,26 @@ +from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions +from posthog.models.session_replay_event.migrations_sql import ( + DROP_SESSION_REPLAY_EVENTS_TABLE_MV_SQL, + DROP_KAFKA_SESSION_REPLAY_EVENTS_TABLE_SQL, + ADD_EVENT_COUNT_WRITABLE_SESSION_REPLAY_EVENTS_TABLE_SQL, + ADD_EVENT_COUNT_DISTRIBUTED_SESSION_REPLAY_EVENTS_TABLE_SQL, + ADD_EVENT_COUNT_SESSION_REPLAY_EVENTS_TABLE_SQL, +) +from posthog.models.session_replay_event.sql import ( + SESSION_REPLAY_EVENTS_TABLE_MV_SQL, + KAFKA_SESSION_REPLAY_EVENTS_TABLE_SQL, +) + +operations = [ + # we have to drop materialized view first so that we're no longer pulling from kakfa + # then we drop the kafka table + run_sql_with_exceptions(DROP_SESSION_REPLAY_EVENTS_TABLE_MV_SQL()), + run_sql_with_exceptions(DROP_KAFKA_SESSION_REPLAY_EVENTS_TABLE_SQL()), + # now we can alter the target tables + run_sql_with_exceptions(ADD_EVENT_COUNT_WRITABLE_SESSION_REPLAY_EVENTS_TABLE_SQL()), + run_sql_with_exceptions(ADD_EVENT_COUNT_DISTRIBUTED_SESSION_REPLAY_EVENTS_TABLE_SQL()), + run_sql_with_exceptions(ADD_EVENT_COUNT_SESSION_REPLAY_EVENTS_TABLE_SQL()), + # and then recreate the materialized views and kafka tables + run_sql_with_exceptions(KAFKA_SESSION_REPLAY_EVENTS_TABLE_SQL()), + run_sql_with_exceptions(SESSION_REPLAY_EVENTS_TABLE_MV_SQL()), +] diff --git a/posthog/clickhouse/system_status.py b/posthog/clickhouse/system_status.py index 
2f0924b080319..417525330a96c 100644 --- a/posthog/clickhouse/system_status.py +++ b/posthog/clickhouse/system_status.py @@ -1,7 +1,7 @@ from datetime import timedelta from os.path import abspath, dirname, join from typing import Dict, Generator, List, Tuple -import pytz +from zoneinfo import ZoneInfo from dateutil.relativedelta import relativedelta from django.utils import timezone @@ -103,7 +103,7 @@ def system_status() -> Generator[SystemStatusRow, None, None]: last_event_ingested_timestamp = sync_execute("SELECT max(_timestamp) FROM events")[0][0] # Therefore we can confidently apply the UTC timezone - last_event_ingested_timestamp_utc = last_event_ingested_timestamp.replace(tzinfo=pytz.UTC) + last_event_ingested_timestamp_utc = last_event_ingested_timestamp.replace(tzinfo=ZoneInfo("UTC")) yield { "key": "last_event_ingested_timestamp", diff --git a/posthog/clickhouse/test/__snapshots__/test_schema.ambr b/posthog/clickhouse/test/__snapshots__/test_schema.ambr index 36ab529259c77..ac21b1ac5989f 100644 --- a/posthog/clickhouse/test/__snapshots__/test_schema.ambr +++ b/posthog/clickhouse/test/__snapshots__/test_schema.ambr @@ -336,7 +336,9 @@ console_log_count Int64, console_warn_count Int64, console_error_count Int64, - size Int64 + size Int64, + event_count Int64, + message_count Int64 ) ENGINE = Kafka('test.kafka.broker:9092', 'clickhouse_session_replay_events_test', 'group1', 'JSONEachRow') ' @@ -922,7 +924,9 @@ console_log_count Int64, console_warn_count Int64, console_error_count Int64, - size Int64 + size Int64, + event_count Int64, + message_count Int64 ) ENGINE = Kafka('kafka:9092', 'clickhouse_session_replay_events_test', 'group1', 'JSONEachRow') ' @@ -1344,7 +1348,15 @@ console_warn_count SimpleAggregateFunction(sum, Int64), console_error_count SimpleAggregateFunction(sum, Int64), -- this column allows us to estimate the amount of data that is being ingested - size SimpleAggregateFunction(sum, Int64) + size SimpleAggregateFunction(sum, Int64), + -- this 
allows us to count the number of messages received in a session + -- often very useful in incidents or debugging + message_count SimpleAggregateFunction(sum, Int64), + -- this allows us to count the number of snapshot events received in a session + -- often very useful in incidents or debugging + -- because we batch events we expect message_count to be lower than event_count + event_count SimpleAggregateFunction(sum, Int64), + _timestamp SimpleAggregateFunction(max, DateTime) ) ENGINE = Distributed('posthog', 'posthog_test', 'sharded_session_replay_events', sipHash64(distinct_id)) ' @@ -1377,7 +1389,11 @@ sum(console_log_count) as console_log_count, sum(console_warn_count) as console_warn_count, sum(console_error_count) as console_error_count, - sum(size) as size + sum(size) as size, + -- we can count the number of kafka messages instead of sending it explicitly + sum(message_count) as message_count, + sum(event_count) as event_count, + max(_timestamp) as _timestamp FROM posthog_test.kafka_session_replay_events group by session_id, team_id @@ -1608,7 +1624,15 @@ console_warn_count SimpleAggregateFunction(sum, Int64), console_error_count SimpleAggregateFunction(sum, Int64), -- this column allows us to estimate the amount of data that is being ingested - size SimpleAggregateFunction(sum, Int64) + size SimpleAggregateFunction(sum, Int64), + -- this allows us to count the number of messages received in a session + -- often very useful in incidents or debugging + message_count SimpleAggregateFunction(sum, Int64), + -- this allows us to count the number of snapshot events received in a session + -- often very useful in incidents or debugging + -- because we batch events we expect message_count to be lower than event_count + event_count SimpleAggregateFunction(sum, Int64), + _timestamp SimpleAggregateFunction(max, DateTime) ) ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/77f1df52-4b43-11e9-910f-b8ca3a9b9f3e_{shard}/posthog.session_replay_events', 
'{replica}') PARTITION BY toYYYYMM(min_first_timestamp) @@ -2226,7 +2250,15 @@ console_warn_count SimpleAggregateFunction(sum, Int64), console_error_count SimpleAggregateFunction(sum, Int64), -- this column allows us to estimate the amount of data that is being ingested - size SimpleAggregateFunction(sum, Int64) + size SimpleAggregateFunction(sum, Int64), + -- this allows us to count the number of messages received in a session + -- often very useful in incidents or debugging + message_count SimpleAggregateFunction(sum, Int64), + -- this allows us to count the number of snapshot events received in a session + -- often very useful in incidents or debugging + -- because we batch events we expect message_count to be lower than event_count + event_count SimpleAggregateFunction(sum, Int64), + _timestamp SimpleAggregateFunction(max, DateTime) ) ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/77f1df52-4b43-11e9-910f-b8ca3a9b9f3e_{shard}/posthog.session_replay_events', '{replica}') PARTITION BY toYYYYMM(min_first_timestamp) diff --git a/posthog/clickhouse/test/test_person_overrides.py b/posthog/clickhouse/test/test_person_overrides.py index dd337d487aba7..f0d33c7d617f4 100644 --- a/posthog/clickhouse/test/test_person_overrides.py +++ b/posthog/clickhouse/test/test_person_overrides.py @@ -5,7 +5,7 @@ from uuid import UUID, uuid4 import pytest -import pytz +from zoneinfo import ZoneInfo from kafka import KafkaProducer from posthog.clickhouse.client import sync_execute @@ -35,9 +35,9 @@ def test_can_insert_person_overrides(): old_person_id = uuid4() override_person_id = uuid4() oldest_event_string = "2020-01-01 00:00:00" - oldest_event = datetime.fromisoformat(oldest_event_string).replace(tzinfo=pytz.UTC) + oldest_event = datetime.fromisoformat(oldest_event_string).replace(tzinfo=ZoneInfo("UTC")) merged_at_string = "2020-01-02 00:00:00" - merged_at = datetime.fromisoformat(merged_at_string).replace(tzinfo=pytz.UTC) + merged_at = 
datetime.fromisoformat(merged_at_string).replace(tzinfo=ZoneInfo("UTC")) message = { "team_id": 1, "old_person_id": str(old_person_id), @@ -82,7 +82,7 @@ def test_can_insert_person_overrides(): [result] = results created_at, *the_rest = result assert the_rest == [1, old_person_id, override_person_id, oldest_event, merged_at, 2] - assert created_at > datetime.now(tz=pytz.UTC) - timedelta(seconds=10) + assert created_at > datetime.now(tz=ZoneInfo("UTC")) - timedelta(seconds=10) finally: producer.close() diff --git a/posthog/datetime.py b/posthog/datetime.py index 8dc6b6975fb8d..b8c4910e8b374 100644 --- a/posthog/datetime.py +++ b/posthog/datetime.py @@ -1,13 +1,22 @@ from datetime import datetime, timedelta -def end_of_day(reference_date: datetime): - return datetime( - year=reference_date.year, month=reference_date.month, day=reference_date.day, tzinfo=reference_date.tzinfo - ) + timedelta(days=1, microseconds=-1) +def start_of_hour(dt: datetime) -> datetime: + return datetime(year=dt.year, month=dt.month, day=dt.day, hour=dt.hour, tzinfo=dt.tzinfo) -def start_of_day(reference_date: datetime): - return datetime( - year=reference_date.year, month=reference_date.month, day=reference_date.day, tzinfo=reference_date.tzinfo - ) +def start_of_day(dt: datetime): + return datetime(year=dt.year, month=dt.month, day=dt.day, tzinfo=dt.tzinfo) + + +def end_of_day(dt: datetime): + return datetime(year=dt.year, month=dt.month, day=dt.day, tzinfo=dt.tzinfo) + timedelta(days=1, microseconds=-1) + + +def start_of_week(dt: datetime) -> datetime: + # weeks start on sunday + return datetime(year=dt.year, month=dt.month, day=dt.day, tzinfo=dt.tzinfo) - timedelta(days=(dt.weekday() + 1) % 7) + + +def start_of_month(dt: datetime) -> datetime: + return datetime(year=dt.year, month=dt.month, day=1, tzinfo=dt.tzinfo) diff --git a/posthog/decorators.py b/posthog/decorators.py index 2cefc1bb23f53..19b1bc33f98ae 100644 --- a/posthog/decorators.py +++ b/posthog/decorators.py @@ -1,7 +1,10 @@ 
+from datetime import datetime from enum import Enum from functools import wraps from typing import Any, Callable, Dict, List, TypeVar, Union, cast +from zoneinfo import ZoneInfo +import posthoganalytics from django.urls import resolve from django.utils.timezone import now from rest_framework.request import Request @@ -9,8 +12,15 @@ from statshog.defaults.django import statsd from posthog.clickhouse.query_tagging import tag_queries +from posthog.cloud_utils import is_cloud +from posthog.datetime import start_of_day, start_of_hour, start_of_month, start_of_week from posthog.models import User +from posthog.models.filters.filter import Filter +from posthog.models.filters.path_filter import PathFilter +from posthog.models.filters.retention_filter import RetentionFilter +from posthog.models.filters.stickiness_filter import StickinessFilter from posthog.models.filters.utils import get_filter +from posthog.models.team.team import Team from posthog.utils import refresh_requested_by_client from .utils import generate_cache_key, get_safe_cache @@ -74,9 +84,12 @@ def wrapper(self, request) -> T: route = "unknown" if cached_result_package and cached_result_package.get("result"): - cached_result_package["is_cached"] = True - statsd.incr("posthog_cached_function_cache_hit", tags={"route": route}) - return cached_result_package + if not is_stale(team, filter, cached_result_package): + cached_result_package["is_cached"] = True + statsd.incr("posthog_cached_function_cache_hit", tags={"route": route}) + return cached_result_package + else: + statsd.incr("posthog_cached_function_cache_stale", tags={"route": route}) else: statsd.incr("posthog_cached_function_cache_miss", tags={"route": route}) @@ -93,3 +106,49 @@ def wrapper(self, request) -> T: return fresh_result_package return wrapper + + +def stale_cache_invalidation_disabled(team: Team) -> bool: + """Can be disabled temporarly to help in cases of service degradation.""" + if is_cloud(): # on PostHog Cloud, use the feature flag + 
return not posthoganalytics.feature_enabled( + "stale-cache-invalidation-enabled", + str(team.uuid), + groups={"organization": str(team.organization.id)}, + group_properties={ + "organization": {"id": str(team.organization.id), "created_at": team.organization.created_at} + }, + only_evaluate_locally=True, + send_feature_flag_events=False, + ) + else: + return False + + +def is_stale(team: Team, filter: Filter | RetentionFilter | StickinessFilter | PathFilter, cached_result: Any) -> bool: + """Indicates wether a cache item is obviously outdated based on filters, + i.e. the next time interval was entered since the last computation. For + example an insight with -7d date range that was last computed yesterday. + The same insight refreshed today wouldn't be marked as stale. + """ + + if stale_cache_invalidation_disabled(team): + return False + + last_refresh = cached_result.get("last_refresh", None) + date_to = min([filter.date_to, datetime.now(tz=ZoneInfo("UTC"))]) # can't be later than now + interval = filter.period.lower() if isinstance(filter, RetentionFilter) else filter.interval + + if last_refresh is None: + raise Exception("Cached results require a last_refresh") + + if interval == "hour": + return start_of_hour(date_to) > start_of_hour(last_refresh) + elif interval == "day": + return start_of_day(date_to) > start_of_day(last_refresh) + elif interval == "week": + return start_of_week(date_to) > start_of_week(last_refresh) + elif interval == "month": + return start_of_month(date_to) > start_of_month(last_refresh) + else: + return False diff --git a/posthog/demo/products/hedgebox/models.py b/posthog/demo/products/hedgebox/models.py index 68272413aa2ba..132f3d6ac5f32 100644 --- a/posthog/demo/products/hedgebox/models.py +++ b/posthog/demo/products/hedgebox/models.py @@ -14,6 +14,7 @@ ) import pytz +from zoneinfo import ZoneInfo from posthog.demo.matrix.models import Effect, SimPerson, SimSessionIntent @@ -673,7 +674,7 @@ def upgrade_plan(self): if not 
self.account.was_billing_scheduled: self.account.was_billing_scheduled = True future_months = math.ceil( - (self.cluster.end.astimezone(pytz.timezone(self.timezone)) - self.cluster.simulation_time).days / 30 + (self.cluster.end.astimezone(ZoneInfo(self.timezone)) - self.cluster.simulation_time).days / 30 ) for i in range(future_months): bill_timestamp = self.cluster.simulation_time + dt.timedelta(days=30 * i) diff --git a/posthog/demo/test/test_matrix_manager.py b/posthog/demo/test/test_matrix_manager.py index 27463e1bd692d..99f0451c5485d 100644 --- a/posthog/demo/test/test_matrix_manager.py +++ b/posthog/demo/test/test_matrix_manager.py @@ -2,7 +2,7 @@ from enum import auto from typing import Optional -import pytz +from zoneinfo import ZoneInfo from posthog.client import sync_execute from posthog.demo.matrix.manager import MatrixManager @@ -54,7 +54,9 @@ class TestMatrixManager(ClickhouseDestroyTablesMixin): @classmethod def setUpTestData(cls): super().setUpTestData() - cls.matrix = DummyMatrix(n_clusters=3, now=dt.datetime(2020, 1, 1, 0, 0, 0, 0, tzinfo=pytz.UTC), days_future=0) + cls.matrix = DummyMatrix( + n_clusters=3, now=dt.datetime(2020, 1, 1, 0, 0, 0, 0, tzinfo=ZoneInfo("UTC")), days_future=0 + ) cls.matrix.simulate() def test_reset_master(self): diff --git a/posthog/errors.py b/posthog/errors.py index 5cd3342f7a3fa..b2d34ed858448 100644 --- a/posthog/errors.py +++ b/posthog/errors.py @@ -206,7 +206,7 @@ def look_up_error_code_meta(error: ServerException) -> ErrorCodeMeta: 131: ErrorCodeMeta("TOO_LARGE_STRING_SIZE"), 133: ErrorCodeMeta("AGGREGATE_FUNCTION_DOESNT_ALLOW_PARAMETERS"), 134: ErrorCodeMeta("PARAMETERS_TO_AGGREGATE_FUNCTIONS_MUST_BE_LITERALS"), - 135: ErrorCodeMeta("ZERO_ARRAY_OR_TUPLE_INDEX"), + 135: ErrorCodeMeta("ZERO_ARRAY_OR_TUPLE_INDEX", user_safe=True), 137: ErrorCodeMeta("UNKNOWN_ELEMENT_IN_CONFIG"), 138: ErrorCodeMeta("EXCESSIVE_ELEMENT_IN_CONFIG"), 139: ErrorCodeMeta("NO_ELEMENTS_IN_CONFIG"), diff --git 
a/posthog/hogql/database/schema/session_replay_events.py b/posthog/hogql/database/schema/session_replay_events.py index c4f1980df5491..b8d79e86d9780 100644 --- a/posthog/hogql/database/schema/session_replay_events.py +++ b/posthog/hogql/database/schema/session_replay_events.py @@ -31,6 +31,8 @@ "console_warn_count": IntegerDatabaseField(name="console_warn_count"), "console_error_count": IntegerDatabaseField(name="console_error_count"), "size": IntegerDatabaseField(name="size"), + "event_count": IntegerDatabaseField(name="event_count"), + "message_count": IntegerDatabaseField(name="message_count"), "pdi": LazyJoin( from_field="distinct_id", join_table=PersonDistinctIdsTable(), @@ -77,6 +79,8 @@ def select_from_session_replay_events_table(requested_fields: Dict[str, List[str "console_error_count": ast.Call(name="sum", args=[ast.Field(chain=[table_name, "console_error_count"])]), "distinct_id": ast.Call(name="any", args=[ast.Field(chain=[table_name, "distinct_id"])]), "size": ast.Call(name="sum", args=[ast.Field(chain=[table_name, "size"])]), + "event_count": ast.Call(name="sum", args=[ast.Field(chain=[table_name, "event_count"])]), + "message_count": ast.Call(name="sum", args=[ast.Field(chain=[table_name, "message_count"])]), } select_fields: List[ast.Expr] = [] diff --git a/posthog/hogql/database/test/__snapshots__/test_database.ambr b/posthog/hogql/database/test/__snapshots__/test_database.ambr index 166391d344856..9e1413d84a0bf 100644 --- a/posthog/hogql/database/test/__snapshots__/test_database.ambr +++ b/posthog/hogql/database/test/__snapshots__/test_database.ambr @@ -276,6 +276,14 @@ "key": "size", "type": "integer" }, + { + "key": "event_count", + "type": "integer" + }, + { + "key": "message_count", + "type": "integer" + }, { "key": "pdi", "type": "lazy_table", @@ -405,6 +413,14 @@ "key": "size", "type": "integer" }, + { + "key": "event_count", + "type": "integer" + }, + { + "key": "message_count", + "type": "integer" + }, { "key": "pdi", "type": "lazy_table", 
@@ -849,6 +865,14 @@ "key": "size", "type": "integer" }, + { + "key": "event_count", + "type": "integer" + }, + { + "key": "message_count", + "type": "integer" + }, { "key": "pdi", "type": "lazy_table", @@ -978,6 +1002,14 @@ "key": "size", "type": "integer" }, + { + "key": "event_count", + "type": "integer" + }, + { + "key": "message_count", + "type": "integer" + }, { "key": "pdi", "type": "lazy_table", diff --git a/posthog/hogql/placeholders.py b/posthog/hogql/placeholders.py index 670b98cfd45e5..bd63ce32754c0 100644 --- a/posthog/hogql/placeholders.py +++ b/posthog/hogql/placeholders.py @@ -32,7 +32,7 @@ def __init__(self, placeholders: Optional[Dict[str, ast.Expr]]): def visit_placeholder(self, node): if not self.placeholders: raise HogQLException(f"Placeholders, such as {{{node.field}}}, are not supported in this context") - if node.field in self.placeholders: + if node.field in self.placeholders and self.placeholders[node.field] is not None: new_node = self.placeholders[node.field] new_node.start = node.start new_node.end = node.end diff --git a/posthog/hogql/property.py b/posthog/hogql/property.py index 3caa10d51f8f6..81efafc225a1f 100644 --- a/posthog/hogql/property.py +++ b/posthog/hogql/property.py @@ -15,7 +15,7 @@ from posthog.models.property import PropertyGroup from posthog.models.property.util import build_selector_regex from posthog.models.property_definition import PropertyType -from posthog.schema import PropertyOperator +from posthog.schema import PropertyOperator, PropertyGroupFilter, PropertyGroupFilterValue, FilterLogicalOperator def has_aggregation(expr: AST) -> bool: @@ -59,16 +59,30 @@ def property_to_expr(property: Union[BaseModel, PropertyGroup, Property, dict, l return ast.And(exprs=properties) elif isinstance(property, Property): pass - elif isinstance(property, PropertyGroup): - if property.type != PropertyOperatorType.AND and property.type != PropertyOperatorType.OR: + elif ( + isinstance(property, PropertyGroup) + or 
isinstance(property, PropertyGroupFilter) + or isinstance(property, PropertyGroupFilterValue) + ): + if ( + isinstance(property, PropertyGroup) + and property.type != PropertyOperatorType.AND + and property.type != PropertyOperatorType.OR + ): raise NotImplementedException(f'PropertyGroup of unknown type "{property.type}"') + if ( + (isinstance(property, PropertyGroupFilter) or isinstance(property, PropertyGroupFilterValue)) + and property.type != FilterLogicalOperator.AND + and property.type != FilterLogicalOperator.OR + ): + raise NotImplementedException(f'PropertyGroupFilter of unknown type "{property.type}"') if len(property.values) == 0: return ast.Constant(value=True) if len(property.values) == 1: return property_to_expr(property.values[0], team) - if property.type == PropertyOperatorType.AND: + if property.type == PropertyOperatorType.AND or property.type == FilterLogicalOperator.AND: return ast.And(exprs=[property_to_expr(p, team) for p in property.values]) else: return ast.Or(exprs=[property_to_expr(p, team) for p in property.values]) diff --git a/posthog/hogql/test/test_query.py b/posthog/hogql/test/test_query.py index e939d9ce8aef6..ed84eeaf4af6d 100644 --- a/posthog/hogql/test/test_query.py +++ b/posthog/hogql/test/test_query.py @@ -1,6 +1,6 @@ from uuid import UUID -import pytz +from zoneinfo import ZoneInfo from django.test import override_settings from django.utils import timezone from freezegun import freeze_time @@ -817,21 +817,21 @@ def test_window_functions_simple(self): expected += [ ( f"person_{person}_{random_uuid}", - datetime.datetime(2020, 1, 10, 00, 00, 00, tzinfo=pytz.UTC), + datetime.datetime(2020, 1, 10, 00, 00, 00, tzinfo=ZoneInfo("UTC")), "random event", [], ["random bla", "random boo"], ), ( f"person_{person}_{random_uuid}", - datetime.datetime(2020, 1, 10, 00, 10, 00, tzinfo=pytz.UTC), + datetime.datetime(2020, 1, 10, 00, 10, 00, tzinfo=ZoneInfo("UTC")), "random bla", ["random event"], ["random boo"], ), ( 
f"person_{person}_{random_uuid}", - datetime.datetime(2020, 1, 10, 00, 20, 00, tzinfo=pytz.UTC), + datetime.datetime(2020, 1, 10, 00, 20, 00, tzinfo=ZoneInfo("UTC")), "random boo", ["random event", "random bla"], [], @@ -902,7 +902,7 @@ def test_window_functions_with_window(self): expected += [ ( f"person_{person}_{random_uuid}", - datetime.datetime(2020, 1, 10, 00, 00, 00, tzinfo=pytz.UTC), + datetime.datetime(2020, 1, 10, 00, 00, 00, tzinfo=ZoneInfo("UTC")), "random event", [], ["random bla", "random boo"], @@ -917,7 +917,7 @@ def test_window_functions_with_window(self): ), ( f"person_{person}_{random_uuid}", - datetime.datetime(2020, 1, 10, 00, 10, 00, tzinfo=pytz.UTC), + datetime.datetime(2020, 1, 10, 00, 10, 00, tzinfo=ZoneInfo("UTC")), "random bla", ["random event"], ["random boo"], @@ -932,7 +932,7 @@ def test_window_functions_with_window(self): ), ( f"person_{person}_{random_uuid}", - datetime.datetime(2020, 1, 10, 00, 20, 00, tzinfo=pytz.UTC), + datetime.datetime(2020, 1, 10, 00, 20, 00, tzinfo=ZoneInfo("UTC")), "random boo", ["random event", "random bla"], [], @@ -1226,7 +1226,7 @@ def test_null_equality(self): ("null", "!~*", "null", 0), ] - for (a, op, b, res) in expected: + for a, op, b, res in expected: # works when selecting directly query = f"select {a} {op} {b}" response = execute_hogql_query(query, team=self.team) diff --git a/posthog/hogql_queries/lifecycle_hogql_query.py b/posthog/hogql_queries/lifecycle_hogql_query.py deleted file mode 100644 index 2df71a976d1a9..0000000000000 --- a/posthog/hogql_queries/lifecycle_hogql_query.py +++ /dev/null @@ -1,176 +0,0 @@ -from typing import Dict, Any - -from django.utils.timezone import datetime - -from posthog.hogql import ast -from posthog.hogql.parser import parse_expr, parse_select -from posthog.hogql.query import execute_hogql_query -from posthog.hogql_queries.query_date_range import QueryDateRange -from posthog.models import Team -from posthog.schema import LifecycleQuery - - -def 
create_time_filter(date_range: QueryDateRange) -> ast.Expr: - # don't need timezone here, as HogQL will use the project timezone automatically - # :TRICKY: We fetch all data even for the period before the graph starts up until the end of the last period - time_filter = parse_expr( - """ - (timestamp >= dateTrunc({interval}, {date_from}) - {one_interval_period}) - AND - (timestamp < dateTrunc({interval}, {date_to}) + {one_interval_period}) - """, - placeholders={ - "date_from": date_range.date_from_as_hogql, - "date_to": date_range.date_to_as_hogql, - "one_interval_period": date_range.one_interval_period_as_hogql, - "interval": date_range.interval_period_string_as_hogql, - }, - ) - - return time_filter - - -def create_events_query(date_range: QueryDateRange, event_filter: ast.Expr): - if not event_filter: - event_filter = ast.Constant(value=True) - - placeholders = { - "event_filter": event_filter, - "interval": date_range.interval_period_string_as_hogql, - "one_interval_period": date_range.one_interval_period_as_hogql, - } - - events_query = parse_select( - """ - SELECT - events.person.id as person_id, - min(events.person.created_at) AS created_at, - arraySort(groupUniqArray(dateTrunc({interval}, events.timestamp))) AS all_activity, - arrayPopBack(arrayPushFront(all_activity, dateTrunc({interval}, created_at))) as previous_activity, - arrayPopFront(arrayPushBack(all_activity, dateTrunc({interval}, toDateTime('1970-01-01 00:00:00')))) as following_activity, - arrayMap((previous, current, index) -> (previous = current ? 'new' : ((current - {one_interval_period}) = previous AND index != 1) ? 'returning' : 'resurrecting'), previous_activity, all_activity, arrayEnumerate(all_activity)) as initial_status, - arrayMap((current, next) -> (current + {one_interval_period} = next ? 
'' : 'dormant'), all_activity, following_activity) as dormant_status, - arrayMap(x -> x + {one_interval_period}, arrayFilter((current, is_dormant) -> is_dormant = 'dormant', all_activity, dormant_status)) as dormant_periods, - arrayMap(x -> 'dormant', dormant_periods) as dormant_label, - arrayConcat(arrayZip(all_activity, initial_status), arrayZip(dormant_periods, dormant_label)) as temp_concat, - arrayJoin(temp_concat) as period_status_pairs, - period_status_pairs.1 as start_of_period, - period_status_pairs.2 as status - FROM events - WHERE {event_filter} - GROUP BY person_id - """, - placeholders=placeholders, - ) - return events_query - - -def run_lifecycle_query( - team: Team, - query: LifecycleQuery, -) -> Dict[str, Any]: - now_dt = datetime.now() - - query_date_range = QueryDateRange(date_range=query.dateRange, team=team, interval=query.interval, now=now_dt) - - interval = query_date_range.interval.name - one_interval_period = query_date_range.one_interval_period_as_hogql - number_interval_period = query_date_range.interval_periods_as_hogql("number") - - time_filter = create_time_filter(query_date_range) - event_filter = time_filter # TODO: add all other filters - - placeholders = { - "interval": ast.Constant(value=interval), - "one_interval_period": one_interval_period, - "number_interval_period": number_interval_period, - "event_filter": event_filter, - "date_from": query_date_range.date_from_as_hogql, - "date_to": query_date_range.date_to_as_hogql, - } - - events_query = create_events_query(date_range=query_date_range, event_filter=event_filter) - - periods = parse_select( - """ - SELECT ( - dateTrunc({interval}, {date_to}) - {number_interval_period} - ) AS start_of_period - FROM numbers( - dateDiff( - {interval}, - dateTrunc({interval}, {date_from}), - dateTrunc({interval}, {date_to} + {one_interval_period}) - ) - ) - """, - placeholders=placeholders, - ) - - lifecycle_sql = parse_select( - """ - SELECT groupArray(start_of_period) AS date, - 
groupArray(counts) AS total, - status - FROM ( - SELECT - status = 'dormant' ? negate(sum(counts)) : negate(negate(sum(counts))) as counts, - start_of_period, - status - FROM ( - SELECT - periods.start_of_period as start_of_period, - 0 AS counts, - status - FROM {periods} as periods - CROSS JOIN ( - SELECT status - FROM (SELECT 1) - ARRAY JOIN ['new', 'returning', 'resurrecting', 'dormant'] as status - ) as sec - ORDER BY status, start_of_period - UNION ALL - SELECT - start_of_period, count(DISTINCT person_id) AS counts, status - FROM {events_query} - GROUP BY start_of_period, status - ) - WHERE start_of_period <= dateTrunc({interval}, {date_to}) - AND start_of_period >= dateTrunc({interval}, {date_from}) - GROUP BY start_of_period, status - ORDER BY start_of_period ASC - ) - GROUP BY status - """, - {**placeholders, "periods": periods, "events_query": events_query}, - ) - - response = execute_hogql_query( - team=team, - query=lifecycle_sql, - query_type="LifecycleQuery", - ) - - # ensure that the items are in a deterministic order - order = {"new": 1, "returning": 2, "resurrecting": 3, "dormant": 4} - results = sorted(response.results, key=lambda result: order.get(result[2], result[2])) - - res = [] - for val in results: - counts = val[1] - labels = [item.strftime("%-d-%b-%Y{}".format(" %H:%M" if interval == "hour" else "")) for item in val[0]] - days = [item.strftime("%Y-%m-%d{}".format(" %H:%M:%S" if interval == "hour" else "")) for item in val[0]] - - label = "{} - {}".format("", val[2]) # entity.name - additional_values = {"label": label, "status": val[2]} - res.append( - { - "data": [float(c) for c in counts], - "count": float(sum(counts)), - "labels": labels, - "days": days, - **additional_values, - } - ) - - return {"result": res} diff --git a/posthog/hogql_queries/lifecycle_query_runner.py b/posthog/hogql_queries/lifecycle_query_runner.py new file mode 100644 index 0000000000000..2b970bb95156c --- /dev/null +++ 
b/posthog/hogql_queries/lifecycle_query_runner.py @@ -0,0 +1,252 @@ +from typing import Optional, Any, Dict, List + +from django.utils.timezone import datetime + +from posthog.hogql import ast +from posthog.hogql.parser import parse_expr, parse_select +from posthog.hogql.property import property_to_expr, action_to_expr +from posthog.hogql.query import execute_hogql_query +from posthog.hogql.timings import HogQLTimings +from posthog.hogql_queries.query_runner import QueryRunner +from posthog.models import Team, Action +from posthog.hogql_queries.utils.query_date_range import QueryDateRange +from posthog.models.filters.mixins.utils import cached_property +from posthog.schema import LifecycleQuery, ActionsNode, EventsNode, LifecycleQueryResponse + + +class LifecycleQueryRunner(QueryRunner): + query: LifecycleQuery + + def __init__(self, query: LifecycleQuery | Dict[str, Any], team: Team, timings: Optional[HogQLTimings] = None): + super().__init__(team, timings) + if isinstance(query, LifecycleQuery): + self.query = query + else: + self.query = LifecycleQuery.parse_obj(query) + + def to_query(self) -> ast.SelectQuery: + placeholders = { + **self.query_date_range.to_placeholders(), + "events_query": self.events_query, + "periods_query": self.periods_query, + } + with self.timings.measure("lifecycle_query"): + lifecycle_query = parse_select( + """ + SELECT groupArray(start_of_period) AS date, + groupArray(counts) AS total, + status + FROM ( + SELECT + status = 'dormant' ? 
negate(sum(counts)) : negate(negate(sum(counts))) as counts, + start_of_period, + status + FROM ( + SELECT + periods.start_of_period as start_of_period, + 0 AS counts, + status + FROM {periods_query} as periods + CROSS JOIN ( + SELECT status + FROM (SELECT 1) + ARRAY JOIN ['new', 'returning', 'resurrecting', 'dormant'] as status + ) as sec + ORDER BY status, start_of_period + UNION ALL + SELECT + start_of_period, count(DISTINCT person_id) AS counts, status + FROM {events_query} + GROUP BY start_of_period, status + ) + WHERE start_of_period <= dateTrunc({interval}, {date_to}) + AND start_of_period >= dateTrunc({interval}, {date_from}) + GROUP BY start_of_period, status + ORDER BY start_of_period ASC + ) + GROUP BY status + """, + placeholders, + timings=self.timings, + ) + return lifecycle_query + + def to_persons_query(self) -> str: + # TODO: add support for selecting and filtering by breakdowns + with self.timings.measure("persons_query"): + return parse_select( + """ + SELECT + person_id, start_of_period as breakdown_1, status as breakdown_2 + FROM + {events_query} + """, + placeholders={"events_query": self.events_query}, + ) + + def run(self) -> LifecycleQueryResponse: + response = execute_hogql_query( + query_type="LifecycleQuery", + query=self.to_query(), + team=self.team, + timings=self.timings, + ) + + # TODO: can we move the data conversion part into the query as well? It would make it easier to swap + # e.g. the LifecycleQuery with HogQLQuery, while keeping the chart logic the same. 
+ + # ensure that the items are in a deterministic order + order = {"new": 1, "returning": 2, "resurrecting": 3, "dormant": 4} + results = sorted(response.results, key=lambda result: order.get(result[2], 5)) + + res = [] + for val in results: + counts = val[1] + labels = [ + item.strftime("%-d-%b-%Y{}".format(" %H:%M" if self.query_date_range.interval_name == "hour" else "")) + for item in val[0] + ] + days = [ + item.strftime("%Y-%m-%d{}".format(" %H:%M:%S" if self.query_date_range.interval_name == "hour" else "")) + for item in val[0] + ] + + label = "{} - {}".format("", val[2]) # entity.name + additional_values = {"label": label, "status": val[2]} + res.append( + { + "data": [float(c) for c in counts], + "count": float(sum(counts)), + "labels": labels, + "days": days, + **additional_values, + } + ) + + return LifecycleQueryResponse(result=res, timings=response.timings) + + @cached_property + def query_date_range(self): + return QueryDateRange( + date_range=self.query.dateRange, team=self.team, interval=self.query.interval, now=datetime.now() + ) + + @cached_property + def event_filter(self) -> ast.Expr: + event_filters: List[ast.Expr] = [] + with self.timings.measure("date_range"): + event_filters.append( + parse_expr( + "timestamp >= dateTrunc({interval}, {date_from}) - {one_interval}", + { + "interval": self.query_date_range.interval_period_string_as_hogql_constant(), + "one_interval": self.query_date_range.one_interval_period(), + "date_from": self.query_date_range.date_from_as_hogql(), + }, + timings=self.timings, + ) + ) + event_filters.append( + parse_expr( + "timestamp < dateTrunc({interval}, {date_to}) + {one_interval}", + { + "interval": self.query_date_range.interval_period_string_as_hogql_constant(), + "one_interval": self.query_date_range.one_interval_period(), + "date_to": self.query_date_range.date_to_as_hogql(), + }, + timings=self.timings, + ) + ) + with self.timings.measure("properties"): + if self.query.properties is not None and 
self.query.properties != []: + event_filters.append(property_to_expr(self.query.properties, self.team)) + with self.timings.measure("series_filters"): + for serie in self.query.series or []: + if isinstance(serie, ActionsNode): + action = Action.objects.get(pk=int(serie.id), team=self.team) + event_filters.append(action_to_expr(action)) + elif isinstance(serie, EventsNode): + if serie.event is not None: + event_filters.append( + ast.CompareOperation( + op=ast.CompareOperationOp.Eq, + left=ast.Field(chain=["event"]), + right=ast.Constant(value=str(serie.event)), + ) + ) + else: + raise ValueError(f"Invalid serie kind: {serie.kind}") + if serie.properties is not None and serie.properties != []: + event_filters.append(property_to_expr(serie.properties, self.team)) + with self.timings.measure("test_account_filters"): + if ( + self.query.filterTestAccounts + and isinstance(self.team.test_account_filters, list) + and len(self.team.test_account_filters) > 0 + ): + for property in self.team.test_account_filters: + event_filters.append(property_to_expr(property, self.team)) + + if len(event_filters) == 0: + return ast.Constant(value=True) + elif len(event_filters) == 1: + return event_filters[0] + else: + return ast.And(exprs=event_filters) + + @cached_property + def events_query(self): + with self.timings.measure("events_query"): + events_query = parse_select( + """ + SELECT + events.person.id as person_id, + min(events.person.created_at) AS created_at, + arraySort(groupUniqArray(dateTrunc({interval}, events.timestamp))) AS all_activity, + arrayPopBack(arrayPushFront(all_activity, dateTrunc({interval}, created_at))) as previous_activity, + arrayPopFront(arrayPushBack(all_activity, dateTrunc({interval}, toDateTime('1970-01-01 00:00:00')))) as following_activity, + arrayMap((previous, current, index) -> (previous = current ? 'new' : ((current - {one_interval_period}) = previous AND index != 1) ? 
'returning' : 'resurrecting'), previous_activity, all_activity, arrayEnumerate(all_activity)) as initial_status, + arrayMap((current, next) -> (current + {one_interval_period} = next ? '' : 'dormant'), all_activity, following_activity) as dormant_status, + arrayMap(x -> x + {one_interval_period}, arrayFilter((current, is_dormant) -> is_dormant = 'dormant', all_activity, dormant_status)) as dormant_periods, + arrayMap(x -> 'dormant', dormant_periods) as dormant_label, + arrayConcat(arrayZip(all_activity, initial_status), arrayZip(dormant_periods, dormant_label)) as temp_concat, + arrayJoin(temp_concat) as period_status_pairs, + period_status_pairs.1 as start_of_period, + period_status_pairs.2 as status + FROM events + WHERE {event_filter} + GROUP BY person_id + """, + placeholders={ + **self.query_date_range.to_placeholders(), + "event_filter": self.event_filter, + }, + timings=self.timings, + ) + sampling_factor = self.query.samplingFactor + if sampling_factor is not None and isinstance(sampling_factor, float): + sample_expr = ast.SampleExpr(sample_value=ast.RatioExpr(left=ast.Constant(value=sampling_factor))) + events_query.select_from.sample = sample_expr + + return events_query + + @cached_property + def periods_query(self): + with self.timings.measure("periods_query"): + periods_query = parse_select( + """ + SELECT ( + dateTrunc({interval}, {date_to}) - {number_interval_period} + ) AS start_of_period + FROM numbers( + dateDiff( + {interval}, + dateTrunc({interval}, {date_from}), + dateTrunc({interval}, {date_to} + {one_interval_period}) + ) + ) + """, + placeholders=self.query_date_range.to_placeholders(), + timings=self.timings, + ) + return periods_query diff --git a/posthog/hogql_queries/query_date_range.py b/posthog/hogql_queries/query_date_range.py deleted file mode 100644 index 4d76b222deb2b..0000000000000 --- a/posthog/hogql_queries/query_date_range.py +++ /dev/null @@ -1,114 +0,0 @@ -from datetime import datetime -from functools import cached_property, 
lru_cache -from typing import Optional - -import pytz -from dateutil.relativedelta import relativedelta - -from posthog.hogql.parser import parse_expr, ast -from posthog.models.team import Team -from posthog.queries.util import get_earliest_timestamp -from posthog.schema import DateRange, IntervalType -from posthog.utils import DEFAULT_DATE_FROM_DAYS, relative_date_parse, relative_date_parse_with_delta_mapping - - -# Originally similar to posthog/queries/query_date_range.py but rewritten to be used in HogQL queries -class QueryDateRange: - """Translation of the raw `date_from` and `date_to` filter values to datetimes.""" - - _team: Team - _date_range: Optional[DateRange] - _interval: Optional[IntervalType] - _now_non_timezone: datetime - - def __init__( - self, date_range: Optional[DateRange], team: Team, interval: Optional[IntervalType], now: datetime - ) -> None: - self._team = team - self._date_range = date_range - self._interval = interval - self._now_non_timezone = now - - @cached_property - def date_to(self) -> datetime: - date_to = self._now - delta_mapping = None - - if self._date_range and self._date_range.date_to: - date_to, delta_mapping = relative_date_parse_with_delta_mapping( - self._date_range.date_to, self._team.timezone_info, always_truncate=True, now=self._now - ) - - is_relative = not self._date_range or not self._date_range.date_to or delta_mapping is not None - if not self.is_hourly(): - date_to = date_to.replace(hour=23, minute=59, second=59, microsecond=999999) - elif is_relative: - date_to = date_to.replace(minute=59, second=59, microsecond=999999) - - return date_to - - def get_earliest_timestamp(self): - return get_earliest_timestamp(self._team.pk) - - @cached_property - def date_from(self) -> datetime: - date_from: datetime - if self._date_range and self._date_range.date_from == "all": - date_from = self.get_earliest_timestamp() - elif self._date_range and isinstance(self._date_range.date_from, str): - date_from = 
relative_date_parse(self._date_range.date_from, self._team.timezone_info, now=self._now) - else: - date_from = self._now.replace(hour=0, minute=0, second=0, microsecond=0) - relativedelta( - days=DEFAULT_DATE_FROM_DAYS - ) - - if not self.is_hourly(): - date_from = date_from.replace(hour=0, minute=0, second=0, microsecond=0) - - return date_from - - @cached_property - def _now(self): - return self._localize_to_team(self._now_non_timezone) - - def _localize_to_team(self, target: datetime): - return target.astimezone(pytz.timezone(self._team.timezone)) - - @cached_property - def date_to_str(self) -> str: - return self.date_to.strftime("%Y-%m-%d %H:%M:%S") - - @cached_property - def date_from_str(self) -> str: - return self.date_from.strftime("%Y-%m-%d %H:%M:%S") - - def is_hourly(self): - return self.interval.name == "hour" - - @cached_property - def date_to_as_hogql(self): - return parse_expr(f"assumeNotNull(toDateTime('{self.date_to_str}'))") - - @cached_property - def date_from_as_hogql(self): - return parse_expr(f"assumeNotNull(toDateTime('{self.date_from_str}'))") - - @cached_property - def interval(self): - return self._interval or IntervalType.day - - @cached_property - def one_interval_period_as_hogql(self): - return parse_expr(f"toInterval{self.interval.capitalize()}(1)") - - @lru_cache - def interval_periods_as_hogql(self, s: str): - return parse_expr(f"toInterval{self.interval.capitalize()}({s})") - - @cached_property - def interval_period_string(self): - return self.interval.value - - @cached_property - def interval_period_string_as_hogql(self): - return ast.Constant(value=self.interval.value) diff --git a/posthog/hogql_queries/query_runner.py b/posthog/hogql_queries/query_runner.py new file mode 100644 index 0000000000000..b8a3a10a4aa7b --- /dev/null +++ b/posthog/hogql_queries/query_runner.py @@ -0,0 +1,37 @@ +from typing import Optional + +from pydantic import BaseModel + +from posthog.hogql import ast +from posthog.hogql.context import HogQLContext 
+from posthog.hogql.printer import print_ast +from posthog.hogql.timings import HogQLTimings +from posthog.models import Team + + +class QueryRunner: + query: BaseModel + team: Team + timings: HogQLTimings + + def __init__(self, team: Team, timings: Optional[HogQLTimings] = None): + self.team = team + self.timings = timings or HogQLTimings() + + def run(self) -> BaseModel: + raise NotImplementedError() + + def to_query(self) -> ast.SelectQuery: + raise NotImplementedError() + + def to_persons_query(self) -> str: + # TODO: add support for selecting and filtering by breakdowns + raise NotImplementedError() + + def to_hogql(self) -> str: + with self.timings.measure("to_hogql"): + return print_ast( + self.to_query(), + HogQLContext(team_id=self.team.pk, enable_select_queries=True, timings=self.timings), + "hogql", + ) diff --git a/posthog/hogql_queries/test/test_lifecycle_hogql_query.py b/posthog/hogql_queries/test/test_lifecycle_hogql_query.py index 5cc56252b046f..d9996640f64c3 100644 --- a/posthog/hogql_queries/test/test_lifecycle_hogql_query.py +++ b/posthog/hogql_queries/test/test_lifecycle_hogql_query.py @@ -3,9 +3,8 @@ from freezegun import freeze_time from posthog.hogql.query import execute_hogql_query +from posthog.hogql_queries.lifecycle_query_runner import LifecycleQueryRunner from posthog.models.utils import UUIDT -from posthog.hogql_queries.lifecycle_hogql_query import create_events_query, create_time_filter, run_lifecycle_query -from posthog.hogql_queries.query_date_range import QueryDateRange from posthog.schema import DateRange, IntervalType, LifecycleQuery, EventsNode from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, _create_person, flush_persons_and_events @@ -67,105 +66,29 @@ def _create_test_events(self): ] ) - def _run_events_query(self, date_from, date_to, interval): - date_range = QueryDateRange( - date_range=DateRange(date_from=date_from, date_to=date_to), - team=self.team, - interval=interval, - 
now=datetime.strptime("2020-01-30T00:00:00Z", "%Y-%m-%dT%H:%M:%SZ"), + def _create_query_runner(self, date_from, date_to, interval) -> LifecycleQueryRunner: + series = [EventsNode(event="$pageview")] + query = LifecycleQuery( + dateRange=DateRange(date_from=date_from, date_to=date_to), interval=interval, series=series ) - time_filter = create_time_filter(date_range) + return LifecycleQueryRunner(team=self.team, query=query) - # TODO probably doesn't make sense to test like this - # maybe this query should be what is returned by the function - events_query = create_events_query(event_filter=time_filter, date_range=date_range) + def _run_events_query(self, date_from, date_to, interval): + events_query = self._create_query_runner(date_from, date_to, interval).events_query return execute_hogql_query( team=self.team, query=""" - SELECT - start_of_period, count(DISTINCT person_id) AS counts, status - FROM {events_query} - GROUP BY start_of_period, status - """, - query_type="LifecycleQuery", + SELECT + start_of_period, count(DISTINCT person_id) AS counts, status + FROM {events_query} + GROUP BY start_of_period, status + """, placeholders={"events_query": events_query}, + query_type="LifecycleEventsQuery", ) - def test_events_query_whole_range(self): - self._create_test_events() - - date_from = "2020-01-09" - date_to = "2020-01-19" - - response = self._run_events_query(date_from, date_to, IntervalType.day) - - self.assertEqual( - { - (datetime(2020, 1, 9, 0, 0), 1, "new"), # p2 - (datetime(2020, 1, 10, 0, 0), 1, "dormant"), # p2 - (datetime(2020, 1, 11, 0, 0), 1, "new"), # p1 - (datetime(2020, 1, 12, 0, 0), 1, "new"), # p3 - (datetime(2020, 1, 12, 0, 0), 1, "resurrecting"), # p2 - (datetime(2020, 1, 12, 0, 0), 1, "returning"), # p1 - (datetime(2020, 1, 13, 0, 0), 1, "returning"), # p1 - (datetime(2020, 1, 13, 0, 0), 2, "dormant"), # p2, p3 - (datetime(2020, 1, 14, 0, 0), 1, "dormant"), # p1 - (datetime(2020, 1, 15, 0, 0), 1, "resurrecting"), # p1 - (datetime(2020, 1, 15, 
0, 0), 1, "new"), # p4 - (datetime(2020, 1, 16, 0, 0), 2, "dormant"), # p1, p4 - (datetime(2020, 1, 17, 0, 0), 1, "resurrecting"), # p1 - (datetime(2020, 1, 18, 0, 0), 1, "dormant"), # p1 - (datetime(2020, 1, 19, 0, 0), 1, "resurrecting"), # p1 - (datetime(2020, 1, 20, 0, 0), 1, "dormant"), # p1 - }, - set(response.results), - ) - - def test_events_query_partial_range(self): - self._create_test_events() - date_from = "2020-01-12" - date_to = "2020-01-14" - response = self._run_events_query(date_from, date_to, IntervalType.day) - - self.assertEqual( - { - (datetime(2020, 1, 11, 0, 0), 1, "new"), # p1 - (datetime(2020, 1, 12, 0, 0), 1, "new"), # p3 - (datetime(2020, 1, 12, 0, 0), 1, "resurrecting"), # p2 - (datetime(2020, 1, 12, 0, 0), 1, "returning"), # p1 - (datetime(2020, 1, 13, 0, 0), 1, "returning"), # p1 - (datetime(2020, 1, 13, 0, 0), 2, "dormant"), # p2, p3 - (datetime(2020, 1, 14, 0, 0), 1, "dormant"), # p1 - }, - set(response.results), - ) - - # def test_start_on_dormant(self): - # self.create_test_events() - # date_from = "2020-01-13" - # date_to = "2020-01-14" - # response = self.run_events_query(date_from, date_to, IntervalType.day) - # - # self.assertEqual( - # { - # (datetime(2020, 1, 12, 0, 0), 1, "new"), # p3 - # # TODO this currently fails, as it treats p1 as resurrecting. 
- # # This might just be fine, later in the query we would just throw away results before the 13th - # (datetime(2020, 1, 12, 0, 0), 1, "resurrecting"), # p2 - # (datetime(2020, 1, 12, 0, 0), 1, "returning"), # p1 - # (datetime(2020, 1, 13, 0, 0), 1, "returning"), # p1 - # (datetime(2020, 1, 13, 0, 0), 2, "dormant"), # p2, p3 - # (datetime(2020, 1, 14, 0, 0), 1, "dormant"), # p1 - # }, - # set(response.results), - # ) - def _run_lifecycle_query(self, date_from, date_to, interval): - series = [EventsNode(event="$pageview")] - query = LifecycleQuery( - dateRange=DateRange(date_from=date_from, date_to=date_to), interval=interval, series=series - ) - return run_lifecycle_query(team=self.team, query=query) + return self._create_query_runner(date_from, date_to, interval).run() def test_lifecycle_query_whole_range(self): self._create_test_events() @@ -175,7 +98,7 @@ def test_lifecycle_query_whole_range(self): response = self._run_lifecycle_query(date_from, date_to, IntervalType.day) - statuses = [res["status"] for res in response["result"]] + statuses = [res["status"] for res in response.result] self.assertEqual(["new", "returning", "resurrecting", "dormant"], statuses) self.assertEqual( @@ -357,5 +280,54 @@ def test_lifecycle_query_whole_range(self): "status": "dormant", }, ], - response["result"], + response.result, + ) + + def test_events_query_whole_range(self): + self._create_test_events() + + date_from = "2020-01-09" + date_to = "2020-01-19" + + response = self._run_events_query(date_from, date_to, IntervalType.day) + + self.assertEqual( + { + (datetime(2020, 1, 9, 0, 0), 1, "new"), # p2 + (datetime(2020, 1, 10, 0, 0), 1, "dormant"), # p2 + (datetime(2020, 1, 11, 0, 0), 1, "new"), # p1 + (datetime(2020, 1, 12, 0, 0), 1, "new"), # p3 + (datetime(2020, 1, 12, 0, 0), 1, "resurrecting"), # p2 + (datetime(2020, 1, 12, 0, 0), 1, "returning"), # p1 + (datetime(2020, 1, 13, 0, 0), 1, "returning"), # p1 + (datetime(2020, 1, 13, 0, 0), 2, "dormant"), # p2, p3 + 
(datetime(2020, 1, 14, 0, 0), 1, "dormant"), # p1 + (datetime(2020, 1, 15, 0, 0), 1, "resurrecting"), # p1 + (datetime(2020, 1, 15, 0, 0), 1, "new"), # p4 + (datetime(2020, 1, 16, 0, 0), 2, "dormant"), # p1, p4 + (datetime(2020, 1, 17, 0, 0), 1, "resurrecting"), # p1 + (datetime(2020, 1, 18, 0, 0), 1, "dormant"), # p1 + (datetime(2020, 1, 19, 0, 0), 1, "resurrecting"), # p1 + (datetime(2020, 1, 20, 0, 0), 1, "dormant"), # p1 + }, + set(response.results), + ) + + def test_events_query_partial_range(self): + self._create_test_events() + date_from = "2020-01-12" + date_to = "2020-01-14" + response = self._run_events_query(date_from, date_to, IntervalType.day) + + self.assertEqual( + { + (datetime(2020, 1, 11, 0, 0), 1, "new"), # p1 + (datetime(2020, 1, 12, 0, 0), 1, "new"), # p3 + (datetime(2020, 1, 12, 0, 0), 1, "resurrecting"), # p2 + (datetime(2020, 1, 12, 0, 0), 1, "returning"), # p1 + (datetime(2020, 1, 13, 0, 0), 1, "returning"), # p1 + (datetime(2020, 1, 13, 0, 0), 2, "dormant"), # p2, p3 + (datetime(2020, 1, 14, 0, 0), 1, "dormant"), # p1 + }, + set(response.results), ) diff --git a/posthog/hogql_queries/utils/query_date_range.py b/posthog/hogql_queries/utils/query_date_range.py new file mode 100644 index 0000000000000..a9c86614cac5f --- /dev/null +++ b/posthog/hogql_queries/utils/query_date_range.py @@ -0,0 +1,124 @@ +import re +from functools import cached_property +from datetime import datetime +from typing import Optional, Dict +from zoneinfo import ZoneInfo + +from dateutil.relativedelta import relativedelta + +from posthog.hogql.parser import ast +from posthog.models.team import Team +from posthog.queries.util import get_earliest_timestamp +from posthog.schema import DateRange, IntervalType +from posthog.utils import DEFAULT_DATE_FROM_DAYS, relative_date_parse, relative_date_parse_with_delta_mapping + + +# Originally similar to posthog/queries/query_date_range.py but rewritten to be used in HogQL queries +class QueryDateRange: + """Translation of the raw 
`date_from` and `date_to` filter values to datetimes.""" + + _team: Team + _date_range: Optional[DateRange] + _interval: Optional[IntervalType] + _now_without_timezone: datetime + + def __init__( + self, date_range: Optional[DateRange], team: Team, interval: Optional[IntervalType], now: datetime + ) -> None: + self._team = team + self._date_range = date_range + self._interval = interval or IntervalType.day + self._now_without_timezone = now + + if not isinstance(self._interval, IntervalType) or re.match(r"[^a-z]", self._interval.name): + raise ValueError(f"Invalid interval: {interval}") + + def date_to(self) -> datetime: + date_to = self.now_with_timezone + delta_mapping = None + + if self._date_range and self._date_range.date_to: + date_to, delta_mapping = relative_date_parse_with_delta_mapping( + self._date_range.date_to, self._team.timezone_info, always_truncate=True, now=self.now_with_timezone + ) + + is_relative = not self._date_range or not self._date_range.date_to or delta_mapping is not None + if not self.is_hourly: + date_to = date_to.replace(hour=23, minute=59, second=59, microsecond=999999) + elif is_relative: + date_to = date_to.replace(minute=59, second=59, microsecond=999999) + + return date_to + + def get_earliest_timestamp(self) -> datetime: + return get_earliest_timestamp(self._team.pk) + + def date_from(self) -> datetime: + date_from: datetime + if self._date_range and self._date_range.date_from == "all": + date_from = self.get_earliest_timestamp() + elif self._date_range and isinstance(self._date_range.date_from, str): + date_from = relative_date_parse( + self._date_range.date_from, self._team.timezone_info, now=self.now_with_timezone + ) + else: + date_from = self.now_with_timezone.replace(hour=0, minute=0, second=0, microsecond=0) - relativedelta( + days=DEFAULT_DATE_FROM_DAYS + ) + + if not self.is_hourly: + date_from = date_from.replace(hour=0, minute=0, second=0, microsecond=0) + + return date_from + + @cached_property + def 
now_with_timezone(self) -> datetime: + return self._now_without_timezone.astimezone(ZoneInfo(self._team.timezone)) + + @cached_property + def date_to_str(self) -> str: + return self.date_to().strftime("%Y-%m-%d %H:%M:%S") + + @cached_property + def date_from_str(self) -> str: + return self.date_from().strftime("%Y-%m-%d %H:%M:%S") + + @cached_property + def is_hourly(self) -> bool: + return self.interval_name == "hour" + + @cached_property + def interval_type(self) -> IntervalType: + return self._interval or IntervalType.day + + @cached_property + def interval_name(self) -> str: + return self.interval_type.name + + def date_to_as_hogql(self) -> ast.Expr: + return ast.Call( + name="assumeNotNull", args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=self.date_to_str))])] + ) + + def date_from_as_hogql(self) -> ast.Expr: + return ast.Call( + name="assumeNotNull", args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=self.date_from_str))])] + ) + + def one_interval_period(self) -> ast.Expr: + return ast.Call(name=f"toInterval{self.interval_name.capitalize()}", args=[ast.Constant(value=1)]) + + def number_interval_periods(self) -> ast.Expr: + return ast.Call(name=f"toInterval{self.interval_name.capitalize()}", args=[ast.Field(chain=["number"])]) + + def interval_period_string_as_hogql_constant(self) -> ast.Expr: + return ast.Constant(value=self.interval_name) + + def to_placeholders(self) -> Dict[str, ast.Expr]: + return { + "interval": self.interval_period_string_as_hogql_constant(), + "one_interval_period": self.one_interval_period(), + "number_interval_period": self.number_interval_periods(), + "date_from": self.date_from_as_hogql(), + "date_to": self.date_to_as_hogql(), + } diff --git a/posthog/hogql_queries/test/test_query_date_range.py b/posthog/hogql_queries/utils/test/test_query_date_range.py similarity index 54% rename from posthog/hogql_queries/test/test_query_date_range.py rename to posthog/hogql_queries/utils/test/test_query_date_range.py index 
82966cc5f1bff..0ab8467567a50 100644 --- a/posthog/hogql_queries/test/test_query_date_range.py +++ b/posthog/hogql_queries/utils/test/test_query_date_range.py @@ -1,6 +1,6 @@ from dateutil import parser -from posthog.hogql_queries.query_date_range import QueryDateRange +from posthog.hogql_queries.utils.query_date_range import QueryDateRange from posthog.schema import DateRange, IntervalType from posthog.test.base import APIBaseTest @@ -10,32 +10,17 @@ def test_parsed_date(self): now = parser.isoparse("2021-08-25T00:00:00.000Z") date_range = DateRange(date_from="-48h") query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.day, now=now) - parsed_date_from = query_date_range.date_from - parsed_date_to = query_date_range.date_to - self.assertEqual( - parsed_date_from, - parser.isoparse("2021-08-23T00:00:00Z"), - ) - self.assertEqual( - parsed_date_to, - parser.isoparse("2021-08-25T23:59:59.999999Z"), - ) + self.assertEqual(query_date_range.date_from(), parser.isoparse("2021-08-23T00:00:00Z")) + self.assertEqual(query_date_range.date_to(), parser.isoparse("2021-08-25T23:59:59.999999Z")) def test_parsed_date_hour(self): now = parser.isoparse("2021-08-25T00:00:00.000Z") date_range = DateRange(date_from="-48h") query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.hour, now=now) - parsed_date_from = query_date_range.date_from - parsed_date_to = query_date_range.date_to - + self.assertEqual(query_date_range.date_from(), parser.isoparse("2021-08-23T00:00:00Z")) self.assertEqual( - parsed_date_from, - parser.isoparse("2021-08-23T00:00:00Z"), - ) - self.assertEqual( - parsed_date_to, - parser.isoparse("2021-08-25T00:59:59.999999Z"), + query_date_range.date_to(), parser.isoparse("2021-08-25T00:59:59.999999Z") ) # ensure last hour is included def test_parsed_date_middle_of_hour(self): @@ -43,34 +28,25 @@ def test_parsed_date_middle_of_hour(self): date_range = DateRange(date_from="2021-08-23 
05:00:00", date_to="2021-08-26 07:00:00") query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.hour, now=now) - parsed_date_from = query_date_range.date_from - parsed_date_to = query_date_range.date_to - - self.assertEqual(parsed_date_from, parser.isoparse("2021-08-23 05:00:00Z")) - self.assertEqual(parsed_date_to, parser.isoparse("2021-08-26 07:00:00Z")) # ensure last hour is included + self.assertEqual(query_date_range.date_from(), parser.isoparse("2021-08-23 05:00:00Z")) + self.assertEqual( + query_date_range.date_to(), parser.isoparse("2021-08-26 07:00:00Z") + ) # ensure last hour is included def test_parsed_date_week(self): now = parser.isoparse("2021-08-25T00:00:00.000Z") date_range = DateRange(date_from="-7d") query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.week, now=now) - parsed_date_from = query_date_range.date_from - parsed_date_to = query_date_range.date_to - self.assertEqual( - parsed_date_from, - parser.isoparse("2021-08-18 00:00:00Z"), - ) - self.assertEqual( - parsed_date_to, - parser.isoparse("2021-08-25 23:59:59.999999Z"), - ) + self.assertEqual(query_date_range.date_from(), parser.isoparse("2021-08-18 00:00:00Z")) + self.assertEqual(query_date_range.date_to(), parser.isoparse("2021-08-25 23:59:59.999999Z")) def test_is_hourly(self): now = parser.isoparse("2021-08-25T00:00:00.000Z") date_range = DateRange(date_from="-48h") query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.day, now=now) - self.assertFalse(query_date_range.is_hourly()) + self.assertFalse(query_date_range.is_hourly) query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.hour, now=now) - self.assertTrue(query_date_range.is_hourly()) + self.assertTrue(query_date_range.is_hourly) diff --git a/posthog/management/commands/create_batch_export_from_app.py b/posthog/management/commands/create_batch_export_from_app.py 
index 20b0b4c89ca86..eadf71532db02 100644 --- a/posthog/management/commands/create_batch_export_from_app.py +++ b/posthog/management/commands/create_batch_export_from_app.py @@ -48,6 +48,12 @@ def add_arguments(self, parser): default=False, help="Backfill the newly created BatchExport with the last period of data.", ) + parser.add_argument( + "--migrate-disabled-plugin-config", + action="store_true", + default=False, + help="Migrate a PluginConfig even if its disabled.", + ) def handle(self, *args, **options): """Handle creation of a BatchExport from a given PluginConfig.""" @@ -82,8 +88,8 @@ def handle(self, *args, **options): "destination_data": destination_data, } - if dry_run is True: - self.stdout.write("No BatchExport will be created as this is a dry run or confirmation check rejected.") + if dry_run is True or (options["migrate_disabled_plugin_config"] is False and plugin_config.enabled is False): + self.stdout.write("No BatchExport will be created as this is a dry run or existing plugin is disabled.") return json.dumps(batch_export_data, indent=4, default=str) else: destination = BatchExportDestination(**batch_export_data["destination_data"]) diff --git a/posthog/management/commands/send_usage_report.py b/posthog/management/commands/send_usage_report.py index 4c67d451c2a8a..03e4b4a102da4 100644 --- a/posthog/management/commands/send_usage_report.py +++ b/posthog/management/commands/send_usage_report.py @@ -1,5 +1,3 @@ -import pprint - from django.core.management.base import BaseCommand from posthog.tasks.usage_report import send_all_org_usage_reports @@ -10,7 +8,6 @@ class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument("--dry-run", type=bool, help="Print information instead of sending it") - parser.add_argument("--print-reports", type=bool, help="Print the reports in full") parser.add_argument("--date", type=str, help="The date to be ran in format YYYY-MM-DD") parser.add_argument("--event-name", type=str, help="Override the event 
name to be sent - for testing") parser.add_argument( @@ -28,20 +25,14 @@ def handle(self, *args, **options): run_async = options["async"] if run_async: - results = send_all_org_usage_reports.delay( + send_all_org_usage_reports.delay( dry_run, date, event_name, skip_capture_event=skip_capture_event, only_organization_id=organization_id ) else: - results = send_all_org_usage_reports( + send_all_org_usage_reports( dry_run, date, event_name, skip_capture_event=skip_capture_event, only_organization_id=organization_id ) - if options["print_reports"]: - print("") # noqa T201 - pprint.pprint(results) # noqa T203 - print("") # noqa T201 if dry_run: print("Dry run so not sent.") # noqa T201 - else: - print(f"{len(results)} Reports sent!") # noqa T201 print("Done!") # noqa T201 diff --git a/posthog/management/commands/test/test_create_batch_export_from_app.py b/posthog/management/commands/test/test_create_batch_export_from_app.py index 4a51975d86648..bbbb36079d013 100644 --- a/posthog/management/commands/test/test_create_batch_export_from_app.py +++ b/posthog/management/commands/test/test_create_batch_export_from_app.py @@ -1,4 +1,5 @@ import datetime as dt +import itertools import json import typing @@ -116,6 +117,20 @@ def plugin_config(request, s3_plugin_config, snowflake_plugin_config) -> PluginC raise ValueError(f"Unsupported plugin: {request.param}") +@pytest.fixture +def disabled_plugin_config(request, s3_plugin_config, snowflake_plugin_config) -> PluginConfig: + if request.param == "S3": + s3_plugin_config.enabled = False + s3_plugin_config.save() + return s3_plugin_config + elif request.param == "Snowflake": + snowflake_plugin_config.enabled = False + snowflake_plugin_config.save() + return snowflake_plugin_config + else: + raise ValueError(f"Unsupported plugin: {request.param}") + + @pytest.mark.django_db @pytest.mark.parametrize( "plugin_config,config,expected_type", @@ -155,7 +170,6 @@ def 
test_create_batch_export_from_app_fails_with_mismatched_team_id(plugin_confi @pytest.mark.parametrize("plugin_config", ["S3", "Snowflake"], indirect=True) def test_create_batch_export_from_app_dry_run(plugin_config): """Test a dry_run of the create_batch_export_from_app command.""" - output = call_command( "create_batch_export_from_app", f"--plugin-config-id={plugin_config.id}", @@ -166,6 +180,7 @@ def test_create_batch_export_from_app_dry_run(plugin_config): batch_export_data = json.loads(output) + assert "id" not in batch_export_data assert batch_export_data["team_id"] == plugin_config.team.id assert batch_export_data["interval"] == "hour" assert batch_export_data["name"] == f"{export_type} Export" @@ -178,19 +193,14 @@ def test_create_batch_export_from_app_dry_run(plugin_config): @pytest.mark.django_db @pytest.mark.parametrize( "interval,plugin_config,disable_plugin_config", - [ - ("hour", "S3", True), - ("hour", "S3", False), - ("day", "S3", True), - ("day", "S3", False), - ("hour", "Snowflake", True), - ("hour", "Snowflake", False), - ("day", "Snowflake", True), - ("day", "Snowflake", False), - ], + itertools.product(["hour", "day"], ["S3", "Snowflake"], [True, False]), indirect=["plugin_config"], ) -def test_create_batch_export_from_app(interval, plugin_config, disable_plugin_config): +def test_create_batch_export_from_app( + interval, + plugin_config, + disable_plugin_config, +): """Test a live run of the create_batch_export_from_app command.""" args = [ f"--plugin-config-id={plugin_config.id}", @@ -237,6 +247,69 @@ def test_create_batch_export_from_app(interval, plugin_config, disable_plugin_co assert args[key] == expected +@pytest.mark.django_db +@pytest.mark.parametrize( + "interval,disabled_plugin_config,migrate_disabled_plugin_config", + itertools.product(["hour", "day"], ["S3", "Snowflake"], [True, False]), + indirect=["disabled_plugin_config"], +) +def test_create_batch_export_from_app_with_disabled_plugin( + interval, + disabled_plugin_config, + 
migrate_disabled_plugin_config, +): + """Test a live run of the create_batch_export_from_app command.""" + args = [ + f"--plugin-config-id={disabled_plugin_config.id}", + f"--team-id={disabled_plugin_config.team.id}", + f"--interval={interval}", + ] + if migrate_disabled_plugin_config: + args.append("--migrate-disabled-plugin-config") + + output = call_command("create_batch_export_from_app", *args) + + disabled_plugin_config.refresh_from_db() + assert disabled_plugin_config.enabled is False + + export_type, config = map_plugin_config_to_destination(disabled_plugin_config) + + batch_export_data = json.loads(output) + + assert batch_export_data["team_id"] == disabled_plugin_config.team.id + assert batch_export_data["interval"] == interval + assert batch_export_data["name"] == f"{export_type} Export" + assert batch_export_data["destination_data"] == { + "type": export_type, + "config": config, + } + + if not migrate_disabled_plugin_config: + assert "id" not in batch_export_data + return + + assert "id" in batch_export_data + + temporal = sync_connect() + + schedule = describe_schedule(temporal, str(batch_export_data["id"])) + expected_interval = dt.timedelta(**{f"{interval}s": 1}) + assert schedule.schedule.spec.intervals[0].every == expected_interval + + codec = EncryptionCodec(settings=settings) + decoded_payload = async_to_sync(codec.decode)(schedule.schedule.action.args) + args = json.loads(decoded_payload[0].data) + + # Common inputs + assert args["team_id"] == disabled_plugin_config.team.pk + assert args["batch_export_id"] == str(batch_export_data["id"]) + assert args["interval"] == interval + + # Type specific inputs + for key, expected in config.items(): + assert args[key] == expected + + @async_to_sync async def list_workflows(temporal, schedule_id: str): """List Workflows scheduled by given Schedule.""" diff --git a/posthog/migrations/0348_alter_datawarehousetable_format.py b/posthog/migrations/0348_alter_datawarehousetable_format.py new file mode 100644 
index 0000000000000..72434bbc99fdb --- /dev/null +++ b/posthog/migrations/0348_alter_datawarehousetable_format.py @@ -0,0 +1,20 @@ +# Generated by Django 3.2.19 on 2023-09-11 15:22 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("posthog", "0347_add_bigquery_export_type"), + ] + + operations = [ + migrations.AlterField( + model_name="datawarehousetable", + name="format", + field=models.CharField( + choices=[("CSV", "CSV"), ("Parquet", "Parquet"), ("JSONEachRow", "JSON")], max_length=128 + ), + ), + ] diff --git a/posthog/migrations/0349_update_survey_query_name.py b/posthog/migrations/0349_update_survey_query_name.py new file mode 100644 index 0000000000000..cbcbbb3a0c954 --- /dev/null +++ b/posthog/migrations/0349_update_survey_query_name.py @@ -0,0 +1,38 @@ +# Generated by Django 3.2.19 on 2023-09-12 10:35 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ("posthog", "0348_alter_datawarehousetable_format"), + ] + + operations = [ + migrations.AlterField( + model_name="survey", + name="linked_flag", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="surveys_linked_flag", + related_query_name="survey_linked_flag", + to="posthog.featureflag", + ), + ), + migrations.AlterField( + model_name="survey", + name="targeting_flag", + field=models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="surveys_targeting_flag", + related_query_name="survey_targeting_flag", + to="posthog.featureflag", + ), + ), + ] diff --git a/posthog/migrations/0350_add_notebook_text_content.py b/posthog/migrations/0350_add_notebook_text_content.py new file mode 100644 index 0000000000000..bfe4b079b9945 --- /dev/null +++ b/posthog/migrations/0350_add_notebook_text_content.py @@ -0,0 +1,18 @@ +# Generated by Django 3.2.19 on 
2023-09-12 18:09 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("posthog", "0349_update_survey_query_name"), + ] + + operations = [ + migrations.AddField( + model_name="notebook", + name="text_content", + field=models.TextField(blank=True, null=True), + ), + ] diff --git a/posthog/models/activity_logging/activity_log.py b/posthog/models/activity_logging/activity_log.py index ba47b2c326ff1..f3b36e2c3dbd0 100644 --- a/posthog/models/activity_logging/activity_log.py +++ b/posthog/models/activity_logging/activity_log.py @@ -99,7 +99,7 @@ class Meta: field_exclusions: Dict[ActivityScope, List[str]] = { - "Notebook": ["id", "last_modified_at", "last_modified_by", "created_at", "created_by"], + "Notebook": ["id", "last_modified_at", "last_modified_by", "created_at", "created_by", "text_content"], "FeatureFlag": ["id", "created_at", "created_by", "is_simple_flag", "experiment", "team", "featureflagoverride"], "Person": [ "id", diff --git a/posthog/models/event/util.py b/posthog/models/event/util.py index 8b6a2fbd33d27..8f76e9f79fb91 100644 --- a/posthog/models/event/util.py +++ b/posthog/models/event/util.py @@ -3,7 +3,7 @@ import uuid from typing import Any, Dict, List, Optional, Set, Union -import pytz +from zoneinfo import ZoneInfo from dateutil.parser import isoparse from django.utils import timezone from rest_framework import serializers @@ -47,7 +47,7 @@ def create_event( timestamp = timezone.now() assert timestamp is not None - timestamp = isoparse(timestamp) if isinstance(timestamp, str) else timestamp.astimezone(pytz.utc) + timestamp = isoparse(timestamp) if isinstance(timestamp, str) else timestamp.astimezone(ZoneInfo("UTC")) elements_chain = "" if elements and len(elements) > 0: @@ -89,7 +89,9 @@ def format_clickhouse_timestamp( if default is None: default = timezone.now() parsed_datetime = ( - isoparse(raw_timestamp) if isinstance(raw_timestamp, str) else (raw_timestamp or 
default).astimezone(pytz.utc) + isoparse(raw_timestamp) + if isinstance(raw_timestamp, str) + else (raw_timestamp or default).astimezone(ZoneInfo("UTC")) ) return parsed_datetime.strftime("%Y-%m-%d %H:%M:%S.%f") @@ -110,16 +112,16 @@ def bulk_create_events(events: List[Dict[str, Any]], person_mapping: Optional[Di inserts = [] params: Dict[str, Any] = {} for index, event in enumerate(events): - datetime64_default_timestamp = timezone.now().astimezone(pytz.utc).strftime("%Y-%m-%d %H:%M:%S") + datetime64_default_timestamp = timezone.now().astimezone(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S") timestamp = event.get("timestamp") or dt.datetime.now() if isinstance(timestamp, str): timestamp = isoparse(timestamp) # Offset timezone-naive datetime by project timezone, to facilitate @also_test_with_different_timezones if timestamp.tzinfo is None: team_timezone = event["team"].timezone if event.get("team") else "UTC" - timestamp = pytz.timezone(team_timezone).localize(timestamp) + timestamp = timestamp.replace(tzinfo=ZoneInfo(team_timezone)) # Format for ClickHouse - timestamp = timestamp.astimezone(pytz.utc).strftime("%Y-%m-%d %H:%M:%S.%f") + timestamp = timestamp.astimezone(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S.%f") elements_chain = "" if event.get("elements") and len(event["elements"]) > 0: diff --git a/posthog/models/feedback/survey.py b/posthog/models/feedback/survey.py index 9579ab7a5a782..5287747ea98f5 100644 --- a/posthog/models/feedback/survey.py +++ b/posthog/models/feedback/survey.py @@ -24,7 +24,7 @@ class Meta: blank=True, on_delete=models.SET_NULL, related_name="surveys_linked_flag", - related_query_name="survey", + related_query_name="survey_linked_flag", ) targeting_flag: models.ForeignKey = models.ForeignKey( "posthog.FeatureFlag", @@ -32,7 +32,7 @@ class Meta: blank=True, on_delete=models.SET_NULL, related_name="surveys_targeting_flag", - related_query_name="survey", + related_query_name="survey_targeting_flag", ) type: models.CharField = 
models.CharField(max_length=40, choices=SurveyType.choices) diff --git a/posthog/models/filters/mixins/common.py b/posthog/models/filters/mixins/common.py index 530b7b83b9d13..bbb727407c6be 100644 --- a/posthog/models/filters/mixins/common.py +++ b/posthog/models/filters/mixins/common.py @@ -4,7 +4,7 @@ from math import ceil from typing import Any, Dict, List, Literal, Optional, Union, cast -import pytz +from zoneinfo import ZoneInfo from dateutil.relativedelta import relativedelta from django.utils import timezone from rest_framework.exceptions import ValidationError @@ -361,11 +361,13 @@ def date_to(self) -> datetime.datetime: if isinstance(self._date_to, str): try: return datetime.datetime.strptime(self._date_to, "%Y-%m-%d").replace( - hour=23, minute=59, second=59, microsecond=999999, tzinfo=pytz.UTC + hour=23, minute=59, second=59, microsecond=999999, tzinfo=ZoneInfo("UTC") ) except ValueError: try: - return datetime.datetime.strptime(self._date_to, "%Y-%m-%d %H:%M:%S").replace(tzinfo=pytz.UTC) + return datetime.datetime.strptime(self._date_to, "%Y-%m-%d %H:%M:%S").replace( + tzinfo=ZoneInfo("UTC") + ) except ValueError: date, delta_mapping = relative_date_parse_with_delta_mapping(self._date_to, self.team.timezone_info, always_truncate=True) # type: ignore self.date_to_delta_mapping = delta_mapping diff --git a/posthog/models/filters/mixins/retention.py b/posthog/models/filters/mixins/retention.py index a6d38bf76e7ce..53146bf62a7b3 100644 --- a/posthog/models/filters/mixins/retention.py +++ b/posthog/models/filters/mixins/retention.py @@ -90,7 +90,7 @@ def date_to(self) -> datetime: date_to = date_to + self.period_increment if self.period == "Hour": - return date_to + return date_to.replace(minute=0, second=0, microsecond=0) else: return date_to.replace(hour=0, minute=0, second=0, microsecond=0) diff --git a/posthog/models/filters/test/__snapshots__/test_filter.ambr b/posthog/models/filters/test/__snapshots__/test_filter.ambr index 922fdf12a27f1..9be8465ff5f0f 
100644 --- a/posthog/models/filters/test/__snapshots__/test_filter.ambr +++ b/posthog/models/filters/test/__snapshots__/test_filter.ambr @@ -11,6 +11,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -22,6 +23,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -60,6 +62,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -71,6 +74,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -109,6 +113,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -120,6 +125,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -158,6 +164,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + 
"posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -169,6 +176,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", @@ -207,6 +215,7 @@ "posthog_team"."updated_at", "posthog_team"."anonymize_ips", "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", "posthog_team"."ingested_event", "posthog_team"."autocapture_opt_out", "posthog_team"."autocapture_exceptions_opt_in", @@ -218,6 +227,7 @@ "posthog_team"."signup_token", "posthog_team"."is_demo", "posthog_team"."access_control", + "posthog_team"."week_start_day", "posthog_team"."inject_web_apps", "posthog_team"."test_account_filters", "posthog_team"."test_account_filters_default_checked", diff --git a/posthog/models/group/util.py b/posthog/models/group/util.py index 1fcf975ca70b2..fa3520dc9912c 100644 --- a/posthog/models/group/util.py +++ b/posthog/models/group/util.py @@ -2,7 +2,7 @@ import json from typing import Dict, Optional, Union -import pytz +from zoneinfo import ZoneInfo from dateutil.parser import isoparse from django.utils.timezone import now @@ -27,7 +27,7 @@ def raw_create_group_ch( DON'T USE DIRECTLY - `create_group` is the correct option, unless you specifically want to sync Postgres state from ClickHouse yourself.""" if timestamp is None: - timestamp = now().astimezone(pytz.utc) + timestamp = now().astimezone(ZoneInfo("UTC")) data = { "group_type_index": group_type_index, "group_key": group_key, @@ -58,7 +58,7 @@ def create_group( if isinstance(timestamp, str): timestamp = isoparse(timestamp) else: - timestamp = timestamp.astimezone(pytz.utc) + timestamp = timestamp.astimezone(ZoneInfo("UTC")) raw_create_group_ch(team_id, group_type_index, 
group_key, properties, timestamp, timestamp=timestamp, sync=sync) group = Group.objects.create( diff --git a/posthog/models/notebook/notebook.py b/posthog/models/notebook/notebook.py index dde92fddab944..490645909df26 100644 --- a/posthog/models/notebook/notebook.py +++ b/posthog/models/notebook/notebook.py @@ -12,6 +12,7 @@ class Notebook(UUIDModel): team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) title: models.CharField = models.CharField(max_length=256, blank=True, null=True) content: JSONField = JSONField(default=None, null=True, blank=True) + text_content: models.TextField = models.TextField(blank=True, null=True) deleted: models.BooleanField = models.BooleanField(default=False) version: models.IntegerField = models.IntegerField(default=0) created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True) diff --git a/posthog/models/person/util.py b/posthog/models/person/util.py index 0be065a3258fb..9af13bc6e9d05 100644 --- a/posthog/models/person/util.py +++ b/posthog/models/person/util.py @@ -4,7 +4,7 @@ from typing import Dict, List, Optional, Union from uuid import UUID -import pytz +from zoneinfo import ZoneInfo from dateutil.parser import isoparse from django.db.models.query import QuerySet from django.db.models.signals import post_delete, post_save @@ -124,12 +124,12 @@ def create_person( if isinstance(timestamp, str): timestamp = isoparse(timestamp) else: - timestamp = timestamp.astimezone(pytz.utc) + timestamp = timestamp.astimezone(ZoneInfo("UTC")) if created_at is None: created_at = timestamp else: - created_at = created_at.astimezone(pytz.utc) + created_at = created_at.astimezone(ZoneInfo("UTC")) data = { "id": str(uuid), diff --git a/posthog/models/session_replay_event/migrations_sql.py b/posthog/models/session_replay_event/migrations_sql.py index 09f4e300be624..b11f5581c930f 100644 --- a/posthog/models/session_replay_event/migrations_sql.py +++ 
b/posthog/models/session_replay_event/migrations_sql.py @@ -65,3 +65,29 @@ table_name=SESSION_REPLAY_EVENTS_DATA_TABLE(), cluster=settings.CLICKHOUSE_CLUSTER, ) + +# migration to add the message_count, event_count and _timestamp columns to the session replay table +ALTER_SESSION_REPLAY_ADD_EVENT_COUNT_COLUMN = """ + ALTER TABLE {table_name} on CLUSTER '{cluster}' + ADD COLUMN IF NOT EXISTS message_count SimpleAggregateFunction(sum, Int64), + ADD COLUMN IF NOT EXISTS event_count SimpleAggregateFunction(sum, Int64), + -- fly by addition so that we can track lag in the data the same way as for other tables + ADD COLUMN IF NOT EXISTS _timestamp SimpleAggregateFunction(max, DateTime) +""" + +ADD_EVENT_COUNT_DISTRIBUTED_SESSION_REPLAY_EVENTS_TABLE_SQL = ( + lambda: ALTER_SESSION_REPLAY_ADD_EVENT_COUNT_COLUMN.format( + table_name="session_replay_events", + cluster=settings.CLICKHOUSE_CLUSTER, + ) +) + +ADD_EVENT_COUNT_WRITABLE_SESSION_REPLAY_EVENTS_TABLE_SQL = lambda: ALTER_SESSION_REPLAY_ADD_EVENT_COUNT_COLUMN.format( + table_name="writable_session_replay_events", + cluster=settings.CLICKHOUSE_CLUSTER, +) + +ADD_EVENT_COUNT_SESSION_REPLAY_EVENTS_TABLE_SQL = lambda: ALTER_SESSION_REPLAY_ADD_EVENT_COUNT_COLUMN.format( + table_name=SESSION_REPLAY_EVENTS_DATA_TABLE(), + cluster=settings.CLICKHOUSE_CLUSTER, +) diff --git a/posthog/models/session_replay_event/sql.py b/posthog/models/session_replay_event/sql.py index 1221fd80bb6de..dfe839843979f 100644 --- a/posthog/models/session_replay_event/sql.py +++ b/posthog/models/session_replay_event/sql.py @@ -27,7 +27,9 @@ console_log_count Int64, console_warn_count Int64, console_error_count Int64, - size Int64 + size Int64, + event_count Int64, + message_count Int64 ) ENGINE = {engine} """ @@ -53,7 +55,15 @@ console_warn_count SimpleAggregateFunction(sum, Int64), console_error_count SimpleAggregateFunction(sum, Int64), -- this column allows us to estimate the amount of data that is being ingested - size SimpleAggregateFunction(sum, 
Int64), + -- this allows us to count the number of messages received in a session + -- often very useful in incidents or debugging + message_count SimpleAggregateFunction(sum, Int64), + -- this allows us to count the number of snapshot events received in a session + -- often very useful in incidents or debugging + -- because we batch events we expect message_count to be lower than event_count + event_count SimpleAggregateFunction(sum, Int64), + _timestamp SimpleAggregateFunction(max, DateTime) ) ENGINE = {engine} """ @@ -117,7 +127,11 @@ sum(console_log_count) as console_log_count, sum(console_warn_count) as console_warn_count, sum(console_error_count) as console_error_count, -sum(size) as size +sum(size) as size, +-- we can count the number of kafka messages instead of sending it explicitly +sum(message_count) as message_count, +sum(event_count) as event_count, +max(_timestamp) as _timestamp FROM {database}.kafka_session_replay_events group by session_id, team_id """.format( diff --git a/posthog/models/test/test_subscription_model.py b/posthog/models/test/test_subscription_model.py index 232b6c99791cc..bc9bf583e6f15 100644 --- a/posthog/models/test/test_subscription_model.py +++ b/posthog/models/test/test_subscription_model.py @@ -3,7 +3,7 @@ import jwt import pytest -import pytz +from zoneinfo import ZoneInfo from django.conf import settings from django.utils import timezone from freezegun import freeze_time @@ -33,7 +33,7 @@ def _create_insight_subscription(self, **kwargs): target_value="tests@posthog.com", frequency="weekly", interval=2, - start_date=datetime(2022, 1, 1, 0, 0, 0, 0).replace(tzinfo=pytz.UTC), + start_date=datetime(2022, 1, 1, 0, 0, 0, 0).replace(tzinfo=ZoneInfo("UTC")), ) params.update(**kwargs) @@ -44,8 +44,8 @@ def test_creation(self): subscription.save() assert subscription.title == "My Subscription" - subscription.set_next_delivery_date(datetime(2022, 1, 2, 0, 0, 0).replace(tzinfo=pytz.UTC)) - assert subscription.next_delivery_date == 
datetime(2022, 1, 15, 0, 0).replace(tzinfo=pytz.UTC) + subscription.set_next_delivery_date(datetime(2022, 1, 2, 0, 0, 0).replace(tzinfo=ZoneInfo("UTC"))) + assert subscription.next_delivery_date == datetime(2022, 1, 15, 0, 0).replace(tzinfo=ZoneInfo("UTC")) def test_update_next_delivery_date_on_save(self): subscription = self._create_insight_subscription() @@ -60,7 +60,7 @@ def test_only_updates_next_delivery_date_if_rrule_changes(self): old_date = subscription.next_delivery_date # Change a property that does affect it - subscription.start_date = datetime(2023, 1, 1, 0, 0, 0, 0).replace(tzinfo=pytz.UTC) + subscription.start_date = datetime(2023, 1, 1, 0, 0, 0, 0).replace(tzinfo=ZoneInfo("UTC")) subscription.save() assert old_date != subscription.next_delivery_date old_date = subscription.next_delivery_date @@ -72,7 +72,6 @@ def test_only_updates_next_delivery_date_if_rrule_changes(self): assert old_date == subscription.next_delivery_date def test_generating_token(self): - subscription = self._create_insight_subscription( target_value="test1@posthog.com,test2@posthog.com,test3@posthog.com" ) @@ -143,13 +142,13 @@ def test_complex_rrule_configuration(self): # Last wed or fri of 01.22 is Wed 28th subscription.save() - assert subscription.next_delivery_date == datetime(2022, 1, 28, 0, 0).replace(tzinfo=pytz.UTC) + assert subscription.next_delivery_date == datetime(2022, 1, 28, 0, 0).replace(tzinfo=ZoneInfo("UTC")) # Last wed or fri of 01.22 is Wed 30th subscription.set_next_delivery_date(subscription.next_delivery_date) - assert subscription.next_delivery_date == datetime(2022, 3, 30, 0, 0).replace(tzinfo=pytz.UTC) + assert subscription.next_delivery_date == datetime(2022, 3, 30, 0, 0).replace(tzinfo=ZoneInfo("UTC")) # Last wed or fri of 01.22 is Fri 27th subscription.set_next_delivery_date(subscription.next_delivery_date) - assert subscription.next_delivery_date == datetime(2022, 5, 27, 0, 0).replace(tzinfo=pytz.UTC) + assert subscription.next_delivery_date == 
datetime(2022, 5, 27, 0, 0).replace(tzinfo=ZoneInfo("UTC")) def test_should_work_for_nth_days(self): # Equivalent to last monday and wednesday of every other month @@ -160,15 +159,15 @@ def test_should_work_for_nth_days(self): byweekday=["monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday"], ) subscription.save() - assert subscription.next_delivery_date == datetime(2022, 1, 3, 0, 0).replace(tzinfo=pytz.UTC) + assert subscription.next_delivery_date == datetime(2022, 1, 3, 0, 0).replace(tzinfo=ZoneInfo("UTC")) subscription.set_next_delivery_date(subscription.next_delivery_date) - assert subscription.next_delivery_date == datetime(2022, 2, 3, 0, 0).replace(tzinfo=pytz.UTC) + assert subscription.next_delivery_date == datetime(2022, 2, 3, 0, 0).replace(tzinfo=ZoneInfo("UTC")) def test_should_ignore_bysetpos_if_missing_weeekday(self): # Equivalent to last monday and wednesday of every other month subscription = self._create_insight_subscription(interval=1, frequency="monthly", bysetpos=3) subscription.save() - assert subscription.next_delivery_date == datetime(2022, 2, 1, 0, 0).replace(tzinfo=pytz.UTC) + assert subscription.next_delivery_date == datetime(2022, 2, 1, 0, 0).replace(tzinfo=ZoneInfo("UTC")) def test_subscription_summary(self): subscription = self._create_insight_subscription(interval=1, frequency="monthly", bysetpos=None) diff --git a/posthog/queries/app_metrics/historical_exports.py b/posthog/queries/app_metrics/historical_exports.py index 4b7f2864546ad..484f01546001b 100644 --- a/posthog/queries/app_metrics/historical_exports.py +++ b/posthog/queries/app_metrics/historical_exports.py @@ -2,7 +2,7 @@ from datetime import timedelta from typing import Dict, Optional -import pytz +from zoneinfo import ZoneInfo from posthog.models.activity_logging.activity_log import ActivityLog from posthog.models.plugin import PluginStorage @@ -65,10 +65,12 @@ def historical_export_metrics(team: Team, plugin_config_id: int, job_id: str): filter_data = 
{ "category": "exportEvents", "job_id": job_id, - "date_from": (export_summary["created_at"] - timedelta(hours=1)).astimezone(pytz.utc).isoformat(), + "date_from": (export_summary["created_at"] - timedelta(hours=1)).astimezone(ZoneInfo("UTC")).isoformat(), } if "finished_at" in export_summary: - filter_data["date_to"] = (export_summary["finished_at"] + timedelta(hours=1)).astimezone(pytz.utc).isoformat() + filter_data["date_to"] = ( + (export_summary["finished_at"] + timedelta(hours=1)).astimezone(ZoneInfo("UTC")).isoformat() + ) filter = AppMetricsRequestSerializer(data=filter_data) filter.is_valid(raise_exception=True) diff --git a/posthog/queries/funnels/test/test_funnel_trends.py b/posthog/queries/funnels/test/test_funnel_trends.py index 1cb191c017ad2..12e8b81af02a5 100644 --- a/posthog/queries/funnels/test/test_funnel_trends.py +++ b/posthog/queries/funnels/test/test_funnel_trends.py @@ -1,6 +1,6 @@ from datetime import date, datetime, timedelta -import pytz +from zoneinfo import ZoneInfo from freezegun.api import freeze_time from posthog.constants import INSIGHT_FUNNELS, TRENDS_LINEAR, FunnelOrderType @@ -113,43 +113,43 @@ def test_only_one_user_reached_one_step(self): "reached_to_step_count": 0, "conversion_rate": 0, "reached_from_step_count": 1, - "timestamp": datetime(2021, 6, 7, 0, 0).replace(tzinfo=pytz.UTC), + "timestamp": datetime(2021, 6, 7, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, { "reached_to_step_count": 0, "conversion_rate": 0, "reached_from_step_count": 0, - "timestamp": datetime(2021, 6, 8, 0, 0).replace(tzinfo=pytz.UTC), + "timestamp": datetime(2021, 6, 8, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, { "reached_to_step_count": 0, "conversion_rate": 0, "reached_from_step_count": 0, - "timestamp": datetime(2021, 6, 9, 0, 0).replace(tzinfo=pytz.UTC), + "timestamp": datetime(2021, 6, 9, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, { "reached_to_step_count": 0, "conversion_rate": 0, "reached_from_step_count": 0, - "timestamp": datetime(2021, 6, 10, 0, 
0).replace(tzinfo=pytz.UTC), + "timestamp": datetime(2021, 6, 10, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, { "reached_to_step_count": 0, "conversion_rate": 0, "reached_from_step_count": 0, - "timestamp": datetime(2021, 6, 11, 0, 0).replace(tzinfo=pytz.UTC), + "timestamp": datetime(2021, 6, 11, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, { "reached_to_step_count": 0, "conversion_rate": 0, "reached_from_step_count": 0, - "timestamp": datetime(2021, 6, 12, 0, 0).replace(tzinfo=pytz.UTC), + "timestamp": datetime(2021, 6, 12, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, { "reached_to_step_count": 0, "conversion_rate": 0, "reached_from_step_count": 0, - "timestamp": datetime(2021, 6, 13, 0, 0).replace(tzinfo=pytz.UTC), + "timestamp": datetime(2021, 6, 13, 0, 0).replace(tzinfo=ZoneInfo("UTC")), }, ], ) @@ -531,8 +531,8 @@ def test_period_not_final(self): self.assertEqual(day["reached_to_step_count"], 0) self.assertEqual(day["conversion_rate"], 0) self.assertEqual( - day["timestamp"].replace(tzinfo=pytz.UTC), - (datetime(now.year, now.month, now.day) - timedelta(1)).replace(tzinfo=pytz.UTC), + day["timestamp"].replace(tzinfo=ZoneInfo("UTC")), + (datetime(now.year, now.month, now.day) - timedelta(1)).replace(tzinfo=ZoneInfo("UTC")), ) day = results[1] # today @@ -540,7 +540,8 @@ def test_period_not_final(self): self.assertEqual(day["reached_to_step_count"], 1) self.assertEqual(day["conversion_rate"], 100) self.assertEqual( - day["timestamp"].replace(tzinfo=pytz.UTC), datetime(now.year, now.month, now.day).replace(tzinfo=pytz.UTC) + day["timestamp"].replace(tzinfo=ZoneInfo("UTC")), + datetime(now.year, now.month, now.day).replace(tzinfo=ZoneInfo("UTC")), ) def test_two_runs_by_single_user_in_one_period(self): diff --git a/posthog/queries/properties_timeline/properties_timeline_event_query.py b/posthog/queries/properties_timeline/properties_timeline_event_query.py index 5f35a5d91869a..d3ca17eb70091 100644 --- 
a/posthog/queries/properties_timeline/properties_timeline_event_query.py +++ b/posthog/queries/properties_timeline/properties_timeline_event_query.py @@ -1,7 +1,6 @@ import datetime as dt from typing import Any, Dict, Optional, Tuple - -import pytz +from zoneinfo import ZoneInfo from posthog.models.entity.util import get_entity_filtering_params from posthog.models.filters.properties_timeline_filter import PropertiesTimelineFilter @@ -76,7 +75,7 @@ def _determine_should_join_sessions(self) -> None: def _get_date_filter(self) -> Tuple[str, Dict]: query_params: Dict[str, Any] = {} query_date_range = QueryDateRange(self._filter, self._team) - effective_timezone = pytz.timezone(self._team.timezone) + effective_timezone = ZoneInfo(self._team.timezone) # Get effective date range from QueryDateRange # We need to explicitly replace tzinfo in those datetimes with the team's timezone, because QueryDateRange # does not reliably make those datetimes timezone-aware. That's annoying, but it'd be a significant effort diff --git a/posthog/queries/query_date_range.py b/posthog/queries/query_date_range.py index 927d2766a2358..208bf0207843d 100644 --- a/posthog/queries/query_date_range.py +++ b/posthog/queries/query_date_range.py @@ -1,8 +1,8 @@ from datetime import datetime, timedelta from functools import cached_property from typing import Dict, Literal, Optional, Tuple +from zoneinfo import ZoneInfo -import pytz from dateutil.relativedelta import relativedelta from django.utils import timezone from posthog.models.filters.base_filter import BaseFilter @@ -82,7 +82,7 @@ def _now(self): return self._localize_to_team(timezone.now()) def _localize_to_team(self, target: datetime): - return target.astimezone(pytz.timezone(self._team.timezone)) + return target.astimezone(ZoneInfo(self._team.timezone)) @cached_property def date_to_clause(self): diff --git a/posthog/queries/retention/retention.py b/posthog/queries/retention/retention.py index dc8f41175521d..145ee1404c37b 100644 --- 
a/posthog/queries/retention/retention.py +++ b/posthog/queries/retention/retention.py @@ -1,7 +1,6 @@ from typing import Any, Dict, List, Optional, Tuple from urllib.parse import urlencode - -import pytz +from zoneinfo import ZoneInfo from posthog.constants import RETENTION_FIRST_TIME, RetentionQueryType from posthog.models.filters.retention_filter import RetentionFilter @@ -33,7 +32,6 @@ def run(self, filter: RetentionFilter, team: Team, *args, **kwargs) -> List[Dict def _get_retention_by_breakdown_values( self, filter: RetentionFilter, team: Team ) -> Dict[CohortKey, Dict[str, Any]]: - actor_query, actor_query_params = build_actor_activity_query( filter=filter, team=team, retention_events_query=self.event_query ) @@ -109,11 +107,8 @@ def construct_url(first_day): for day in range(filter.total_intervals - first_day) ], "label": "{} {}".format(filter.period, first_day), - "date": pytz.timezone(team.timezone).localize( - (filter.date_from + RetentionFilter.determine_time_delta(first_day, filter.period)[0]).replace( - tzinfo=None - ) - ), + "date": filter.date_from.replace(tzinfo=ZoneInfo(team.timezone)) + + RetentionFilter.determine_time_delta(first_day, filter.period)[0], "people_url": construct_url(first_day), } for first_day in range(filter.total_intervals) diff --git a/posthog/queries/session_recordings/session_recording_list_from_replay_summary.py b/posthog/queries/session_recordings/session_recording_list_from_replay_summary.py index 706fb958b08b9..fea81ced5d0eb 100644 --- a/posthog/queries/session_recordings/session_recording_list_from_replay_summary.py +++ b/posthog/queries/session_recordings/session_recording_list_from_replay_summary.py @@ -1,13 +1,13 @@ import dataclasses -import datetime import re -from datetime import timedelta -from typing import Any, Dict, List, NamedTuple, Tuple, Union -from typing import Literal +from datetime import datetime, timedelta +from typing import Any, Dict, List, Literal, NamedTuple, Tuple, Union + +from django.conf import 
settings from posthog.client import sync_execute -from posthog.constants import PropertyOperatorType -from posthog.constants import TREND_FILTER_TYPE_ACTIONS +from posthog.cloud_utils import is_cloud +from posthog.constants import TREND_FILTER_TYPE_ACTIONS, AvailableFeature, PropertyOperatorType from posthog.models import Entity from posthog.models.action.util import format_entity_filter from posthog.models.filters.mixins.utils import cached_property @@ -16,6 +16,7 @@ from posthog.models.property import PropertyGroup from posthog.models.property.util import parse_prop_grouped_clauses from posthog.models.team import PersonOnEventsMode +from posthog.models.team.team import Team from posthog.queries.event_query import EventQuery from posthog.queries.util import PersonPropertiesMode @@ -54,6 +55,22 @@ def _get_filter_by_provided_session_ids_clause( return f'AND "{column_name}" in %(session_ids)s', {"session_ids": recording_filters.session_ids} +def ttl_days(team: Team) -> int: + ttl_days = (get_instance_setting("RECORDINGS_TTL_WEEKS") or 3) * 7 + if is_cloud(): + # NOTE: We use Playlists as a proxy to see if they are subbed to Recordings + is_paid = team.organization.is_feature_available(AvailableFeature.RECORDINGS_PLAYLISTS) + ttl_days = settings.REPLAY_RETENTION_DAYS_MAX if is_paid else settings.REPLAY_RETENTION_DAYS_MIN + + # NOTE: The date we started reliably ingested data to blob storage + days_since_blob_ingestion = (datetime.now() - datetime(2023, 8, 1)).days + + if days_since_blob_ingestion < ttl_days: + ttl_days = days_since_blob_ingestion + + return ttl_days + + class PersonsQuery(EventQuery): _filter: SessionRecordingsFilter @@ -162,7 +179,10 @@ def __init__( super().__init__( **kwargs, ) - self.ttl_days = (get_instance_setting("RECORDINGS_TTL_WEEKS") or 3) * 7 + + @property + def ttl_days(self): + return ttl_days(self._team) _raw_events_query = """ SELECT @@ -276,7 +296,7 @@ def get_query(self, select_event_ids: bool = False) -> Tuple[str, Dict[str, Any] 
base_params = { "team_id": self._team_id, - "clamped_to_storage_ttl": (datetime.datetime.now() - datetime.timedelta(days=self.ttl_days)), + "clamped_to_storage_ttl": (datetime.now() - timedelta(days=self.ttl_days)), } _, recording_start_time_params = _get_recording_start_time_clause(self._filter) @@ -381,7 +401,10 @@ def __init__( super().__init__( **kwargs, ) - self.ttl_days = (get_instance_setting("RECORDINGS_TTL_WEEKS") or 3) * 7 + + @property + def ttl_days(self): + return ttl_days(self._team) _session_recordings_query: str = """ SELECT @@ -471,7 +494,7 @@ def get_query(self) -> Tuple[str, Dict[str, Any]]: "team_id": self._team_id, "limit": self.limit + 1, "offset": offset, - "clamped_to_storage_ttl": (datetime.datetime.now() - datetime.timedelta(days=self.ttl_days)), + "clamped_to_storage_ttl": (datetime.now() - timedelta(days=self.ttl_days)), } _, recording_start_time_params = _get_recording_start_time_clause(self._filter) diff --git a/posthog/queries/session_recordings/test/test_session_recording_list_from_session_replay.py b/posthog/queries/session_recordings/test/test_session_recording_list_from_session_replay.py index 9f920fd5aa523..88484f316e150 100644 --- a/posthog/queries/session_recordings/test/test_session_recording_list_from_session_replay.py +++ b/posthog/queries/session_recordings/test/test_session_recording_list_from_session_replay.py @@ -6,6 +6,8 @@ from freezegun.api import freeze_time from posthog.clickhouse.client import sync_execute +from posthog.cloud_utils import TEST_clear_cloud_cache +from posthog.constants import AvailableFeature from posthog.models import Person, Cohort from posthog.models.action import Action from posthog.models.action_step import ActionStep @@ -14,6 +16,7 @@ from posthog.models.team import Team from posthog.queries.session_recordings.session_recording_list_from_replay_summary import ( SessionRecordingListFromReplaySummary, + ttl_days, ) from posthog.queries.session_recordings.test.session_replay_sql import 
produce_replay_summary from posthog.test.base import ( @@ -21,8 +24,8 @@ ClickhouseTestMixin, _create_event, also_test_with_materialized_columns, - snapshot_clickhouse_queries, flush_persons_and_events, + snapshot_clickhouse_queries, ) @@ -617,6 +620,26 @@ def test_event_filter_has_ttl_applied_too(self): assert len(session_recordings) == 1 assert session_recordings[0]["session_id"] == session_id_one + @snapshot_clickhouse_queries + def test_ttl_days(self): + assert ttl_days(self.team) == 21 + + TEST_clear_cloud_cache() + with self.is_cloud(True): + # Far enough in the future from `days_since_blob_ingestion` but not paid + with freeze_time("2023-09-01T12:00:01Z"): + assert ttl_days(self.team) == 30 + + self.team.organization.available_features = [AvailableFeature.RECORDINGS_PLAYLISTS] + + # Far enough in the future from `days_since_blob_ingestion` but paid + with freeze_time("2023-12-01T12:00:01Z"): + assert ttl_days(self.team) == 90 + + # Not far enough in the future from `days_since_blob_ingestion` + with freeze_time("2023-09-05T12:00:01Z"): + assert ttl_days(self.team) == 35 + @snapshot_clickhouse_queries def test_event_filter_with_active_sessions( self, diff --git a/posthog/queries/session_recordings/test/test_session_replay_summaries.py b/posthog/queries/session_recordings/test/test_session_replay_summaries.py index 0a87ac7473e5a..0b3e361fa9511 100644 --- a/posthog/queries/session_recordings/test/test_session_replay_summaries.py +++ b/posthog/queries/session_recordings/test/test_session_replay_summaries.py @@ -1,7 +1,7 @@ from datetime import datetime, timedelta from uuid import uuid4 -import pytz +from zoneinfo import ZoneInfo from dateutil.parser import isoparse from freezegun import freeze_time @@ -147,8 +147,8 @@ def test_session_replay_summaries_can_be_queried(self): session_id, self.team.pk, str(self.user.distinct_id), - datetime(2023, 4, 27, 10, 0, 0, 309000, tzinfo=pytz.UTC), - datetime(2023, 4, 27, 19, 20, 24, 597000, tzinfo=pytz.UTC), + datetime(2023, 
4, 27, 10, 0, 0, 309000, tzinfo=ZoneInfo("UTC")), + datetime(2023, 4, 27, 19, 20, 24, 597000, tzinfo=ZoneInfo("UTC")), 33624, "https://first-url-ingested.com", 6, diff --git a/posthog/queries/test/test_retention.py b/posthog/queries/test/test_retention.py index 7f49141447b9a..42b7c596b14a9 100644 --- a/posthog/queries/test/test_retention.py +++ b/posthog/queries/test/test_retention.py @@ -2,7 +2,7 @@ import uuid from datetime import datetime -import pytz +from zoneinfo import ZoneInfo from django.test import override_settings from rest_framework import status @@ -36,15 +36,14 @@ def _create_action(**kwargs): def _create_signup_actions(team, user_and_timestamps): - for distinct_id, timestamp in user_and_timestamps: _create_event(team=team, event="sign up", distinct_id=distinct_id, timestamp=timestamp) sign_up_action = _create_action(team=team, name="sign up") return sign_up_action -def _date(day, hour=5, month=0): - return datetime(2020, 6 + month, 10 + day, hour).isoformat() +def _date(day, hour=5, month=0, minute=0): + return datetime(2020, 6 + month, 10 + day, hour, minute).isoformat() def pluck(list_of_dicts, key, child_key=None): @@ -53,7 +52,7 @@ def pluck(list_of_dicts, key, child_key=None): def _create_events(team, user_and_timestamps, event="$pageview"): i = 0 - for (distinct_id, timestamp, *properties_args) in user_and_timestamps: + for distinct_id, timestamp, *properties_args in user_and_timestamps: properties = {"$some_property": "value"} if i % 2 == 0 else {} if len(properties_args) == 1: properties.update(properties_args[0]) @@ -129,7 +128,7 @@ def test_day_interval(self): pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6", "Day 7", "Day 8", "Day 9", "Day 10"], ) - self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=pytz.UTC)) + self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC"))) self.assertEqual( pluck(result, "values", "count"), @@ -211,17 +210,17 @@ def 
test_month_interval(self): self.assertEqual( pluck(result, "date"), [ - datetime(2020, 1, 10, 0, tzinfo=pytz.UTC), - datetime(2020, 2, 10, 0, tzinfo=pytz.UTC), - datetime(2020, 3, 10, 0, tzinfo=pytz.UTC), - datetime(2020, 4, 10, 0, tzinfo=pytz.UTC), - datetime(2020, 5, 10, 0, tzinfo=pytz.UTC), - datetime(2020, 6, 10, 0, tzinfo=pytz.UTC), - datetime(2020, 7, 10, 0, tzinfo=pytz.UTC), - datetime(2020, 8, 10, 0, tzinfo=pytz.UTC), - datetime(2020, 9, 10, 0, tzinfo=pytz.UTC), - datetime(2020, 10, 10, 0, tzinfo=pytz.UTC), - datetime(2020, 11, 10, 0, tzinfo=pytz.UTC), + datetime(2020, 1, 10, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 2, 10, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 3, 10, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 4, 10, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 5, 10, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 7, 10, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 8, 10, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 9, 10, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 10, 10, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 11, 10, 0, tzinfo=ZoneInfo("UTC")), ], ) @@ -372,17 +371,17 @@ def test_month_interval_with_person_on_events_v2(self): self.assertEqual( pluck(result, "date"), [ - datetime(2020, 1, 10, 0, tzinfo=pytz.UTC), - datetime(2020, 2, 10, 0, tzinfo=pytz.UTC), - datetime(2020, 3, 10, 0, tzinfo=pytz.UTC), - datetime(2020, 4, 10, 0, tzinfo=pytz.UTC), - datetime(2020, 5, 10, 0, tzinfo=pytz.UTC), - datetime(2020, 6, 10, 0, tzinfo=pytz.UTC), - datetime(2020, 7, 10, 0, tzinfo=pytz.UTC), - datetime(2020, 8, 10, 0, tzinfo=pytz.UTC), - datetime(2020, 9, 10, 0, tzinfo=pytz.UTC), - datetime(2020, 10, 10, 0, tzinfo=pytz.UTC), - datetime(2020, 11, 10, 0, tzinfo=pytz.UTC), + datetime(2020, 1, 10, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 2, 10, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 3, 10, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 4, 10, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 5, 10, 0, 
tzinfo=ZoneInfo("UTC")), + datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 7, 10, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 8, 10, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 9, 10, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 10, 10, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 11, 10, 0, tzinfo=ZoneInfo("UTC")), ], ) @@ -425,13 +424,13 @@ def test_week_interval(self): self.assertEqual( pluck(result, "date"), [ - datetime(2020, 6, 7, 0, tzinfo=pytz.UTC), - datetime(2020, 6, 14, 0, tzinfo=pytz.UTC), - datetime(2020, 6, 21, 0, tzinfo=pytz.UTC), - datetime(2020, 6, 28, 0, tzinfo=pytz.UTC), - datetime(2020, 7, 5, 0, tzinfo=pytz.UTC), - datetime(2020, 7, 12, 0, tzinfo=pytz.UTC), - datetime(2020, 7, 19, 0, tzinfo=pytz.UTC), + datetime(2020, 6, 7, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 6, 14, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 6, 21, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 6, 28, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 7, 5, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 7, 12, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 7, 19, 0, tzinfo=ZoneInfo("UTC")), ], ) @@ -457,7 +456,7 @@ def test_hour_interval(self): ], ) - filter = RetentionFilter(data={"date_to": _date(0, hour=16), "period": "Hour"}) + filter = RetentionFilter(data={"date_to": _date(0, hour=16, minute=13), "period": "Hour"}) result = retention().run(filter, self.team, total_intervals=11) @@ -498,17 +497,17 @@ def test_hour_interval(self): self.assertEqual( pluck(result, "date"), [ - datetime(2020, 6, 10, 6, tzinfo=pytz.UTC), - datetime(2020, 6, 10, 7, tzinfo=pytz.UTC), - datetime(2020, 6, 10, 8, tzinfo=pytz.UTC), - datetime(2020, 6, 10, 9, tzinfo=pytz.UTC), - datetime(2020, 6, 10, 10, tzinfo=pytz.UTC), - datetime(2020, 6, 10, 11, tzinfo=pytz.UTC), - datetime(2020, 6, 10, 12, tzinfo=pytz.UTC), - datetime(2020, 6, 10, 13, tzinfo=pytz.UTC), - datetime(2020, 6, 10, 14, tzinfo=pytz.UTC), - datetime(2020, 6, 10, 15, tzinfo=pytz.UTC), - datetime(2020, 6, 10, 16, 
tzinfo=pytz.UTC), + datetime(2020, 6, 10, 6, tzinfo=ZoneInfo("UTC")), + datetime(2020, 6, 10, 7, tzinfo=ZoneInfo("UTC")), + datetime(2020, 6, 10, 8, tzinfo=ZoneInfo("UTC")), + datetime(2020, 6, 10, 9, tzinfo=ZoneInfo("UTC")), + datetime(2020, 6, 10, 10, tzinfo=ZoneInfo("UTC")), + datetime(2020, 6, 10, 11, tzinfo=ZoneInfo("UTC")), + datetime(2020, 6, 10, 12, tzinfo=ZoneInfo("UTC")), + datetime(2020, 6, 10, 13, tzinfo=ZoneInfo("UTC")), + datetime(2020, 6, 10, 14, tzinfo=ZoneInfo("UTC")), + datetime(2020, 6, 10, 15, tzinfo=ZoneInfo("UTC")), + datetime(2020, 6, 10, 16, tzinfo=ZoneInfo("UTC")), ], ) @@ -552,13 +551,13 @@ def test_interval_rounding(self): self.assertEqual( pluck(result, "date"), [ - datetime(2020, 6, 7, 0, tzinfo=pytz.UTC), - datetime(2020, 6, 14, 0, tzinfo=pytz.UTC), - datetime(2020, 6, 21, 0, tzinfo=pytz.UTC), - datetime(2020, 6, 28, 0, tzinfo=pytz.UTC), - datetime(2020, 7, 5, 0, tzinfo=pytz.UTC), - datetime(2020, 7, 12, 0, tzinfo=pytz.UTC), - datetime(2020, 7, 19, 0, tzinfo=pytz.UTC), + datetime(2020, 6, 7, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 6, 14, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 6, 21, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 6, 28, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 7, 5, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 7, 12, 0, tzinfo=ZoneInfo("UTC")), + datetime(2020, 7, 19, 0, tzinfo=ZoneInfo("UTC")), ], ) @@ -838,7 +837,7 @@ def test_retention_event_action(self): self.assertEqual(len(result), 7) self.assertEqual(pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"]) - self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=pytz.UTC)) + self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC"))) self.assertEqual( pluck(result, "values", "count"), @@ -871,7 +870,6 @@ def test_first_time_retention(self): ) def test_retention_with_properties(self): - _create_person(team_id=self.team.pk, distinct_ids=["person1", "alias1"]) 
_create_person(team_id=self.team.pk, distinct_ids=["person2"]) @@ -902,7 +900,7 @@ def test_retention_with_properties(self): pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6", "Day 7", "Day 8", "Day 9", "Day 10"], ) - self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=pytz.UTC)) + self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC"))) self.assertEqual( pluck(result, "values", "count"), @@ -956,7 +954,7 @@ def test_retention_with_user_properties(self): self.assertEqual(len(result), 7) self.assertEqual(pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"]) - self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=pytz.UTC)) + self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC"))) self.assertEqual( pluck(result, "values", "count"), [[1, 1, 1, 0, 0, 1, 1], [1, 1, 0, 0, 1, 1], [1, 0, 0, 1, 1], [0, 0, 0, 0], [0, 0, 0], [1, 1], [1]], @@ -1006,7 +1004,7 @@ def test_retention_with_user_properties_via_action(self): self.assertEqual(len(result), 7) self.assertEqual(pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"]) - self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=pytz.UTC)) + self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC"))) self.assertEqual( pluck(result, "values", "count"), [[1, 1, 1, 0, 0, 1, 1], [1, 1, 0, 0, 1, 1], [1, 0, 0, 1, 1], [0, 0, 0, 0], [0, 0, 0], [1, 1], [1]], @@ -1047,7 +1045,7 @@ def test_retention_action_start_point(self): self.assertEqual(len(result), 7) self.assertEqual(pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"]) - self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=pytz.UTC)) + self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC"))) self.assertEqual( pluck(result, "values", "count"), @@ -1086,7 +1084,7 @@ def 
test_filter_test_accounts(self): pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6", "Day 7", "Day 8", "Day 9", "Day 10"], ) - self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=pytz.UTC)) + self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC"))) self.assertEqual( pluck(result, "values", "count"), @@ -1156,7 +1154,6 @@ def _create_first_time_retention_events(self): return p1, p2, p3, p4 def test_retention_aggregate_by_distinct_id(self): - _create_person(team_id=self.team.pk, distinct_ids=["person1", "alias1"], properties={"test": "ok"}) _create_person(team_id=self.team.pk, distinct_ids=["person2"]) @@ -1196,7 +1193,7 @@ def test_retention_aggregate_by_distinct_id(self): "Day 10", ], ) - self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=pytz.UTC)) + self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC"))) self.assertEqual( pluck(result, "values", "count"), @@ -1270,7 +1267,7 @@ def test_timezones(self): ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6", "Day 7", "Day 8", "Day 9", "Day 10"], ) - self.assertEqual(result_pacific[0]["date"], pytz.timezone("US/Pacific").localize(datetime(2020, 6, 10))) + self.assertEqual(result_pacific[0]["date"], datetime(2020, 6, 10, tzinfo=ZoneInfo("US/Pacific"))) self.assertEqual(result_pacific[0]["date"].isoformat(), "2020-06-10T00:00:00-07:00") self.assertEqual( @@ -1337,7 +1334,7 @@ def test_day_interval_sampled(self): pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6", "Day 7", "Day 8", "Day 9", "Day 10"], ) - self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=pytz.UTC)) + self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC"))) self.assertEqual( pluck(result, "values", "count"), diff --git a/posthog/queries/test/test_trends.py b/posthog/queries/test/test_trends.py index 155afbe22c854..3cce0cfd1907a 100644 
--- a/posthog/queries/test/test_trends.py +++ b/posthog/queries/test/test_trends.py @@ -5,7 +5,7 @@ from unittest.mock import patch, ANY from urllib.parse import parse_qsl, urlparse -import pytz +from zoneinfo import ZoneInfo from django.conf import settings from django.core.cache import cache from django.test import override_settings @@ -1631,8 +1631,8 @@ def test_hour_interval(self): ) self.assertEqual( { - "date_from": datetime(2020, 11, 1, 12, tzinfo=pytz.UTC), - "date_to": datetime(2020, 11, 1, 13, tzinfo=pytz.UTC), + "date_from": datetime(2020, 11, 1, 12, tzinfo=ZoneInfo("UTC")), + "date_to": datetime(2020, 11, 1, 13, tzinfo=ZoneInfo("UTC")), "entity_id": "event_name", "entity_math": None, "entity_order": None, @@ -1687,8 +1687,8 @@ def test_day_interval(self): ) self.assertEqual( { - "date_from": datetime(2020, 11, 1, tzinfo=pytz.UTC), - "date_to": datetime(2020, 11, 1, 23, 59, 59, 999999, tzinfo=pytz.UTC), + "date_from": datetime(2020, 11, 1, tzinfo=ZoneInfo("UTC")), + "date_to": datetime(2020, 11, 1, 23, 59, 59, 999999, tzinfo=ZoneInfo("UTC")), "entity_id": "event_name", "entity_math": None, "entity_order": None, @@ -3837,8 +3837,8 @@ def test_breakdown_hour_interval(self): { "breakdown_type": "event", "breakdown_value": "Safari", - "date_from": datetime(2020, 11, 1, 12, tzinfo=pytz.UTC), - "date_to": datetime(2020, 11, 1, 13, tzinfo=pytz.UTC), + "date_from": datetime(2020, 11, 1, 12, tzinfo=ZoneInfo("UTC")), + "date_to": datetime(2020, 11, 1, 13, tzinfo=ZoneInfo("UTC")), "entity_id": "event_name", "entity_math": None, "entity_type": "events", @@ -5603,7 +5603,7 @@ def test_timezones_hourly_relative_from(self): timestamp="2020-01-05T08:01:01", ) - query_time = pytz.timezone(self.team.timezone).localize(datetime(2020, 1, 5, 10, 1, 1)) + query_time = datetime(2020, 1, 5, 10, 1, 1, tzinfo=ZoneInfo(self.team.timezone)) utc_offset_hours = query_time.tzinfo.utcoffset(query_time).total_seconds() // 3600 # type: ignore utc_offset_sign = "-" if utc_offset_hours < 0 
else "+" with freeze_time(query_time): @@ -5797,7 +5797,7 @@ def test_timezones_daily(self): timestamp="2020-01-06T00:30:01", # Shouldn't be included anywhere ) - with freeze_time(pytz.timezone(self.team.timezone).localize(datetime(2020, 1, 5, 5, 0))): + with freeze_time(datetime(2020, 1, 5, 5, 0, tzinfo=ZoneInfo(self.team.timezone))): response = Trends().run( Filter(data={"date_from": "-7d", "events": [{"id": "sign up", "name": "sign up"}]}, team=self.team), self.team, @@ -6013,7 +6013,7 @@ def test_timezones_weekly(self): self.team.save() # TRICKY: This is the previous UTC day in Asia/Tokyo - with freeze_time(pytz.timezone(self.team.timezone).localize(datetime(2020, 1, 26, 3, 0))): + with freeze_time(datetime(2020, 1, 26, 3, 0, tzinfo=ZoneInfo(self.team.timezone))): # Total volume query response_sunday = Trends().run( Filter( @@ -6034,7 +6034,7 @@ def test_timezones_weekly(self): self.team.save() # TRICKY: This is the previous UTC day in Asia/Tokyo - with freeze_time(pytz.timezone(self.team.timezone).localize(datetime(2020, 1, 26, 3, 0))): + with freeze_time(datetime(2020, 1, 26, 3, 0, tzinfo=ZoneInfo(self.team.timezone))): # Total volume query response_monday = Trends().run( Filter( diff --git a/posthog/queries/trends/breakdown.py b/posthog/queries/trends/breakdown.py index b5ffeb0b3c33a..7fe281a0c158c 100644 --- a/posthog/queries/trends/breakdown.py +++ b/posthog/queries/trends/breakdown.py @@ -4,7 +4,7 @@ from datetime import datetime from typing import Any, Callable, Dict, List, Optional, Tuple, Union -import pytz +from zoneinfo import ZoneInfo from django.forms import ValidationError from posthog.constants import ( @@ -294,7 +294,6 @@ def get_query(self) -> Tuple[str, Dict, Callable]: ) else: - breakdown_filter = breakdown_filter.format(**breakdown_filter_params) if self.entity.math in [WEEKLY_ACTIVE, MONTHLY_ACTIVE]: @@ -476,7 +475,6 @@ def _get_breakdown_value(self, breakdown: str) -> str: return breakdown_value def _get_histogram_breakdown_values(self, 
raw_breakdown_value: str, buckets: List[int]): - multi_if_conditionals = [] values_arr = [] @@ -599,8 +597,8 @@ def _get_persons_url( getattr(point_date, "hour", 0), getattr(point_date, "minute", 0), getattr(point_date, "second", 0), - tzinfo=getattr(point_date, "tzinfo", pytz.UTC), - ).astimezone(pytz.UTC) + tzinfo=getattr(point_date, "tzinfo", ZoneInfo("UTC")), + ).astimezone(ZoneInfo("UTC")) filter_params = filter.to_params() extra_params = { diff --git a/posthog/queries/trends/test/test_person.py b/posthog/queries/trends/test/test_person.py index 1d98dfd83b7c9..f68a4ed13b9bd 100644 --- a/posthog/queries/trends/test/test_person.py +++ b/posthog/queries/trends/test/test_person.py @@ -1,8 +1,11 @@ +import json +from datetime import datetime from uuid import UUID from dateutil.relativedelta import relativedelta from django.utils import timezone from freezegun.api import freeze_time +from unittest.case import skip from posthog.models.entity import Entity from posthog.models.filters import Filter @@ -15,12 +18,12 @@ ClickhouseTestMixin, _create_event, _create_person, + flush_persons_and_events, snapshot_clickhouse_queries, ) class TestPerson(ClickhouseTestMixin, APIBaseTest): - # Note: not using `@snapshot_clickhouse_queries` here because the ordering of the session_ids in the recording # query is not guaranteed, so adding it would lead to a flaky test. @freeze_time("2021-01-21T20:00:00.000Z") @@ -155,3 +158,145 @@ def test_group_query_includes_recording_events(self): } ], ) + + +class TestPersonIntegration(ClickhouseTestMixin, APIBaseTest): + def test_weekly_active_users(self): + for d in range(10, 18): # create a person and event for each day 10. Sep - 17. 
Sep + _create_person(team_id=self.team.pk, distinct_ids=[f"u_{d}"]) + _create_event( + event="pageview", + distinct_id=f"u_{d}", + team=self.team, + timestamp=datetime(2023, 9, d, 00, 42), + ) + flush_persons_and_events() + + # request weekly active users in the following week + filter = { + "insight": "TRENDS", + "date_from": "2023-09-17T13:37:00", + "date_to": "2023-09-24T13:37:00", + "events": json.dumps([{"id": "pageview", "math": "weekly_active"}]), + } + insight_response = self.client.get(f"/api/projects/{self.team.pk}/insights/trend", data=filter) + insight_response = (insight_response.json()).get("result") + + self.assertEqual(insight_response[0].get("labels")[5], "22-Sep-2023") + self.assertEqual(insight_response[0].get("data")[5], 2) + + persons_url = insight_response[0].get("persons_urls")[5].get("url") + response = self.client.get("/" + persons_url) + + data = response.json() + self.assertEqual(data.get("results")[0].get("count"), 2) + self.assertEqual([item["name"] for item in data.get("results")[0].get("people")], ["u_17", "u_16"]) + + def test_weekly_active_users_grouped_by_week(self): + for d in range(10, 18): # create a person and event for each day 10. Sep - 17. 
Sep + _create_person(team_id=self.team.pk, distinct_ids=[f"u_{d}"]) + _create_event( + event="pageview", + distinct_id=f"u_{d}", + team=self.team, + timestamp=datetime(2023, 9, d, 00, 42), + ) + flush_persons_and_events() + + # request weekly active users in the following week + filter = { + "insight": "TRENDS", + "date_from": "2023-09-17T13:37:00", + "date_to": "2023-09-24T13:37:00", + "interval": "week", + "events": json.dumps([{"id": "pageview", "math": "weekly_active"}]), + } + insight_response = self.client.get(f"/api/projects/{self.team.pk}/insights/trend", data=filter) + insight_response = (insight_response.json()).get("result") + + self.assertEqual(insight_response[0].get("labels")[0], "17-Sep-2023") + self.assertEqual(insight_response[0].get("data")[0], 7) + + persons_url = insight_response[0].get("persons_urls")[0].get("url") + response = self.client.get("/" + persons_url) + + data = response.json() + self.assertEqual(data.get("results")[0].get("count"), 7) + self.assertEqual( + [item["name"] for item in data.get("results")[0].get("people")], + ["u_17", "u_16", "u_15", "u_14", "u_13", "u_12", "u_11"], + ) + + def test_weekly_active_users_cumulative(self): + for d in range(10, 18): # create a person and event for each day 10. Sep - 17. 
Sep + _create_person(team_id=self.team.pk, distinct_ids=[f"u_{d}"]) + _create_event( + event="pageview", + distinct_id=f"u_{d}", + team=self.team, + timestamp=datetime(2023, 9, d, 00, 42), + ) + flush_persons_and_events() + + # request weekly active users in the following week + filter = { + "insight": "TRENDS", + "date_from": "2023-09-10T13:37:00", + "date_to": "2023-09-24T13:37:00", + "events": json.dumps([{"id": "pageview", "math": "weekly_active"}]), + "display": "ActionsLineGraphCumulative", + } + insight_response = self.client.get(f"/api/projects/{self.team.pk}/insights/trend", data=filter) + insight_response = (insight_response.json()).get("result") + + self.assertEqual(insight_response[0].get("labels")[1], "11-Sep-2023") + self.assertEqual(insight_response[0].get("data")[1], 3) + + persons_url = insight_response[0].get("persons_urls")[1].get("url") + response = self.client.get("/" + persons_url) + + data = response.json() + self.assertEqual(data.get("results")[0].get("count"), 2) + self.assertEqual([item["name"] for item in data.get("results")[0].get("people")], ["u_11", "u_10"]) + + @skip("see PR 17356") + def test_weekly_active_users_breakdown(self): + for d in range(10, 18): # create a person and event for each day 10. Sep - 17. 
Sep + _create_person(team_id=self.team.pk, distinct_ids=[f"a_{d}"]) + _create_person(team_id=self.team.pk, distinct_ids=[f"b_{d}"]) + _create_event( + event="pageview", + distinct_id=f"a_{d}", + properties={"some_prop": "a"}, + team=self.team, + timestamp=datetime(2023, 9, d, 00, 42), + ) + _create_event( + event="pageview", + distinct_id=f"b_{d}", + properties={"some_prop": "b"}, + team=self.team, + timestamp=datetime(2023, 9, d, 00, 42), + ) + flush_persons_and_events() + + # request weekly active users in the following week + filter = { + "insight": "TRENDS", + "date_from": "2023-09-17T13:37:00", + "date_to": "2023-09-24T13:37:00", + "events": json.dumps([{"id": "pageview", "math": "weekly_active"}]), + "breakdown": "some_prop", + } + insight_response = self.client.get(f"/api/projects/{self.team.pk}/insights/trend", data=filter) + insight_response = (insight_response.json()).get("result") + + self.assertEqual(insight_response[0].get("labels")[5], "22-Sep-2023") + # self.assertEqual(insight_response[0].get("data")[5], 2) + + persons_url = insight_response[0].get("persons_urls")[5].get("url") + response = self.client.get("/" + persons_url) + + data = response.json() + # self.assertEqual(data.get("results")[0].get("count"), 2) + self.assertEqual([item["name"] for item in data.get("results")[0].get("people")], ["a_17", "a_16"]) diff --git a/posthog/queries/trends/total_volume.py b/posthog/queries/trends/total_volume.py index 3d57726d7886b..154e105e77f92 100644 --- a/posthog/queries/trends/total_volume.py +++ b/posthog/queries/trends/total_volume.py @@ -1,5 +1,5 @@ import urllib.parse -from datetime import date, datetime +from datetime import date, datetime, timedelta from typing import Any, Callable, Dict, List, Tuple, Union from posthog.clickhouse.query_tagging import tag_queries @@ -256,6 +256,21 @@ def _parse(result: List) -> List: return _parse + def _offset_date_from(self, point_datetime: datetime, filter: Filter, entity: Entity) -> datetime | None: + if 
filter.display == TRENDS_CUMULATIVE: + return filter.date_from + elif entity.math in [WEEKLY_ACTIVE, MONTHLY_ACTIVE]: + # :TRICKY: We have to offset the date by one, as the final query already subtracts 7 days + return point_datetime + timedelta(days=1) + else: + return point_datetime + + def _offset_date_to(self, point_datetime: datetime, filter: Filter, entity: Entity, team: Team) -> datetime: + if entity.math in [WEEKLY_ACTIVE, MONTHLY_ACTIVE]: + return point_datetime + else: + return offset_time_series_date_by_interval(point_datetime, filter=filter, team=team) + def _get_persons_url( self, filter: Filter, entity: Entity, team: Team, point_datetimes: List[datetime] ) -> List[Dict[str, Any]]: @@ -267,8 +282,8 @@ def _get_persons_url( "entity_id": entity.id, "entity_type": entity.type, "entity_math": entity.math, - "date_from": filter.date_from if filter.display == TRENDS_CUMULATIVE else point_datetime, - "date_to": offset_time_series_date_by_interval(point_datetime, filter=filter, team=team), + "date_from": self._offset_date_from(point_datetime, filter=filter, entity=entity), + "date_to": self._offset_date_to(point_datetime, filter=filter, entity=entity, team=team), "entity_order": entity.order, } diff --git a/posthog/queries/trends/trends.py b/posthog/queries/trends/trends.py index e7a96b4eeca5e..940abba59fab5 100644 --- a/posthog/queries/trends/trends.py +++ b/posthog/queries/trends/trends.py @@ -3,8 +3,8 @@ from datetime import datetime, timedelta from itertools import accumulate from typing import Any, Callable, Dict, List, Optional, Tuple, cast +from zoneinfo import ZoneInfo -import pytz from dateutil import parser from django.db.models.query import Prefetch from sentry_sdk import push_scope @@ -49,7 +49,6 @@ def _get_sql_for_entity(self, filter: Filter, team: Team, entity: Entity) -> Tup # Use cached result even on refresh if team has strict caching enabled def get_cached_result(self, filter: Filter, team: Team) -> Optional[List[Dict[str, Any]]]: - if not 
team.strict_caching_enabled or filter.breakdown or filter.display != TRENDS_LINEAR: return None @@ -80,7 +79,7 @@ def is_present_timerange(self, cached_result: List[Dict[str, Any]], filter: Filt latest_date = cached_result[0]["days"][len(cached_result[0]["days"]) - 1] parsed_latest_date = parser.parse(latest_date) - parsed_latest_date = parsed_latest_date.replace(tzinfo=pytz.timezone(team.timezone)) + parsed_latest_date = parsed_latest_date.replace(tzinfo=ZoneInfo(team.timezone)) _is_present = is_filter_date_present(filter, parsed_latest_date) else: _is_present = False diff --git a/posthog/queries/trends/trends_event_query_base.py b/posthog/queries/trends/trends_event_query_base.py index 00ab25e98460c..93dd843349046 100644 --- a/posthog/queries/trends/trends_event_query_base.py +++ b/posthog/queries/trends/trends_event_query_base.py @@ -104,14 +104,14 @@ def _get_not_null_actor_condition(self) -> str: return f"""AND "$group_{self._entity.math_group_type_index}" != ''""" def _get_date_filter(self) -> Tuple[str, Dict]: - date_filter = "" - query_params: Dict[str, Any] = {} + date_query = "" + date_params: Dict[str, Any] = {} query_date_range = QueryDateRange(self._filter, self._team) parsed_date_from, date_from_params = query_date_range.date_from parsed_date_to, date_to_params = query_date_range.date_to - query_params.update(date_from_params) - query_params.update(date_to_params) + date_params.update(date_from_params) + date_params.update(date_to_params) self.parsed_date_from = parsed_date_from self.parsed_date_to = parsed_date_to @@ -121,17 +121,17 @@ def _get_date_filter(self) -> Tuple[str, Dict]: self._filter, self._entity, self._team_id ) self.active_user_params = active_user_format_params - query_params.update(active_user_query_params) + date_params.update(active_user_query_params) - date_filter = "{parsed_date_from_prev_range} {parsed_date_to}".format( + date_query = "{parsed_date_from_prev_range} {parsed_date_to}".format( **active_user_format_params, 
parsed_date_to=parsed_date_to ) else: - date_filter = "{parsed_date_from} {parsed_date_to}".format( + date_query = "{parsed_date_from} {parsed_date_to}".format( parsed_date_from=parsed_date_from, parsed_date_to=parsed_date_to ) - return date_filter, query_params + return date_query, date_params def _get_entity_query(self) -> Tuple[str, Dict]: entity_params, entity_format_params = get_entity_filtering_params( diff --git a/posthog/queries/trends/util.py b/posthog/queries/trends/util.py index a153e7f0eae56..46cd2a8041f32 100644 --- a/posthog/queries/trends/util.py +++ b/posthog/queries/trends/util.py @@ -1,8 +1,8 @@ import datetime from datetime import timedelta from typing import Any, Dict, List, Optional, Tuple, TypeVar +from zoneinfo import ZoneInfo -import pytz import structlog from dateutil.relativedelta import relativedelta from rest_framework.exceptions import ValidationError @@ -191,5 +191,5 @@ def offset_time_series_date_by_interval(date: datetime.datetime, *, filter: F, t else: # "day" is the default interval date = date.replace(hour=23, minute=59, second=59, microsecond=999999) if date.tzinfo is None: - date = pytz.timezone(team.timezone).localize(date) + date = date.replace(tzinfo=ZoneInfo(team.timezone)) return date diff --git a/posthog/queries/util.py b/posthog/queries/util.py index 936921732285b..ec218785b1dc9 100644 --- a/posthog/queries/util.py +++ b/posthog/queries/util.py @@ -3,7 +3,7 @@ from enum import Enum, auto from typing import Any, Dict, Optional, Union -import pytz +from zoneinfo import ZoneInfo from django.utils import timezone from rest_framework.exceptions import ValidationError @@ -67,16 +67,16 @@ class PersonPropertiesMode(Enum): "month": "toIntervalMonth", } + # TODO: refactor since this is only used in one spot now def format_ch_timestamp(timestamp: datetime, convert_to_timezone: Optional[str] = None): if convert_to_timezone: # Here we probably get a timestamp set to the beginning of the day (00:00), in UTC # We need to convert that 
UTC timestamp to the local timestamp (00:00 in US/Pacific for example) # Then we convert it back to UTC (08:00 in UTC) - if timestamp.tzinfo and timestamp.tzinfo != pytz.UTC: + if timestamp.tzinfo and timestamp.tzinfo != ZoneInfo("UTC"): raise ValidationError(detail="You must pass a timestamp with no timezone or UTC") - timestamp = pytz.timezone(convert_to_timezone).localize(timestamp.replace(tzinfo=None)).astimezone(pytz.UTC) - + timestamp = timestamp.replace(tzinfo=ZoneInfo(convert_to_timezone)).astimezone(ZoneInfo("UTC")) return timestamp.strftime("%Y-%m-%d %H:%M:%S") diff --git a/posthog/schema.py b/posthog/schema.py index 72b581e8c863c..b988b9618e0ef 100644 --- a/posthog/schema.py +++ b/posthog/schema.py @@ -369,17 +369,45 @@ class SavedInsightNode(BaseModel): class Config: extra = Extra.forbid + allowSorting: Optional[bool] = Field( + None, description="Can the user click on column headers to sort the table? (default: true)" + ) embedded: Optional[bool] = Field(None, description="Query is embedded inside another bordered component") + expandable: Optional[bool] = Field(None, description="Can expand row to show raw event data (default: true)") full: Optional[bool] = Field(None, description="Show with most visual options enabled. 
Used in insight scene.") kind: str = Field("SavedInsightNode", const=True) + propertiesViaUrl: Optional[bool] = Field(None, description="Link properties via the URL (default: false)") shortId: str + showActions: Optional[bool] = Field(None, description="Show the kebab menu at the end of the row") + showColumnConfigurator: Optional[bool] = Field( + None, description="Show a button to configure the table's columns if possible" + ) showCorrelationTable: Optional[bool] = None + showDateRange: Optional[bool] = Field(None, description="Show date range selector") + showElapsedTime: Optional[bool] = Field(None, description="Show the time it takes to run a query") + showEventFilter: Optional[bool] = Field( + None, description="Include an event filter above the table (EventsNode only)" + ) + showExport: Optional[bool] = Field(None, description="Show the export button") showFilters: Optional[bool] = None showHeader: Optional[bool] = None + showHogQLEditor: Optional[bool] = Field(None, description="Include a HogQL query editor above HogQL tables") showLastComputation: Optional[bool] = None showLastComputationRefresh: Optional[bool] = None + showOpenEditorButton: Optional[bool] = Field( + None, description="Show a button to open the current query as a new insight. 
(default: true)" + ) + showPersistentColumnConfigurator: Optional[bool] = Field( + None, description="Show a button to configure and persist the table's default columns if possible" + ) + showPropertyFilter: Optional[bool] = Field(None, description="Include a property filter above the table") + showReload: Optional[bool] = Field(None, description="Show a reload button") showResults: Optional[bool] = None + showResultsTable: Optional[bool] = Field(None, description="Show a results table") + showSavedQueries: Optional[bool] = Field(None, description="Shows a list of saved queries") + showSearch: Optional[bool] = Field(None, description="Include a free text search field (PersonsNode only)") showTable: Optional[bool] = None + showTimings: Optional[bool] = Field(None, description="Show a detailed query timing breakdown") class SessionPropertyFilter(BaseModel): @@ -594,6 +622,14 @@ class Config: toggledLifecycles: Optional[List[LifecycleToggle]] = None +class LifecycleQueryResponse(BaseModel): + class Config: + extra = Extra.forbid + + result: List[Dict[str, Any]] + timings: Optional[List[QueryTiming]] = None + + class PersonPropertyFilter(BaseModel): class Config: extra = Extra.forbid @@ -1143,6 +1179,7 @@ class Config: PropertyGroupFilter, ] ] = Field(None, description="Property filters for all series") + response: Optional[LifecycleQueryResponse] = None samplingFactor: Optional[float] = Field(None, description="Sampling rate") series: List[Union[EventsNode, ActionsNode]] = Field(..., description="Events and actions to include") diff --git a/posthog/session_recordings/realtime_snapshots.py b/posthog/session_recordings/realtime_snapshots.py index 429566418aa1f..ea19b3b405a2b 100644 --- a/posthog/session_recordings/realtime_snapshots.py +++ b/posthog/session_recordings/realtime_snapshots.py @@ -38,6 +38,10 @@ def get_realtime_snapshots(team_id: str, session_id: str, attempt_count=0) -> Op key = get_key(team_id, session_id) encoded_snapshots = redis.zrange(key, 0, -1, 
withscores=True) + # We always publish as it could be that a rebalance has occured and the consumer doesn't know it should be + # sending data to redis + redis.publish(SUBSCRIPTION_CHANNEL, json.dumps({"team_id": team_id, "session_id": session_id})) + if not encoded_snapshots and attempt_count < ATTEMPT_MAX: logger.info( "No realtime snapshots found, publishing subscription and retrying", diff --git a/posthog/settings/ingestion.py b/posthog/settings/ingestion.py index 6f5664470c88e..a970414f04fd1 100644 --- a/posthog/settings/ingestion.py +++ b/posthog/settings/ingestion.py @@ -38,3 +38,6 @@ "Environment variable REPLAY_EVENTS_NEW_CONSUMER_RATIO is not between 0 and 1. Setting to 0 to be safe." ) REPLAY_EVENTS_NEW_CONSUMER_RATIO = 0 + +REPLAY_RETENTION_DAYS_MIN = 30 +REPLAY_RETENTION_DAYS_MAX = 90 diff --git a/posthog/settings/web.py b/posthog/settings/web.py index 9f61e9ee11e82..ca0c035765a7e 100644 --- a/posthog/settings/web.py +++ b/posthog/settings/web.py @@ -219,7 +219,7 @@ STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles") STATIC_URL = "/static/" STATICFILES_DIRS = [os.path.join(BASE_DIR, "frontend/dist"), os.path.join(BASE_DIR, "posthog/year_in_posthog/images")] -STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage" +STATICFILES_STORAGE = "whitenoise.storage.ManifestStaticFilesStorage" AUTH_USER_MODEL = "posthog.User" diff --git a/posthog/tasks/test/test_usage_report.py b/posthog/tasks/test/test_usage_report.py index e43c7ddb817fc..636b3e76b93e9 100644 --- a/posthog/tasks/test/test_usage_report.py +++ b/posthog/tasks/test/test_usage_report.py @@ -20,6 +20,7 @@ from posthog.hogql.query import execute_hogql_query from posthog.models import Organization, Plugin, Team from posthog.models.dashboard import Dashboard +from posthog.models.event.util import create_event from posthog.models.feature_flag import FeatureFlag from posthog.models.group.util import create_group from posthog.models.group_type_mapping import GroupTypeMapping @@ 
-27,7 +28,16 @@ from posthog.models.sharing_configuration import SharingConfiguration from posthog.schema import EventsQuery from posthog.session_recordings.test.test_factory import create_snapshot -from posthog.tasks.usage_report import capture_event, send_all_org_usage_reports +from posthog.tasks.usage_report import ( + _get_all_org_reports, + _get_all_usage_data_as_team_rows, + _get_full_org_usage_report, + _get_full_org_usage_report_as_dict, + _get_team_report, + capture_event, + get_instance_metadata, + send_all_org_usage_reports, +) from posthog.test.base import ( APIBaseTest, ClickhouseDestroyTablesMixin, @@ -37,8 +47,7 @@ flush_persons_and_events, snapshot_clickhouse_queries, ) -from posthog.models.event.util import create_event -from posthog.utils import get_machine_id +from posthog.utils import get_machine_id, get_previous_day logger = structlog.get_logger(__name__) @@ -296,16 +305,20 @@ def _test_usage_report(self) -> List[dict]: self._create_plugin("Installed but not enabled", False) self._create_plugin("Installed and enabled", True) - all_reports = send_all_org_usage_reports(dry_run=False) + period = get_previous_day() + period_start, period_end = period + all_reports = _get_all_org_reports(period_start, period_end) + report = _get_full_org_usage_report_as_dict( + _get_full_org_usage_report(all_reports[str(self.organization.id)], get_instance_metadata(period)) + ) - report = all_reports[0] assert report["table_sizes"] assert report["table_sizes"]["posthog_event"] < 10**7 # <10MB assert report["table_sizes"]["posthog_sessionrecordingevent"] < 10**7 # <10MB assert len(all_reports) == 2 - expectation = [ + expectations = [ { "deployment_infrastructure": "tests", "realm": "hosted-clickhouse", @@ -316,12 +329,12 @@ def _test_usage_report(self) -> List[dict]: "site_url": "http://test.posthog.com", "product": "open source", "helm": {}, - "clickhouse_version": all_reports[0]["clickhouse_version"], + "clickhouse_version": report["clickhouse_version"], 
"users_who_logged_in": [], "users_who_logged_in_count": 0, "users_who_signed_up": [], "users_who_signed_up_count": 0, - "table_sizes": all_reports[0]["table_sizes"], + "table_sizes": report["table_sizes"], "plugins_installed": {"Installed and enabled": 1, "Installed but not enabled": 1}, "plugins_enabled": {"Installed and enabled": 1}, "instance_tag": "none", @@ -441,12 +454,12 @@ def _test_usage_report(self) -> List[dict]: "site_url": "http://test.posthog.com", "product": "open source", "helm": {}, - "clickhouse_version": all_reports[1]["clickhouse_version"], + "clickhouse_version": report["clickhouse_version"], "users_who_logged_in": [], "users_who_logged_in_count": 0, "users_who_signed_up": [], "users_who_signed_up_count": 0, - "table_sizes": all_reports[1]["table_sizes"], + "table_sizes": report["table_sizes"], "plugins_installed": {"Installed and enabled": 1, "Installed but not enabled": 1}, "plugins_enabled": {"Installed and enabled": 1}, "instance_tag": "none", @@ -525,18 +538,22 @@ def _test_usage_report(self) -> List[dict]: }, ] - for item in expectation: + for item in expectations: item.update(**self.expected_properties) # tricky: list could be in different order assert len(all_reports) == 2 - for report in all_reports: - if report["organization_id"] == expectation[0]["organization_id"]: - assert report == expectation[0] - elif report["organization_id"] == expectation[1]["organization_id"]: - assert report == expectation[1] + full_reports = [] + for expectation in expectations: + report = _get_full_org_usage_report_as_dict( + _get_full_org_usage_report( + all_reports[expectation["organization_id"]], get_instance_metadata(period) + ) + ) + assert report == expectation + full_reports.append(report) - return all_reports + return full_reports @freeze_time("2022-01-10T00:01:00Z") @patch("os.environ", {"DEPLOYMENT": "tests"}) @@ -552,6 +569,8 @@ def test_unlicensed_usage_report(self, mock_post: MagicMock, mock_client: MagicM mock_client.return_value = 
mock_posthog all_reports = self._test_usage_report() + with self.settings(SITE_URL="http://test.posthog.com"): + send_all_org_usage_reports() # Check calls to other services mock_post.assert_not_called() @@ -597,20 +616,21 @@ def test_usage_report_hogql_queries(self) -> None: run_events_query(query=EventsQuery(select=["event"], limit=50), team=self.team) sync_execute("SYSTEM FLUSH LOGS") - all_reports = send_all_org_usage_reports(dry_run=False, at=str(now() + relativedelta(days=1))) - assert len(all_reports) == 1 + period = get_previous_day(at=now() + relativedelta(days=1)) + period_start, period_end = period + all_reports = _get_all_usage_data_as_team_rows(period_start, period_end) - report = all_reports[0]["teams"][str(self.team.pk)] + report = _get_team_report(all_reports, self.team) # We selected 200 or 50 rows, but still read 100 rows to return the query - assert report["hogql_app_rows_read"] == 100 - assert report["hogql_app_bytes_read"] > 0 - assert report["event_explorer_app_rows_read"] == 100 - assert report["event_explorer_app_bytes_read"] > 0 + assert report.hogql_app_rows_read == 100 + assert report.hogql_app_bytes_read > 0 + assert report.event_explorer_app_rows_read == 100 + assert report.event_explorer_app_bytes_read > 0 # Nothing was read via the API - assert report["hogql_api_rows_read"] == 0 - assert report["event_explorer_api_rows_read"] == 0 + assert report.hogql_api_rows_read == 0 + assert report.event_explorer_api_rows_read == 0 @freeze_time("2022-01-10T00:01:00Z") @@ -680,21 +700,19 @@ def test_usage_report_decide_requests(self, billing_task_mock: MagicMock, postho flush_persons_and_events() with self.settings(DECIDE_BILLING_ANALYTICS_TOKEN="correct"): - all_reports = send_all_org_usage_reports(dry_run=False, at=str(now() + relativedelta(days=1))) + period = get_previous_day(at=now() + relativedelta(days=1)) + period_start, period_end = period + all_reports = _get_all_org_reports(period_start, period_end) assert len(all_reports) == 3 - 
all_reports = sorted(all_reports, key=lambda x: x["organization_name"]) - - assert [all_reports["organization_name"] for all_reports in all_reports] == [ - "Org 1", - "Org 2", - "PostHog", - ] - - org_1_report = all_reports[0] - org_2_report = all_reports[1] - analytics_report = all_reports[2] + org_1_report = _get_full_org_usage_report_as_dict( + _get_full_org_usage_report(all_reports[str(self.org_1.id)], get_instance_metadata(period)) + ) + assert org_1_report["organization_name"] == "Org 1" + org_2_report = _get_full_org_usage_report_as_dict( + _get_full_org_usage_report(all_reports[str(self.org_2.id)], get_instance_metadata(period)) + ) assert org_1_report["organization_name"] == "Org 1" assert org_1_report["decide_requests_count_in_period"] == 11 @@ -721,26 +739,6 @@ def test_usage_report_decide_requests(self, billing_task_mock: MagicMock, postho assert org_2_report["teams"]["5"]["billable_feature_flag_requests_count_in_period"] == 0 assert org_2_report["teams"]["5"]["billable_feature_flag_requests_count_in_month"] == 0 - # billing service calls are made only for org1, which has decide requests, and analytics org - which has decide usage events. 
- calls = [ - call( - org_1_report["organization_id"], - ANY, - ), - call( - analytics_report["organization_id"], - ANY, - ), - ] - assert billing_task_mock.delay.call_count == 2 - billing_task_mock.delay.assert_has_calls( - calls, - any_order=True, - ) - - # capture usage report calls are made for all orgs - assert posthog_capture_mock.return_value.capture.call_count == 3 - @patch("posthog.tasks.usage_report.Client") @patch("posthog.tasks.usage_report.send_report_to_billing_service") def test_usage_report_local_evaluation_requests( @@ -792,21 +790,19 @@ def test_usage_report_local_evaluation_requests( flush_persons_and_events() with self.settings(DECIDE_BILLING_ANALYTICS_TOKEN="correct"): - all_reports = send_all_org_usage_reports(dry_run=False, at=str(now() + relativedelta(days=1))) + period = get_previous_day(at=now() + relativedelta(days=1)) + period_start, period_end = period + all_reports = _get_all_org_reports(period_start, period_end) assert len(all_reports) == 3 - all_reports = sorted(all_reports, key=lambda x: x["organization_name"]) - - assert [all_reports["organization_name"] for all_reports in all_reports] == [ - "Org 1", - "Org 2", - "PostHog", - ] - - org_1_report = all_reports[0] - org_2_report = all_reports[1] - analytics_report = all_reports[2] + org_1_report = _get_full_org_usage_report_as_dict( + _get_full_org_usage_report(all_reports[str(self.org_1.id)], get_instance_metadata(period)) + ) + assert org_1_report["organization_name"] == "Org 1" + org_2_report = _get_full_org_usage_report_as_dict( + _get_full_org_usage_report(all_reports[str(self.org_2.id)], get_instance_metadata(period)) + ) assert org_1_report["organization_name"] == "Org 1" assert org_1_report["local_evaluation_requests_count_in_period"] == 11 @@ -837,26 +833,6 @@ def test_usage_report_local_evaluation_requests( assert org_2_report["teams"]["5"]["billable_feature_flag_requests_count_in_period"] == 0 assert 
org_2_report["teams"]["5"]["billable_feature_flag_requests_count_in_month"] == 0 - # billing service calls are made only for org1, which has decide requests, and analytics org - which has local evaluation usage events. - calls = [ - call( - org_1_report["organization_id"], - ANY, - ), - call( - analytics_report["organization_id"], - ANY, - ), - ] - assert billing_task_mock.delay.call_count == 2 - billing_task_mock.delay.assert_has_calls( - calls, - any_order=True, - ) - - # capture usage report calls are made for all orgs - assert posthog_capture_mock.return_value.capture.call_count == 3 - class SendUsageTest(LicensedTestMixin, ClickhouseDestroyTablesMixin, APIBaseTest): def setUp(self) -> None: @@ -907,18 +883,26 @@ def test_send_usage(self, mock_post: MagicMock, mock_client: MagicMock) -> None: mock_posthog = MagicMock() mock_client.return_value = mock_posthog - all_reports = send_all_org_usage_reports(dry_run=False) + period = get_previous_day() + period_start, period_end = period + all_reports = _get_all_org_reports(period_start, period_end) + full_report_as_dict = _get_full_org_usage_report_as_dict( + _get_full_org_usage_report(all_reports[str(self.organization.id)], get_instance_metadata(period)) + ) + send_all_org_usage_reports(dry_run=False) license = License.objects.first() assert license token = build_billing_token(license, self.organization) mock_post.assert_called_once_with( - f"{BILLING_SERVICE_URL}/api/usage", json=all_reports[0], headers={"Authorization": f"Bearer {token}"} + f"{BILLING_SERVICE_URL}/api/usage", + json=full_report_as_dict, + headers={"Authorization": f"Bearer {token}"}, ) mock_posthog.capture.assert_any_call( get_machine_id(), "organization usage report", - {**all_reports[0], "scope": "machine"}, + {**full_report_as_dict, "scope": "machine"}, groups={"instance": ANY}, timestamp=None, ) @@ -935,18 +919,26 @@ def test_send_usage_cloud(self, mock_post: MagicMock, mock_client: MagicMock) -> mock_posthog = MagicMock() 
mock_client.return_value = mock_posthog - all_reports = send_all_org_usage_reports(dry_run=False) + period = get_previous_day() + period_start, period_end = period + all_reports = _get_all_org_reports(period_start, period_end) + full_report_as_dict = _get_full_org_usage_report_as_dict( + _get_full_org_usage_report(all_reports[str(self.organization.id)], get_instance_metadata(period)) + ) + send_all_org_usage_reports(dry_run=False) license = License.objects.first() assert license token = build_billing_token(license, self.organization) mock_post.assert_called_once_with( - f"{BILLING_SERVICE_URL}/api/usage", json=all_reports[0], headers={"Authorization": f"Bearer {token}"} + f"{BILLING_SERVICE_URL}/api/usage", + json=full_report_as_dict, + headers={"Authorization": f"Bearer {token}"}, ) mock_posthog.capture.assert_any_call( self.user.distinct_id, "organization usage report", - {**all_reports[0], "scope": "user"}, + {**full_report_as_dict, "scope": "user"}, groups={"instance": "http://localhost:8000", "organization": str(self.organization.id)}, timestamp=None, ) diff --git a/posthog/tasks/usage_report.py b/posthog/tasks/usage_report.py index 45f82b9882374..612213086629e 100644 --- a/posthog/tasks/usage_report.py +++ b/posthog/tasks/usage_report.py @@ -534,6 +534,281 @@ def convert_team_usage_rows_to_dict(rows: List[Union[dict, Tuple[int, int]]]) -> return team_id_map +def _get_all_usage_data(period_start: datetime, period_end: datetime) -> Dict[str, Any]: + """ + Gets all usage data for the specified period. 
Clickhouse is good at counting things so + we count across all teams rather than doing it one by one + """ + return dict( + teams_with_event_count_lifetime=get_teams_with_event_count_lifetime(), + teams_with_event_count_in_period=get_teams_with_billable_event_count_in_period( + period_start, period_end, count_distinct=True + ), + teams_with_event_count_in_month=get_teams_with_billable_event_count_in_period( + period_start.replace(day=1), period_end + ), + teams_with_event_count_with_groups_in_period=get_teams_with_event_count_with_groups_in_period( + period_start, period_end + ), + # teams_with_event_count_by_lib=get_teams_with_event_count_by_lib(period_start, period_end), + # teams_with_event_count_by_name=get_teams_with_event_count_by_name(period_start, period_end), + teams_with_recording_count_in_period=get_teams_with_recording_count_in_period(period_start, period_end), + teams_with_recording_count_total=get_teams_with_recording_count_total(), + teams_with_decide_requests_count_in_period=get_teams_with_feature_flag_requests_count_in_period( + period_start, period_end, FlagRequestType.DECIDE + ), + teams_with_decide_requests_count_in_month=get_teams_with_feature_flag_requests_count_in_period( + period_start.replace(day=1), period_end, FlagRequestType.DECIDE + ), + teams_with_local_evaluation_requests_count_in_period=get_teams_with_feature_flag_requests_count_in_period( + period_start, period_end, FlagRequestType.LOCAL_EVALUATION + ), + teams_with_local_evaluation_requests_count_in_month=get_teams_with_feature_flag_requests_count_in_period( + period_start.replace(day=1), period_end, FlagRequestType.LOCAL_EVALUATION + ), + teams_with_group_types_total=list( + GroupTypeMapping.objects.values("team_id").annotate(total=Count("id")).order_by("team_id") + ), + teams_with_dashboard_count=list( + Dashboard.objects.values("team_id").annotate(total=Count("id")).order_by("team_id") + ), + teams_with_dashboard_template_count=list( + 
Dashboard.objects.filter(creation_mode="template") + .values("team_id") + .annotate(total=Count("id")) + .order_by("team_id") + ), + teams_with_dashboard_shared_count=list( + Dashboard.objects.filter(sharingconfiguration__enabled=True) + .values("team_id") + .annotate(total=Count("id")) + .order_by("team_id") + ), + teams_with_dashboard_tagged_count=list( + Dashboard.objects.filter(tagged_items__isnull=False) + .values("team_id") + .annotate(total=Count("id")) + .order_by("team_id") + ), + teams_with_ff_count=list(FeatureFlag.objects.values("team_id").annotate(total=Count("id")).order_by("team_id")), + teams_with_ff_active_count=list( + FeatureFlag.objects.filter(active=True).values("team_id").annotate(total=Count("id")).order_by("team_id") + ), + teams_with_hogql_app_bytes_read=get_teams_with_hogql_metric( + period_start, + period_end, + metric="read_bytes", + query_types=["hogql_query", "HogQLQuery"], + access_method="", + ), + teams_with_hogql_app_rows_read=get_teams_with_hogql_metric( + period_start, + period_end, + metric="read_rows", + query_types=["hogql_query", "HogQLQuery"], + access_method="", + ), + teams_with_hogql_app_duration_ms=get_teams_with_hogql_metric( + period_start, + period_end, + metric="query_duration_ms", + query_types=["hogql_query", "HogQLQuery"], + access_method="", + ), + teams_with_hogql_api_bytes_read=get_teams_with_hogql_metric( + period_start, + period_end, + metric="read_bytes", + query_types=["hogql_query", "HogQLQuery"], + access_method="personal_api_key", + ), + teams_with_hogql_api_rows_read=get_teams_with_hogql_metric( + period_start, + period_end, + metric="read_rows", + query_types=["hogql_query", "HogQLQuery"], + access_method="personal_api_key", + ), + teams_with_hogql_api_duration_ms=get_teams_with_hogql_metric( + period_start, + period_end, + metric="query_duration_ms", + query_types=["hogql_query", "HogQLQuery"], + access_method="personal_api_key", + ), + 
teams_with_event_explorer_app_bytes_read=get_teams_with_hogql_metric( + period_start, + period_end, + metric="read_bytes", + query_types=["EventsQuery"], + access_method="", + ), + teams_with_event_explorer_app_rows_read=get_teams_with_hogql_metric( + period_start, + period_end, + metric="read_rows", + query_types=["EventsQuery"], + access_method="", + ), + teams_with_event_explorer_app_duration_ms=get_teams_with_hogql_metric( + period_start, + period_end, + metric="query_duration_ms", + query_types=["EventsQuery"], + access_method="", + ), + teams_with_event_explorer_api_bytes_read=get_teams_with_hogql_metric( + period_start, + period_end, + metric="read_bytes", + query_types=["EventsQuery"], + access_method="personal_api_key", + ), + teams_with_event_explorer_api_rows_read=get_teams_with_hogql_metric( + period_start, + period_end, + metric="read_rows", + query_types=["EventsQuery"], + access_method="personal_api_key", + ), + teams_with_event_explorer_api_duration_ms=get_teams_with_hogql_metric( + period_start, + period_end, + metric="query_duration_ms", + query_types=["EventsQuery"], + access_method="personal_api_key", + ), + ) + + +def _get_all_usage_data_as_team_rows(period_start: datetime, period_end: datetime) -> Dict[str, Any]: + """ + Gets all usage data for the specified period as a map of team_id -> value. This makes it faster + to access the data than looping over all_data to find what we want. 
+ """ + all_data = _get_all_usage_data(period_start, period_end) + # convert it to a map of team_id -> value + for key, rows in all_data.items(): + all_data[key] = convert_team_usage_rows_to_dict(rows) + return all_data + + +def _get_teams_for_usage_reports() -> Sequence[Team]: + return list( + Team.objects.select_related("organization").exclude( + Q(organization__for_internal_metrics=True) | Q(is_demo=True) + ) + ) + + +def _get_team_report(all_data: Dict[str, Any], team: Team) -> UsageReportCounters: + decide_requests_count_in_month = all_data["teams_with_decide_requests_count_in_month"].get(team.id, 0) + decide_requests_count_in_period = all_data["teams_with_decide_requests_count_in_period"].get(team.id, 0) + local_evaluation_requests_count_in_period = all_data["teams_with_local_evaluation_requests_count_in_period"].get( + team.id, 0 + ) + local_evaluation_requests_count_in_month = all_data["teams_with_local_evaluation_requests_count_in_month"].get( + team.id, 0 + ) + return UsageReportCounters( + event_count_lifetime=all_data["teams_with_event_count_lifetime"].get(team.id, 0), + event_count_in_period=all_data["teams_with_event_count_in_period"].get(team.id, 0), + event_count_in_month=all_data["teams_with_event_count_in_month"].get(team.id, 0), + event_count_with_groups_in_period=all_data["teams_with_event_count_with_groups_in_period"].get(team.id, 0), + # event_count_by_lib: Di all_data["teams_with_#"].get(team.id, 0), + # event_count_by_name: Di all_data["teams_with_#"].get(team.id, 0), + recording_count_in_period=all_data["teams_with_recording_count_in_period"].get(team.id, 0), + recording_count_total=all_data["teams_with_recording_count_total"].get(team.id, 0), + group_types_total=all_data["teams_with_group_types_total"].get(team.id, 0), + decide_requests_count_in_period=decide_requests_count_in_period, + decide_requests_count_in_month=decide_requests_count_in_month, + local_evaluation_requests_count_in_period=local_evaluation_requests_count_in_period, + 
local_evaluation_requests_count_in_month=local_evaluation_requests_count_in_month, + billable_feature_flag_requests_count_in_month=decide_requests_count_in_month + + (local_evaluation_requests_count_in_month * 10), + billable_feature_flag_requests_count_in_period=decide_requests_count_in_period + + (local_evaluation_requests_count_in_period * 10), + dashboard_count=all_data["teams_with_dashboard_count"].get(team.id, 0), + dashboard_template_count=all_data["teams_with_dashboard_template_count"].get(team.id, 0), + dashboard_shared_count=all_data["teams_with_dashboard_shared_count"].get(team.id, 0), + dashboard_tagged_count=all_data["teams_with_dashboard_tagged_count"].get(team.id, 0), + ff_count=all_data["teams_with_ff_count"].get(team.id, 0), + ff_active_count=all_data["teams_with_ff_active_count"].get(team.id, 0), + hogql_app_bytes_read=all_data["teams_with_hogql_app_bytes_read"].get(team.id, 0), + hogql_app_rows_read=all_data["teams_with_hogql_app_rows_read"].get(team.id, 0), + hogql_app_duration_ms=all_data["teams_with_hogql_app_duration_ms"].get(team.id, 0), + hogql_api_bytes_read=all_data["teams_with_hogql_api_bytes_read"].get(team.id, 0), + hogql_api_rows_read=all_data["teams_with_hogql_api_rows_read"].get(team.id, 0), + hogql_api_duration_ms=all_data["teams_with_hogql_api_duration_ms"].get(team.id, 0), + event_explorer_app_bytes_read=all_data["teams_with_event_explorer_app_bytes_read"].get(team.id, 0), + event_explorer_app_rows_read=all_data["teams_with_event_explorer_app_rows_read"].get(team.id, 0), + event_explorer_app_duration_ms=all_data["teams_with_event_explorer_app_duration_ms"].get(team.id, 0), + event_explorer_api_bytes_read=all_data["teams_with_event_explorer_api_bytes_read"].get(team.id, 0), + event_explorer_api_rows_read=all_data["teams_with_event_explorer_api_rows_read"].get(team.id, 0), + event_explorer_api_duration_ms=all_data["teams_with_event_explorer_api_duration_ms"].get(team.id, 0), + ) + + +def _add_team_report_to_org_reports( + 
org_reports: Dict[str, OrgReport], team: Team, team_report: UsageReportCounters, period_start: datetime +) -> None: + org_id = str(team.organization.id) + if org_id not in org_reports: + org_report = OrgReport( + date=period_start.strftime("%Y-%m-%d"), + organization_id=org_id, + organization_name=team.organization.name, + organization_created_at=team.organization.created_at.isoformat(), + organization_user_count=get_org_user_count(org_id), + team_count=1, + teams={str(team.id): team_report}, + **dataclasses.asdict(team_report), # Clone the team report as the basis + ) + org_reports[org_id] = org_report + else: + org_report = org_reports[org_id] + org_report.teams[str(team.id)] = team_report + org_report.team_count += 1 + + # Iterate on all fields of the UsageReportCounters and add the values from the team report to the org report + for field in dataclasses.fields(UsageReportCounters): + if hasattr(team_report, field.name): + setattr( + org_report, + field.name, + getattr(org_report, field.name) + getattr(team_report, field.name), + ) + + +def _get_all_org_reports(period_start: datetime, period_end: datetime) -> Dict[str, OrgReport]: + all_data = _get_all_usage_data_as_team_rows(period_start, period_end) + + teams = _get_teams_for_usage_reports() + + org_reports: Dict[str, OrgReport] = {} + + print("Generating reports for teams...") # noqa T201 + time_now = datetime.now() + for team in teams: + team_report = _get_team_report(all_data, team) + _add_team_report_to_org_reports(org_reports, team, team_report, period_start) + + time_since = datetime.now() - time_now + print(f"Generating reports for teams took {time_since.total_seconds()} seconds.") # noqa T201 + return org_reports + + +def _get_full_org_usage_report(org_report: OrgReport, instance_metadata: InstanceMetadata) -> FullUsageReport: + return FullUsageReport( + **dataclasses.asdict(org_report), + **dataclasses.asdict(instance_metadata), + ) + + +def _get_full_org_usage_report_as_dict(full_report: 
FullUsageReport) -> Dict[str, Any]: + return dataclasses.asdict(full_report) + + @app.task(ignore_result=True, max_retries=3, autoretry_for=(Exception,)) def send_all_org_usage_reports( dry_run: bool = False, @@ -541,7 +816,7 @@ def send_all_org_usage_reports( capture_event_name: Optional[str] = None, skip_capture_event: bool = False, only_organization_id: Optional[str] = None, -) -> List[dict]: # Dict[str, OrgReport]: +) -> None: capture_event_name = capture_event_name or "organization usage report" at_date = parser.parse(at) if at else None @@ -550,250 +825,8 @@ def send_all_org_usage_reports( instance_metadata = get_instance_metadata(period) - # Clickhouse is good at counting things so we count across all teams rather than doing it one by one try: - all_data = dict( - teams_with_event_count_lifetime=get_teams_with_event_count_lifetime(), - teams_with_event_count_in_period=get_teams_with_billable_event_count_in_period( - period_start, period_end, count_distinct=True - ), - teams_with_event_count_in_month=get_teams_with_billable_event_count_in_period( - period_start.replace(day=1), period_end - ), - teams_with_event_count_with_groups_in_period=get_teams_with_event_count_with_groups_in_period( - period_start, period_end - ), - # teams_with_event_count_by_lib=get_teams_with_event_count_by_lib(period_start, period_end), - # teams_with_event_count_by_name=get_teams_with_event_count_by_name(period_start, period_end), - teams_with_recording_count_in_period=get_teams_with_recording_count_in_period(period_start, period_end), - teams_with_recording_count_total=get_teams_with_recording_count_total(), - teams_with_decide_requests_count_in_period=get_teams_with_feature_flag_requests_count_in_period( - period_start, period_end, FlagRequestType.DECIDE - ), - teams_with_decide_requests_count_in_month=get_teams_with_feature_flag_requests_count_in_period( - period_start.replace(day=1), period_end, FlagRequestType.DECIDE - ), - 
teams_with_local_evaluation_requests_count_in_period=get_teams_with_feature_flag_requests_count_in_period( - period_start, period_end, FlagRequestType.LOCAL_EVALUATION - ), - teams_with_local_evaluation_requests_count_in_month=get_teams_with_feature_flag_requests_count_in_period( - period_start.replace(day=1), period_end, FlagRequestType.LOCAL_EVALUATION - ), - teams_with_group_types_total=list( - GroupTypeMapping.objects.values("team_id").annotate(total=Count("id")).order_by("team_id") - ), - teams_with_dashboard_count=list( - Dashboard.objects.values("team_id").annotate(total=Count("id")).order_by("team_id") - ), - teams_with_dashboard_template_count=list( - Dashboard.objects.filter(creation_mode="template") - .values("team_id") - .annotate(total=Count("id")) - .order_by("team_id") - ), - teams_with_dashboard_shared_count=list( - Dashboard.objects.filter(sharingconfiguration__enabled=True) - .values("team_id") - .annotate(total=Count("id")) - .order_by("team_id") - ), - teams_with_dashboard_tagged_count=list( - Dashboard.objects.filter(tagged_items__isnull=False) - .values("team_id") - .annotate(total=Count("id")) - .order_by("team_id") - ), - teams_with_ff_count=list( - FeatureFlag.objects.values("team_id").annotate(total=Count("id")).order_by("team_id") - ), - teams_with_ff_active_count=list( - FeatureFlag.objects.filter(active=True) - .values("team_id") - .annotate(total=Count("id")) - .order_by("team_id") - ), - teams_with_hogql_app_bytes_read=get_teams_with_hogql_metric( - period_start, - period_end, - metric="read_bytes", - query_types=["hogql_query", "HogQLQuery"], - access_method="", - ), - teams_with_hogql_app_rows_read=get_teams_with_hogql_metric( - period_start, - period_end, - metric="read_rows", - query_types=["hogql_query", "HogQLQuery"], - access_method="", - ), - teams_with_hogql_app_duration_ms=get_teams_with_hogql_metric( - period_start, - period_end, - metric="query_duration_ms", - query_types=["hogql_query", "HogQLQuery"], - access_method="", 
- ), - teams_with_hogql_api_bytes_read=get_teams_with_hogql_metric( - period_start, - period_end, - metric="read_bytes", - query_types=["hogql_query", "HogQLQuery"], - access_method="personal_api_key", - ), - teams_with_hogql_api_rows_read=get_teams_with_hogql_metric( - period_start, - period_end, - metric="read_rows", - query_types=["hogql_query", "HogQLQuery"], - access_method="personal_api_key", - ), - teams_with_hogql_api_duration_ms=get_teams_with_hogql_metric( - period_start, - period_end, - metric="query_duration_ms", - query_types=["hogql_query", "HogQLQuery"], - access_method="personal_api_key", - ), - teams_with_event_explorer_app_bytes_read=get_teams_with_hogql_metric( - period_start, - period_end, - metric="read_bytes", - query_types=["EventsQuery"], - access_method="", - ), - teams_with_event_explorer_app_rows_read=get_teams_with_hogql_metric( - period_start, - period_end, - metric="read_rows", - query_types=["EventsQuery"], - access_method="", - ), - teams_with_event_explorer_app_duration_ms=get_teams_with_hogql_metric( - period_start, - period_end, - metric="query_duration_ms", - query_types=["EventsQuery"], - access_method="", - ), - teams_with_event_explorer_api_bytes_read=get_teams_with_hogql_metric( - period_start, - period_end, - metric="read_bytes", - query_types=["EventsQuery"], - access_method="personal_api_key", - ), - teams_with_event_explorer_api_rows_read=get_teams_with_hogql_metric( - period_start, - period_end, - metric="read_rows", - query_types=["EventsQuery"], - access_method="personal_api_key", - ), - teams_with_event_explorer_api_duration_ms=get_teams_with_hogql_metric( - period_start, - period_end, - metric="query_duration_ms", - query_types=["EventsQuery"], - access_method="personal_api_key", - ), - ) - - # The data is all as raw rows which will dramatically slow down the upcoming loop - # so we convert it to a map of team_id -> value - for key, rows in all_data.items(): - all_data[key] = convert_team_usage_rows_to_dict(rows) - - 
teams: Sequence[Team] = list( - Team.objects.select_related("organization").exclude( - Q(organization__for_internal_metrics=True) | Q(is_demo=True) - ) - ) - - org_reports: Dict[str, OrgReport] = {} - - print("Generating reports for teams...") # noqa T201 - time_now = datetime.now() - for team in teams: - decide_requests_count_in_month = all_data["teams_with_decide_requests_count_in_month"].get(team.id, 0) - decide_requests_count_in_period = all_data["teams_with_decide_requests_count_in_period"].get(team.id, 0) - local_evaluation_requests_count_in_period = all_data[ - "teams_with_local_evaluation_requests_count_in_period" - ].get(team.id, 0) - local_evaluation_requests_count_in_month = all_data[ - "teams_with_local_evaluation_requests_count_in_month" - ].get(team.id, 0) - - team_report = UsageReportCounters( - event_count_lifetime=all_data["teams_with_event_count_lifetime"].get(team.id, 0), - event_count_in_period=all_data["teams_with_event_count_in_period"].get(team.id, 0), - event_count_in_month=all_data["teams_with_event_count_in_month"].get(team.id, 0), - event_count_with_groups_in_period=all_data["teams_with_event_count_with_groups_in_period"].get( - team.id, 0 - ), - # event_count_by_lib: Di all_data["teams_with_#"].get(team.id, 0), - # event_count_by_name: Di all_data["teams_with_#"].get(team.id, 0), - recording_count_in_period=all_data["teams_with_recording_count_in_period"].get(team.id, 0), - recording_count_total=all_data["teams_with_recording_count_total"].get(team.id, 0), - group_types_total=all_data["teams_with_group_types_total"].get(team.id, 0), - decide_requests_count_in_period=decide_requests_count_in_period, - decide_requests_count_in_month=decide_requests_count_in_month, - local_evaluation_requests_count_in_period=local_evaluation_requests_count_in_period, - local_evaluation_requests_count_in_month=local_evaluation_requests_count_in_month, - billable_feature_flag_requests_count_in_month=decide_requests_count_in_month - + 
(local_evaluation_requests_count_in_month * 10), - billable_feature_flag_requests_count_in_period=decide_requests_count_in_period - + (local_evaluation_requests_count_in_period * 10), - dashboard_count=all_data["teams_with_dashboard_count"].get(team.id, 0), - dashboard_template_count=all_data["teams_with_dashboard_template_count"].get(team.id, 0), - dashboard_shared_count=all_data["teams_with_dashboard_shared_count"].get(team.id, 0), - dashboard_tagged_count=all_data["teams_with_dashboard_tagged_count"].get(team.id, 0), - ff_count=all_data["teams_with_ff_count"].get(team.id, 0), - ff_active_count=all_data["teams_with_ff_active_count"].get(team.id, 0), - hogql_app_bytes_read=all_data["teams_with_hogql_app_bytes_read"].get(team.id, 0), - hogql_app_rows_read=all_data["teams_with_hogql_app_rows_read"].get(team.id, 0), - hogql_app_duration_ms=all_data["teams_with_hogql_app_duration_ms"].get(team.id, 0), - hogql_api_bytes_read=all_data["teams_with_hogql_api_bytes_read"].get(team.id, 0), - hogql_api_rows_read=all_data["teams_with_hogql_api_rows_read"].get(team.id, 0), - hogql_api_duration_ms=all_data["teams_with_hogql_api_duration_ms"].get(team.id, 0), - event_explorer_app_bytes_read=all_data["teams_with_event_explorer_app_bytes_read"].get(team.id, 0), - event_explorer_app_rows_read=all_data["teams_with_event_explorer_app_rows_read"].get(team.id, 0), - event_explorer_app_duration_ms=all_data["teams_with_event_explorer_app_duration_ms"].get(team.id, 0), - event_explorer_api_bytes_read=all_data["teams_with_event_explorer_api_bytes_read"].get(team.id, 0), - event_explorer_api_rows_read=all_data["teams_with_event_explorer_api_rows_read"].get(team.id, 0), - event_explorer_api_duration_ms=all_data["teams_with_event_explorer_api_duration_ms"].get(team.id, 0), - ) - - org_id = str(team.organization.id) - - if org_id not in org_reports: - org_report = OrgReport( - date=period_start.strftime("%Y-%m-%d"), - organization_id=org_id, - organization_name=team.organization.name, - 
organization_created_at=team.organization.created_at.isoformat(), - organization_user_count=get_org_user_count(org_id), - team_count=1, - teams={str(team.id): team_report}, - **dataclasses.asdict(team_report), # Clone the team report as the basis - ) - org_reports[org_id] = org_report - else: - org_report = org_reports[org_id] - org_report.teams[str(team.id)] = team_report - org_report.team_count += 1 - - # Iterate on all fields of the UsageReportCounters and add the values from the team report to the org report - for field in dataclasses.fields(UsageReportCounters): - if hasattr(team_report, field.name): - setattr( - org_report, - field.name, - getattr(org_report, field.name) + getattr(team_report, field.name), - ) - time_since = datetime.now() - time_now - print(f"Generating reports for teams took {time_since.total_seconds()} seconds.") # noqa T201 - - all_reports = [] + org_reports = _get_all_org_reports(period_start, period_end) print("Sending usage reports to PostHog and Billing...") # noqa T201 time_now = datetime.now() @@ -803,12 +836,8 @@ def send_all_org_usage_reports( if only_organization_id and only_organization_id != org_id: continue - full_report = FullUsageReport( - **dataclasses.asdict(org_report), - **dataclasses.asdict(instance_metadata), - ) - full_report_dict = dataclasses.asdict(full_report) - all_reports.append(full_report_dict) + full_report = _get_full_org_usage_report(org_report, instance_metadata) + full_report_dict = _get_full_org_usage_report_as_dict(full_report) if dry_run: continue @@ -823,7 +852,6 @@ def send_all_org_usage_reports( send_report_to_billing_service.delay(org_id, full_report_dict) time_since = datetime.now() - time_now print(f"Sending usage reports to PostHog and Billing took {time_since.total_seconds()} seconds.") # noqa T201 - return all_reports except Exception as err: capture_exception(err) raise err diff --git a/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py 
b/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py index 392534fc8999c..b4e51bc9f8b8e 100644 --- a/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py @@ -3,11 +3,13 @@ import gzip import itertools import json +import os from random import randint from unittest import mock from uuid import uuid4 import boto3 +import botocore.exceptions import brotli import pytest from django.conf import settings @@ -40,6 +42,18 @@ TEST_ROOT_BUCKET = "test-batch-exports" + +def check_valid_credentials() -> bool: + """Check if there are valid AWS credentials in the environment.""" + sts = boto3.client("sts") + try: + sts.get_caller_identity() + except botocore.exceptions.ClientError: + return False + else: + return True + + create_test_client = functools.partial(boto3.client, endpoint_url=settings.OBJECT_STORAGE_ENDPOINT) @@ -422,6 +436,165 @@ async def test_s3_export_workflow_with_minio_bucket( assert_events_in_s3(s3_client, bucket_name, prefix, events, compression, exclude_events) +@pytest.mark.skipif( + "S3_TEST_BUCKET" not in os.environ or not check_valid_credentials(), + reason="AWS credentials not set in environment or missing S3_TEST_BUCKET variable", +) +@pytest.mark.django_db +@pytest.mark.asyncio +@pytest.mark.parametrize( + "interval,compression,encryption,exclude_events", + itertools.product(["hour", "day"], [None, "gzip", "brotli"], [None, "AES256", "aws:kms"], [None, ["test-exclude"]]), +) +async def test_s3_export_workflow_with_s3_bucket(interval, compression, encryption, exclude_events): + """Test S3 Export Workflow end-to-end by using an S3 bucket. + + The S3_TEST_BUCKET environment variable is used to set the name of the bucket for this test. + This test will be skipped if no valid AWS credentials exist, or if the S3_TEST_BUCKET environment + variable is not set. 
+ + The workflow should update the batch export run status to completed and produce the expected + records to the S3 bucket. + """ + bucket_name = os.getenv("S3_TEST_BUCKET") + kms_key_id = os.getenv("S3_TEST_KMS_KEY_ID") + prefix = f"posthog-events-{str(uuid4())}" + destination_data = { + "type": "S3", + "config": { + "bucket_name": bucket_name, + "region": "us-east-1", + "prefix": prefix, + "aws_access_key_id": "object_storage_root_user", + "aws_secret_access_key": "object_storage_root_password", + "compression": compression, + "exclude_events": exclude_events, + "encryption": encryption, + "kms_key_id": kms_key_id if encryption == "aws:kms" else None, + }, + } + + batch_export_data = { + "name": "my-production-s3-bucket-destination", + "destination": destination_data, + "interval": interval, + } + + organization = await acreate_organization("test") + team = await acreate_team(organization=organization) + batch_export = await acreate_batch_export( + team_id=team.pk, + name=batch_export_data["name"], + destination_data=batch_export_data["destination"], + interval=batch_export_data["interval"], + ) + + events: list[EventValues] = [ + { + "uuid": str(uuid4()), + "event": "test", + "timestamp": "2023-04-25 13:30:00.000000", + "created_at": "2023-04-25 13:30:00.000000", + "inserted_at": "2023-04-25 13:30:00.000000", + "_timestamp": "2023-04-25 13:30:00", + "person_id": str(uuid4()), + "person_properties": {"$browser": "Chrome", "$os": "Mac OS X"}, + "team_id": team.pk, + "properties": {"$browser": "Chrome", "$os": "Mac OS X"}, + "distinct_id": str(uuid4()), + "elements_chain": "this is a comman, separated, list, of css selectors(?)", + }, + { + "uuid": str(uuid4()), + "event": "test-exclude", + "timestamp": "2023-04-25 14:29:00.000000", + "created_at": "2023-04-25 14:29:00.000000", + "inserted_at": "2023-04-25 14:29:00.000000", + "_timestamp": "2023-04-25 14:29:00", + "person_id": str(uuid4()), + "person_properties": {"$browser": "Chrome", "$os": "Mac OS X"}, + 
"team_id": team.pk, + "properties": {"$browser": "Chrome", "$os": "Mac OS X"}, + "distinct_id": str(uuid4()), + "elements_chain": "this is a comman, separated, list, of css selectors(?)", + }, + ] + + if interval == "day": + # Add an event outside the hour range but within the day range to ensure it's exported too. + events_outside_hour: list[EventValues] = [ + { + "uuid": str(uuid4()), + "event": "test", + "timestamp": "2023-04-25 00:30:00.000000", + "created_at": "2023-04-25 00:30:00.000000", + "inserted_at": "2023-04-25 00:30:00.000000", + "_timestamp": "2023-04-25 00:30:00", + "person_id": str(uuid4()), + "person_properties": {"$browser": "Chrome", "$os": "Mac OS X"}, + "team_id": team.pk, + "properties": {"$browser": "Chrome", "$os": "Mac OS X"}, + "distinct_id": str(uuid4()), + "elements_chain": "this is a comman, separated, list, of css selectors(?)", + } + ] + events += events_outside_hour + + ch_client = ClickHouseClient( + url=settings.CLICKHOUSE_HTTP_URL, + user=settings.CLICKHOUSE_USER, + password=settings.CLICKHOUSE_PASSWORD, + database=settings.CLICKHOUSE_DATABASE, + ) + + # Insert some data into the `sharded_events` table. 
+ await insert_events( + client=ch_client, + events=events, + ) + + workflow_id = str(uuid4()) + inputs = S3BatchExportInputs( + team_id=team.pk, + batch_export_id=str(batch_export.id), + data_interval_end="2023-04-25 14:30:00.000000", + interval=interval, + **batch_export.destination.config, + ) + + s3_client = boto3.client("s3") + + def create_s3_client(*args, **kwargs): + """Mock function to return an already initialized S3 client.""" + return s3_client + + async with await WorkflowEnvironment.start_time_skipping() as activity_environment: + async with Worker( + activity_environment.client, + task_queue=settings.TEMPORAL_TASK_QUEUE, + workflows=[S3BatchExportWorkflow], + activities=[create_export_run, insert_into_s3_activity, update_export_run_status], + workflow_runner=UnsandboxedWorkflowRunner(), + ): + with mock.patch("posthog.temporal.workflows.s3_batch_export.boto3.client", side_effect=create_s3_client): + await activity_environment.client.execute_workflow( + S3BatchExportWorkflow.run, + inputs, + id=workflow_id, + task_queue=settings.TEMPORAL_TASK_QUEUE, + retry_policy=RetryPolicy(maximum_attempts=1), + execution_timeout=dt.timedelta(seconds=10), + ) + + runs = await afetch_batch_export_runs(batch_export_id=batch_export.id) + assert len(runs) == 1 + + run = runs[0] + assert run.status == "Completed" + + assert_events_in_s3(s3_client, bucket_name, prefix, events, compression, exclude_events) + + @pytest.mark.django_db @pytest.mark.asyncio @pytest.mark.parametrize("compression", [None, "gzip"]) diff --git a/posthog/temporal/workflows/postgres_batch_export.py b/posthog/temporal/workflows/postgres_batch_export.py index a396f361b77c5..b81c7496b3adb 100644 --- a/posthog/temporal/workflows/postgres_batch_export.py +++ b/posthog/temporal/workflows/postgres_batch_export.py @@ -58,9 +58,10 @@ def copy_tsv_to_postgres(tsv_file, postgres_connection, schema: str, table_name: tsv_file.seek(0) with postgres_connection.cursor() as cursor: + cursor.execute(sql.SQL("SET 
search_path TO {schema}").format(schema=sql.Identifier(schema))) cursor.copy_from( tsv_file, - sql.Identifier(schema, table_name).as_string(postgres_connection), + table_name, null="", columns=schema_columns, ) @@ -245,7 +246,11 @@ async def run(self, inputs: PostgresBatchExportInputs): initial_interval=dt.timedelta(seconds=10), maximum_interval=dt.timedelta(seconds=120), maximum_attempts=10, - non_retryable_error_types=[], + non_retryable_error_types=[ + # Raised on errors that are related to database operation. + # For example: unexpected disconnect, database or other object not found. + "OperationalError" + ], ), ) diff --git a/posthog/temporal/workflows/s3_batch_export.py b/posthog/temporal/workflows/s3_batch_export.py index 028b6f422e26f..13bbf183e5d06 100644 --- a/posthog/temporal/workflows/s3_batch_export.py +++ b/posthog/temporal/workflows/s3_batch_export.py @@ -85,15 +85,20 @@ class S3MultiPartUploadState(typing.NamedTuple): parts: list[dict[str, str | int]] +Part = dict[str, str | int] + + class S3MultiPartUpload: """An S3 multi-part upload.""" - def __init__(self, s3_client, bucket_name, key): + def __init__(self, s3_client, bucket_name: str, key: str, encryption: str | None, kms_key_id: str | None): self.s3_client = s3_client self.bucket_name = bucket_name self.key = key - self.upload_id = None - self.parts = [] + self.encryption = encryption + self.kms_key_id = kms_key_id + self.upload_id: str | None = None + self.parts: list[Part] = [] def to_state(self) -> S3MultiPartUploadState: """Produce state tuple that can be used to resume this S3MultiPartUpload.""" @@ -119,10 +124,21 @@ def start(self) -> str: if self.is_upload_in_progress() is True: raise UploadAlreadyInProgressError(self.upload_id) - multipart_response = self.s3_client.create_multipart_upload(Bucket=self.bucket_name, Key=self.key) - self.upload_id = multipart_response["UploadId"] + optional_kwargs = {} + if self.encryption: + optional_kwargs["ServerSideEncryption"] = self.encryption + if 
self.kms_key_id: + optional_kwargs["SSEKMSKeyId"] = self.kms_key_id - return self.upload_id + multipart_response = self.s3_client.create_multipart_upload( + Bucket=self.bucket_name, + Key=self.key, + **optional_kwargs, + ) + upload_id: str = multipart_response["UploadId"] + self.upload_id = upload_id + + return upload_id def continue_from_state(self, state: S3MultiPartUploadState): """Continue this S3MultiPartUpload from a previous state.""" @@ -230,6 +246,8 @@ class S3InsertInputs: aws_secret_access_key: str | None = None compression: str | None = None exclude_events: list[str] | None = None + encryption: str | None = None + kms_key_id: str | None = None def initialize_and_resume_multipart_upload(inputs: S3InsertInputs) -> tuple[S3MultiPartUpload, str]: @@ -241,7 +259,7 @@ def initialize_and_resume_multipart_upload(inputs: S3InsertInputs) -> tuple[S3Mu aws_access_key_id=inputs.aws_access_key_id, aws_secret_access_key=inputs.aws_secret_access_key, ) - s3_upload = S3MultiPartUpload(s3_client, inputs.bucket_name, key) + s3_upload = S3MultiPartUpload(s3_client, inputs.bucket_name, key, inputs.encryption, inputs.kms_key_id) details = activity.info().heartbeat_details @@ -442,6 +460,8 @@ async def run(self, inputs: S3BatchExportInputs): data_interval_end=data_interval_end.isoformat(), compression=inputs.compression, exclude_events=inputs.exclude_events, + encryption=inputs.encryption, + kms_key_id=inputs.kms_key_id, ) try: await workflow.execute_activity( diff --git a/posthog/test/test_datetime.py b/posthog/test/test_datetime.py new file mode 100644 index 0000000000000..b25fa7098f9b5 --- /dev/null +++ b/posthog/test/test_datetime.py @@ -0,0 +1,33 @@ +from datetime import datetime, timezone + +from posthog.datetime import start_of_hour, start_of_day, end_of_day, start_of_week, start_of_month + + +def test_start_of_hour(): + assert start_of_hour(datetime.fromisoformat("2023-02-08T12:05:23+00:00")) == datetime.fromisoformat( + "2023-02-08T12:00:00+00:00" + ) + + +def 
test_start_of_day(): + assert start_of_day(datetime.fromisoformat("2023-02-08T12:05:23+00:00")) == datetime.fromisoformat( + "2023-02-08T00:00:00+00:00" + ) + + +def test_end_of_day(): + assert end_of_day(datetime.fromisoformat("2023-02-08T12:05:23+00:00")) == datetime( + 2023, 2, 8, 23, 59, 59, 999999, tzinfo=timezone.utc + ) + + +def test_start_of_week(): + assert start_of_week(datetime.fromisoformat("2023-02-08T12:05:23+00:00")) == datetime.fromisoformat( + "2023-02-05T00:00:00+00:00" + ) + + +def test_start_of_month(): + assert start_of_month(datetime.fromisoformat("2023-02-08T12:05:23+00:00")) == datetime.fromisoformat( + "2023-02-01T00:00:00+00:00" + ) diff --git a/posthog/test/test_decorators.py b/posthog/test/test_decorators.py index 9cbb181c3f261..a6bc176072377 100644 --- a/posthog/test/test_decorators.py +++ b/posthog/test/test_decorators.py @@ -1,12 +1,18 @@ -from posthog.decorators import cached_by_filters +from datetime import datetime +from freezegun import freeze_time +from posthog.decorators import cached_by_filters, is_stale from django.core.cache import cache from rest_framework.test import APIRequestFactory from rest_framework.viewsets import GenericViewSet from rest_framework.response import Response +from posthog.models.filters.filter import Filter +from posthog.models.filters.path_filter import PathFilter +from posthog.models.filters.retention_filter import RetentionFilter +from posthog.models.filters.stickiness_filter import StickinessFilter -from posthog.test.base import APIBaseTest +from posthog.test.base import APIBaseTest, BaseTest from posthog.api import router factory = APIRequestFactory() @@ -22,7 +28,7 @@ def calculate_with_filters(self, request): return {"result": "bla"} -class TestDecorators(APIBaseTest): +class TestCachedByFiltersDecorator(APIBaseTest): def setUp(self) -> None: cache.clear() @@ -61,3 +67,161 @@ def test_cache_bypass_with_invalidation_key_param(self) -> None: response = self.client.get(f"/api/dummy", 
data={"cache_invalidation_key": "abc"}).json() assert response["is_cached"] is False + + def test_discards_stale_response(self) -> None: + with freeze_time("2023-02-08T12:05:23Z"): + # cache the result + self.client.get(f"/api/dummy").json() + + with freeze_time("2023-02-10T12:00:00Z"): + # we don't need to add filters, since -7d with a + # daily interval is the default + response = self.client.get(f"/api/dummy").json() + assert response["is_cached"] is False + + +class TestIsStaleHelper(BaseTest): + cached_response = {"last_refresh": datetime.fromisoformat("2023-02-08T12:05:23+00:00"), "result": "bla"} + + def test_keeps_fresh_hourly_result(self) -> None: + with freeze_time("2023-02-08T12:59:59Z"): + filter = Filter(data={"interval": "hour"}) + + stale = is_stale(self.team, filter, self.cached_response) + + assert stale is False + + def test_discards_stale_hourly_result(self) -> None: + with freeze_time("2023-02-08T13:00:00Z"): + filter = Filter(data={"interval": "hour"}) + + stale = is_stale(self.team, filter, self.cached_response) + + assert stale is True + + def test_keeps_fresh_daily_result(self) -> None: + with freeze_time("2023-02-08T23:59:59Z"): + filter = Filter(data={"interval": "day"}) + + stale = is_stale(self.team, filter, self.cached_response) + + assert stale is False + + def test_discards_stale_daily_result(self) -> None: + with freeze_time("2023-02-09T00:00:00Z"): + filter = Filter(data={"interval": "day"}) + + stale = is_stale(self.team, filter, self.cached_response) + + assert stale is True + + def test_keeps_fresh_weekly_result(self) -> None: + with freeze_time("2023-02-11T23:59:59Z"): + filter = Filter(data={"interval": "week"}) + + stale = is_stale(self.team, filter, self.cached_response) + + assert stale is False + + def test_discards_stale_weekly_result(self) -> None: + with freeze_time("2023-02-12T00:00:00Z"): + filter = Filter(data={"interval": "week"}) + + stale = is_stale(self.team, filter, self.cached_response) + + assert stale is True 
+ + def test_keeps_fresh_monthly_result(self) -> None: + with freeze_time("2023-02-28T23:59:59Z"): + filter = Filter(data={"interval": "month"}) + + stale = is_stale(self.team, filter, self.cached_response) + + assert stale is False + + def test_discards_stale_monthly_result(self) -> None: + with freeze_time("2023-03-01T00:00:00Z"): + filter = Filter(data={"interval": "month"}) + + stale = is_stale(self.team, filter, self.cached_response) + + assert stale is True + + def test_keeps_fresh_result_from_fixed_range(self) -> None: + filter = Filter(data={"interval": "day", "date_from": "2000-01-01", "date_to": "2000-01-10"}) + + stale = is_stale(self.team, filter, self.cached_response) + + assert stale is False + + def test_keeps_fresh_result_with_date_to_in_future(self) -> None: + with freeze_time("2023-02-08T23:59:59Z"): + filter = Filter(data={"interval": "day", "date_to": "2999-01-01"}) + + stale = is_stale(self.team, filter, self.cached_response) + + assert stale is False + + def test_keeps_fresh_stickiness_result(self) -> None: + with freeze_time("2023-02-08T23:59:59Z"): + filter = StickinessFilter(data={}, team=self.team) + + stale = is_stale(self.team, filter, self.cached_response) + + assert stale is False + + def test_discards_stale_stickiness_result(self) -> None: + with freeze_time("2023-02-09T00:00:00Z"): + filter = StickinessFilter(data={}, team=self.team) + + stale = is_stale(self.team, filter, self.cached_response) + + assert stale is True + + def test_keeps_fresh_path_result(self) -> None: + with freeze_time("2023-02-08T23:59:59Z"): + filter = PathFilter() + + stale = is_stale(self.team, filter, self.cached_response) + + assert stale is False + + def test_discards_stale_path_result(self) -> None: + with freeze_time("2023-02-09T00:00:00Z"): + filter = PathFilter() + + stale = is_stale(self.team, filter, self.cached_response) + + assert stale is True + + def test_keeps_fresh_retention_hourly_result(self) -> None: + with 
freeze_time("2023-02-08T12:59:59Z"): + filter = RetentionFilter(data={"period": "Hour"}) + + stale = is_stale(self.team, filter, self.cached_response) + + assert stale is False + + def test_discards_stale_retention_hourly_result(self) -> None: + with freeze_time("2023-02-08T13:00:00Z"): + filter = RetentionFilter(data={"period": "Hour"}) + + stale = is_stale(self.team, filter, self.cached_response) + + assert stale is True + + def test_keeps_fresh_retention_result(self) -> None: + with freeze_time("2023-02-08T23:59:59Z"): + filter = RetentionFilter() + + stale = is_stale(self.team, filter, self.cached_response) + + assert stale is False + + def test_discards_stale_retention_result(self) -> None: + with freeze_time("2023-02-09T00:00:00Z"): + filter = RetentionFilter() + + stale = is_stale(self.team, filter, self.cached_response) + + assert stale is True diff --git a/posthog/utils.py b/posthog/utils.py index ddbb32bfb2a5a..5e605415ebc3e 100644 --- a/posthog/utils.py +++ b/posthog/utils.py @@ -28,11 +28,11 @@ cast, ) from urllib.parse import urljoin, urlparse -from zoneinfo import ZoneInfo import lzstring import posthoganalytics import pytz +from zoneinfo import ZoneInfo import structlog from celery.schedules import crontab from dateutil import parser @@ -128,13 +128,13 @@ def get_previous_day(at: Optional[datetime.datetime] = None) -> Tuple[datetime.d period_end: datetime.datetime = datetime.datetime.combine( at - datetime.timedelta(days=1), datetime.time.max, - tzinfo=pytz.UTC, + tzinfo=ZoneInfo("UTC"), ) # very end of the previous day period_start: datetime.datetime = datetime.datetime.combine( period_end, datetime.time.min, - tzinfo=pytz.UTC, + tzinfo=ZoneInfo("UTC"), ) # very start of the previous day return (period_start, period_end) @@ -152,13 +152,13 @@ def get_current_day(at: Optional[datetime.datetime] = None) -> Tuple[datetime.da period_end: datetime.datetime = datetime.datetime.combine( at, datetime.time.max, - tzinfo=pytz.UTC, + tzinfo=ZoneInfo("UTC"), ) 
# very end of the reference day period_start: datetime.datetime = datetime.datetime.combine( period_end, datetime.time.min, - tzinfo=pytz.UTC, + tzinfo=ZoneInfo("UTC"), ) # very start of the reference day return (period_start, period_end) @@ -1087,7 +1087,7 @@ def cast_timestamp_or_now(timestamp: Optional[Union[timezone.datetime, str]]) -> if isinstance(timestamp, str): timestamp = parser.isoparse(timestamp) else: - timestamp = timestamp.astimezone(pytz.utc) + timestamp = timestamp.astimezone(ZoneInfo("UTC")) return timestamp.strftime("%Y-%m-%d %H:%M:%S.%f") diff --git a/posthog/warehouse/models/table.py b/posthog/warehouse/models/table.py index 10e61444e8250..dcb1b2297216f 100644 --- a/posthog/warehouse/models/table.py +++ b/posthog/warehouse/models/table.py @@ -8,6 +8,7 @@ StringDatabaseField, IntegerDatabaseField, DateTimeDatabaseField, + DateDatabaseField, StringJSONDatabaseField, BooleanDatabaseField, StringArrayDatabaseField, @@ -20,6 +21,7 @@ "String": StringDatabaseField, "DateTime64": DateTimeDatabaseField, "DateTime32": DateTimeDatabaseField, + "Date": DateDatabaseField, "UInt8": IntegerDatabaseField, "UInt16": IntegerDatabaseField, "UInt32": IntegerDatabaseField, @@ -47,6 +49,7 @@ class DataWarehouseTable(CreatedMetaFields, UUIDModel, DeletedMetaFields): class TableFormat(models.TextChoices): CSV = "CSV", "CSV" Parquet = "Parquet", "Parquet" + JSON = "JSONEachRow", "JSON" name: models.CharField = models.CharField(max_length=128) format: models.CharField = models.CharField(max_length=128, choices=TableFormat.choices) diff --git a/requirements.in b/requirements.in index 0fecfd58aad1f..09b47a4c2aa95 100644 --- a/requirements.in +++ b/requirements.in @@ -9,7 +9,7 @@ antlr4-python3-runtime==4.13.0 amqp==2.6.0 boto3==1.26.66 boto3-stubs[s3] -brotli==1.0.9 +brotli==1.1.0 celery==4.4.7 celery-redbeat==2.0.0 clickhouse-driver==0.2.4 @@ -55,7 +55,7 @@ pickleshare==0.7.5 Pillow==9.2.0 posthoganalytics==3.0.1 prance==0.22.2.22.0 -psycopg2-binary==2.8.6 
+psycopg2-binary==2.9.7 pyarrow==12.0.1 pydantic==1.10.4 pyjwt==2.4.0 @@ -78,7 +78,7 @@ temporalio==1.1.0 token-bucket==0.3.0 toronado==0.1.0 webdriver_manager==3.8.5 -whitenoise==5.2.0 +whitenoise==6.5.0 mimesis==5.2.1 more-itertools==9.0.0 django-two-factor-auth==1.14.0 diff --git a/requirements.txt b/requirements.txt index 972c09cea5b08..4fac42e90302a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -51,8 +51,10 @@ botocore==1.29.66 # s3transfer botocore-stubs==1.29.130 # via boto3-stubs -brotli==1.0.9 +brotli==1.1.0 # via -r requirements.in +cachetools==5.3.1 + # via google-auth celery==4.4.7 # via # -r requirements.in @@ -319,7 +321,7 @@ protobuf==4.22.1 # grpcio-status # proto-plus # temporalio -psycopg2-binary==2.8.6 +psycopg2-binary==2.9.7 # via -r requirements.in ptyprocess==0.6.0 # via pexpect @@ -507,7 +509,7 @@ vine==1.3.0 # celery webdriver-manager==3.8.5 # via -r requirements.in -whitenoise==5.2.0 +whitenoise==6.5.0 # via -r requirements.in wsproto==1.1.0 # via trio-websocket diff --git a/runtime.txt b/runtime.txt deleted file mode 100644 index 97691386f3a6e..0000000000000 --- a/runtime.txt +++ /dev/null @@ -1 +0,0 @@ -python-3.10.10