diff --git a/.github/workflows/container-images-cd.yml b/.github/workflows/container-images-cd.yml index 8bb8380ad5a93..c4e672c560c86 100644 --- a/.github/workflows/container-images-cd.yml +++ b/.github/workflows/container-images-cd.yml @@ -55,9 +55,9 @@ jobs: uses: aws-actions/amazon-ecr-login@v2 - name: Login to DockerHub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: - username: posthog + username: ${{ secrets.DOCKERHUB_USER }} password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Build and push container image diff --git a/.github/workflows/copy-clickhouse-udfs.yml b/.github/workflows/copy-clickhouse-udfs.yml index c6862b0345c67..3dc6fce3ade07 100644 --- a/.github/workflows/copy-clickhouse-udfs.yml +++ b/.github/workflows/copy-clickhouse-udfs.yml @@ -1,21 +1,20 @@ name: Trigger UDFs Workflow on: - push: - branches: - - master - paths: - - 'posthog/user_scripts/**' + push: + branches: + - master + paths: + - 'posthog/user_scripts/**' jobs: - trigger_udfs_workflow: - runs-on: ubuntu-latest - steps: - - name: Trigger UDFs Workflow - uses: benc-uk/workflow-dispatch@v1 - with: - workflow: .github/workflows/clickhouse-udfs.yml - repo: posthog/posthog-cloud-infra - token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }} - ref: refs/heads/main - + trigger_udfs_workflow: + runs-on: ubuntu-latest + steps: + - name: Trigger UDFs Workflow + uses: benc-uk/workflow-dispatch@v1 + with: + workflow: .github/workflows/clickhouse-udfs.yml + repo: posthog/posthog-cloud-infra + token: ${{ secrets.POSTHOG_BOT_GITHUB_TOKEN }} + ref: refs/heads/main diff --git a/.github/workflows/pr-deploy.yml b/.github/workflows/pr-deploy.yml index 740ac52954bc2..26896c7566091 100644 --- a/.github/workflows/pr-deploy.yml +++ b/.github/workflows/pr-deploy.yml @@ -33,7 +33,7 @@ jobs: - name: Login to DockerHub uses: docker/login-action@v3 with: - username: posthog + username: ${{ secrets.DOCKERHUB_USER }} password: ${{ secrets.DOCKERHUB_TOKEN }} - uses: aws-actions/configure-aws-credentials@v4 diff --git a/.storybook/decorators/withKea/kea-story.tsx b/.storybook/decorators/withKea/kea-story.tsx index 3525a4befbe0d..0e04d991a82fb 100644 --- a/.storybook/decorators/withKea/kea-story.tsx +++ b/.storybook/decorators/withKea/kea-story.tsx @@ -8,6 +8,7 @@ import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { worker } from '~/mocks/browser' import { teamLogic } from 'scenes/teamLogic' import { userLogic } from 'scenes/userLogic' +import { projectLogic } from 'scenes/projectLogic' export function resetKeaStory(): void { worker.resetHandlers() @@ -18,6 +19,7 @@ export function resetKeaStory(): void { initKea({ routerLocation: history.location, routerHistory: history }) featureFlagLogic.mount() teamLogic.mount() + projectLogic.mount() userLogic.mount() router.mount() const { store } = getContext() diff --git a/bin/copy-posthog-js b/bin/copy-posthog-js index f57e2841bb784..24bc26c761f1b 100755 --- a/bin/copy-posthog-js +++ b/bin/copy-posthog-js @@ -6,6 +6,7 @@ set -e cp node_modules/posthog-js/dist/array.js* frontend/dist/ cp node_modules/posthog-js/dist/array.full.js* frontend/dist/ +cp node_modules/posthog-js/dist/array.full.es5.js* frontend/dist/ cp node_modules/posthog-js/dist/recorder.js* frontend/dist/ cp node_modules/posthog-js/dist/recorder-v2.js* frontend/dist/ cp node_modules/posthog-js/dist/surveys.js* frontend/dist/ diff --git a/cypress/e2e/alerts.cy.ts b/cypress/e2e/alerts.cy.ts index b55f1e09b9494..bd6ca01bcb734 100644 --- a/cypress/e2e/alerts.cy.ts +++ b/cypress/e2e/alerts.cy.ts @@ -16,7 +16,8 @@ 
describe('Alerts', () => { const createAlert = ( name: string = 'Alert name', lowerThreshold: string = '100', - upperThreshold: string = '200' + upperThreshold: string = '200', + condition?: string ): void => { cy.get('[data-attr=more-button]').click() cy.contains('Manage alerts').click() @@ -24,6 +25,13 @@ describe('Alerts', () => { cy.get('[data-attr=alertForm-name]').clear().type(name) cy.get('[data-attr=subscribed-users').click().type('{downarrow}{enter}') + + if (condition) { + cy.get('[data-attr=alertForm-condition').click() + cy.contains(condition).click() + cy.contains('%').click() + } + cy.get('[data-attr=alertForm-lower-threshold').clear().type(lowerThreshold) cy.get('[data-attr=alertForm-upper-threshold').clear().type(upperThreshold) cy.contains('Create alert').click() @@ -39,7 +47,6 @@ describe('Alerts', () => { cy.get('[data-attr=insight-edit-button]').click() cy.get('[data-attr=chart-filter]').click() cy.contains(displayType).click() - cy.get('.insight-empty-state').should('not.exist') cy.get('[data-attr=insight-save-button]').contains('Save').click() cy.url().should('not.include', '/edit') } @@ -69,7 +76,7 @@ describe('Alerts', () => { }) it('Should warn about an alert deletion', () => { - setInsightDisplayTypeAndSave('Number') + setInsightDisplayTypeAndSave('Area chart') createAlert('Alert to be deleted because of a changed insight') @@ -90,4 +97,28 @@ describe('Alerts', () => { cy.contains('Manage alerts').click() cy.contains('Alert to be deleted because of a changed insight').should('not.exist') }) + + it('Should allow create and delete a relative alert', () => { + cy.get('[data-attr=more-button]').click() + // Alerts should be disabled for trends represented with graphs + cy.get('[data-attr=manage-alerts-button]').should('have.attr', 'aria-disabled', 'true') + + setInsightDisplayTypeAndSave('Bar chart') + + createAlert('Alert name', '10', '20', 'increases by') + cy.reload() + + // Check the alert has the same values as when it was created + cy.get('[data-attr=more-button]').click() + cy.contains('Manage alerts').click() + cy.get('[data-attr=alert-list-item]').contains('Alert name').click() + cy.get('[data-attr=alertForm-name]').should('have.value', 'Alert name') + cy.get('[data-attr=alertForm-lower-threshold').should('have.value', '10') + cy.get('[data-attr=alertForm-upper-threshold').should('have.value', '20') + cy.contains('Delete alert').click() + cy.wait(2000) + + cy.reload() + cy.contains('Alert name').should('not.exist') + }) }) diff --git a/cypress/e2e/billingUpgradeCTA.cy.ts b/cypress/e2e/billingUpgradeCTA.cy.ts index 17d26463a3350..1eabae675aee5 100644 --- a/cypress/e2e/billingUpgradeCTA.cy.ts +++ b/cypress/e2e/billingUpgradeCTA.cy.ts @@ -21,6 +21,6 @@ describe('Billing Upgrade CTA', () => { cy.reload() cy.get('[data-attr=billing-page-core-upgrade-cta] .LemonButton__content').should('not.exist') - cy.get('[data-attr=manage-billing]').should('have.text', 'Manage card details and view past invoices') + cy.get('[data-attr=manage-billing]').should('have.text', 'Manage card details and invoices') }) }) diff --git a/cypress/e2e/dashboard-duplication.ts b/cypress/e2e/dashboard-duplication.ts index 75c1dea0c3998..e19bbd1ad046f 100644 --- a/cypress/e2e/dashboard-duplication.ts +++ b/cypress/e2e/dashboard-duplication.ts @@ -7,7 +7,7 @@ describe('duplicating dashboards', () => { let dashboardName, insightName, expectedCopiedDashboardName, expectedCopiedInsightName beforeEach(() => { - cy.intercept('POST', /\/api\/projects\/\d+\/dashboards/).as('createDashboard') + 
cy.intercept('POST', /\/api\/environments\/\d+\/dashboards/).as('createDashboard') dashboardName = randomString('dashboard-') expectedCopiedDashboardName = `${dashboardName} (Copy)` diff --git a/cypress/e2e/dashboard-shared.cy.ts b/cypress/e2e/dashboard-shared.cy.ts index 4e46554160424..755297f5c52dd 100644 --- a/cypress/e2e/dashboard-shared.cy.ts +++ b/cypress/e2e/dashboard-shared.cy.ts @@ -2,9 +2,9 @@ import { dashboards } from '../productAnalytics' describe('Shared dashboard', () => { beforeEach(() => { - cy.intercept('GET', /api\/projects\/\d+\/insights\/\?.*/).as('loadInsightList') - cy.intercept('PATCH', /api\/projects\/\d+\/insights\/\d+\/.*/).as('patchInsight') - cy.intercept('POST', /\/api\/projects\/\d+\/dashboards/).as('createDashboard') + cy.intercept('GET', /api\/environments\/\d+\/insights\/\?.*/).as('loadInsightList') + cy.intercept('PATCH', /api\/environments\/\d+\/insights\/\d+\/.*/).as('patchInsight') + cy.intercept('POST', /\/api\/environments\/\d+\/dashboards/).as('createDashboard') cy.useSubscriptionStatus('unsubscribed') cy.clickNavMenu('dashboards') diff --git a/cypress/e2e/dashboard.cy.ts b/cypress/e2e/dashboard.cy.ts index 954bc390759d3..b5f62097ebee1 100644 --- a/cypress/e2e/dashboard.cy.ts +++ b/cypress/e2e/dashboard.cy.ts @@ -3,9 +3,9 @@ import { randomString } from '../support/random' describe('Dashboard', () => { beforeEach(() => { - cy.intercept('GET', /api\/projects\/\d+\/insights\/\?.*/).as('loadInsightList') - cy.intercept('PATCH', /api\/projects\/\d+\/insights\/\d+\/.*/).as('patchInsight') - cy.intercept('POST', /\/api\/projects\/\d+\/dashboards/).as('createDashboard') + cy.intercept('GET', /api\/environments\/\d+\/insights\/\?.*/).as('loadInsightList') + cy.intercept('PATCH', /api\/environments\/\d+\/insights\/\d+\/.*/).as('patchInsight') + cy.intercept('POST', /\/api\/environments\/\d+\/dashboards/).as('createDashboard') cy.clickNavMenu('dashboards') cy.location('pathname').should('include', '/dashboard') @@ -306,7 +306,7 @@ describe('Dashboard', () => { }) it('Move dashboard item', () => { - cy.intercept('PATCH', /api\/projects\/\d+\/dashboards\/\d+\/move_tile.*/).as('moveTile') + cy.intercept('PATCH', /api\/environments\/\d+\/dashboards\/\d+\/move_tile.*/).as('moveTile') const sourceDashboard = randomString('source-dashboard') const targetDashboard = randomString('target-dashboard') diff --git a/cypress/e2e/insights-saved.cy.ts b/cypress/e2e/insights-saved.cy.ts index c5a498edf63af..748c0984543f3 100644 --- a/cypress/e2e/insights-saved.cy.ts +++ b/cypress/e2e/insights-saved.cy.ts @@ -30,7 +30,7 @@ describe('Insights - saved', () => { }) it('If cache empty, initiate async refresh', () => { - cy.intercept('GET', /\/api\/projects\/\d+\/insights\/?\?[^/]*?refresh=async/).as('getInsightsRefreshAsync') + cy.intercept('GET', /\/api\/environments\/\d+\/insights\/?\?[^/]*?refresh=async/).as('getInsightsRefreshAsync') let newInsightId: string createInsight('saved insight').then((insightId) => { newInsightId = insightId diff --git a/cypress/e2e/insights.cy.ts b/cypress/e2e/insights.cy.ts index e7e05fa4e0491..a7dc8924c6e09 100644 --- a/cypress/e2e/insights.cy.ts +++ b/cypress/e2e/insights.cy.ts @@ -53,7 +53,7 @@ describe('Insights', () => { }) it('Create new insight and save and continue editing', () => { - cy.intercept('PATCH', /\/api\/projects\/\d+\/insights\/\d+\/?/).as('patchInsight') + cy.intercept('PATCH', /\/api\/environments\/\d+\/insights\/\d+\/?/).as('patchInsight') const insightName = randomString('insight-name-') createInsight(insightName) diff --git 
a/cypress/e2e/notebooks-insights.ts b/cypress/e2e/notebooks-insights.ts new file mode 100644 index 0000000000000..0b007744576c6 --- /dev/null +++ b/cypress/e2e/notebooks-insights.ts @@ -0,0 +1,18 @@ +import { insight, savedInsights } from '../productAnalytics' + +describe('Notebooks', () => { + beforeEach(() => { + cy.clickNavMenu('notebooks') + cy.location('pathname').should('include', '/notebooks') + }) + ;['SQL', 'TRENDS', 'FUNNELS', 'RETENTION', 'PATHS', 'STICKINESS', 'LIFECYCLE'].forEach((insightType) => { + it(`Can add a ${insightType} insight`, () => { + savedInsights.createNewInsightOfType(insightType) + insight.editName(`${insightType} Insight`) + insight.save() + cy.get('[data-attr="notebooks-add-button"]').click() + cy.get('[data-attr="notebooks-select-button-create"]').click() + cy.get('.ErrorBoundary').should('not.exist') + }) + }) +}) diff --git a/cypress/e2e/notebooks.cy.ts b/cypress/e2e/notebooks.cy.ts index 36c81378d2667..3022d621ba63d 100644 --- a/cypress/e2e/notebooks.cy.ts +++ b/cypress/e2e/notebooks.cy.ts @@ -3,13 +3,13 @@ import { urls } from 'scenes/urls' describe('Notebooks', () => { beforeEach(() => { cy.fixture('api/session-recordings/recordings.json').then((recordings) => { - cy.intercept('GET', /api\/projects\/\d+\/session_recordings\/?\?.*/, { body: recordings }).as( + cy.intercept('GET', /api\/environments\/\d+\/session_recordings\/?\?.*/, { body: recordings }).as( 'loadSessionRecordingsList' ) }) cy.fixture('api/session-recordings/recording.json').then((recording) => { - cy.intercept('GET', /api\/projects\/\d+\/session_recordings\/.*\?.*/, { body: recording }).as( + cy.intercept('GET', /api\/environments\/\d+\/session_recordings\/.*\?.*/, { body: recording }).as( 'loadSessionRecording' ) }) diff --git a/cypress/e2e/projectHomepage.cy.ts b/cypress/e2e/projectHomepage.cy.ts index 069041d039435..6d88abfe24c44 100644 --- a/cypress/e2e/projectHomepage.cy.ts +++ b/cypress/e2e/projectHomepage.cy.ts @@ -1,6 +1,6 @@ describe('Project Homepage', () => { beforeEach(() => { - cy.intercept('GET', /\/api\/projects\/\d+\/dashboards\/\d+\//).as('getDashboard') + cy.intercept('GET', /\/api\/environments\/\d+\/dashboards\/\d+\//).as('getDashboard') cy.clickNavMenu('projecthomepage') }) diff --git a/cypress/e2e/trends.cy.ts b/cypress/e2e/trends.cy.ts index ce8a6e8574b30..9f6d45236520a 100644 --- a/cypress/e2e/trends.cy.ts +++ b/cypress/e2e/trends.cy.ts @@ -6,7 +6,7 @@ describe('Trends', () => { }) it('Can load a graph from a URL directly', () => { - cy.intercept('POST', /api\/projects\/\d+\/query\//).as('loadNewQueryInsight') + cy.intercept('POST', /api\/environments\/\d+\/query\//).as('loadNewQueryInsight') // regression test, the graph wouldn't load when going directly to a URL cy.visit( diff --git a/cypress/productAnalytics/index.ts b/cypress/productAnalytics/index.ts index 46344f78a627a..0fc9972014116 100644 --- a/cypress/productAnalytics/index.ts +++ b/cypress/productAnalytics/index.ts @@ -42,7 +42,7 @@ export const insight = { cy.url().should('not.include', '/new') }, clickTab: (tabName: string): void => { - cy.intercept('POST', /api\/projects\/\d+\/query\//).as('loadNewQueryInsight') + cy.intercept('POST', /api\/environments\/\d+\/query\//).as('loadNewQueryInsight') cy.get(`[data-attr="insight-${(tabName === 'PATHS' ? 
'PATH' : tabName).toLowerCase()}-tab"]`).click() if (tabName !== 'FUNNELS') { @@ -51,7 +51,7 @@ export const insight = { } }, newInsight: (insightType: string = 'TRENDS'): void => { - cy.intercept('POST', /api\/projects\/\d+\/query\//).as('loadNewQueryInsight') + cy.intercept('POST', /api\/environments\/\d+\/query\//).as('loadNewQueryInsight') if (insightType === 'JSON') { cy.clickNavMenu('savedinsights') @@ -86,7 +86,7 @@ export const insight = { cy.url().should('not.include', '/new') // wait for insight to complete and update URL }, addInsightToDashboard: (dashboardName: string, options: { visitAfterAdding: boolean }): void => { - cy.intercept('PATCH', /api\/projects\/\d+\/insights\/\d+\/.*/).as('patchInsight') + cy.intercept('PATCH', /api\/environments\/\d+\/insights\/\d+\/.*/).as('patchInsight') cy.get('[data-attr="save-to-dashboard-button"]').click() cy.get('[data-attr="dashboard-searchfield"]').type(dashboardName) @@ -158,7 +158,7 @@ export const dashboards = { export const dashboard = { addInsightToEmptyDashboard: (insightName: string): void => { - cy.intercept('POST', /api\/projects\/\d+\/insights\//).as('postInsight') + cy.intercept('POST', /api\/environments\/\d+\/insights\//).as('postInsight') cy.get('[data-attr=dashboard-add-graph-header]').contains('Add insight').click() cy.get('[data-attr=toast-close-button]').click({ multiple: true }) diff --git a/cypress/support/e2e.ts b/cypress/support/e2e.ts index 3b3d74a2287ea..fe164bf074b3a 100644 --- a/cypress/support/e2e.ts +++ b/cypress/support/e2e.ts @@ -101,7 +101,7 @@ beforeEach(() => { req.reply({ statusCode: 404, body: 'Cypress forced 404' }) ) - cy.intercept('GET', /\/api\/projects\/\d+\/insights\/?\?/).as('getInsights') + cy.intercept('GET', /\/api\/environments\/\d+\/insights\/?\?/).as('getInsights') cy.request('POST', '/api/login/', { email: 'test@posthog.com', diff --git a/docker/clickhouse/user_defined_function.xml b/docker/clickhouse/user_defined_function.xml index b8fac26d1887c..b48169884a53f 100644 --- a/docker/clickhouse/user_defined_function.xml +++ b/docker/clickhouse/user_defined_function.xml @@ -138,7 +138,7 @@ executable_pool aggregate_funnel_trends - Array(Tuple(UInt64, Int8, Nullable(String))) + Array(Tuple(UInt64, Int8, Nullable(String), UUID)) result UInt8 @@ -169,7 +169,7 @@ prop_vals - Array(Tuple(Nullable(Float64), UInt64, Nullable(String), Array(Int8))) + Array(Tuple(Nullable(Float64), UInt64, UUID, Nullable(String), Array(Int8))) value JSONEachRow @@ -181,7 +181,7 @@ executable_pool aggregate_funnel_array_trends - Array(Tuple(UInt64, Int8, Array(String))) + Array(Tuple(UInt64, Int8, Array(String), UUID)) result UInt8 @@ -208,7 +208,7 @@ prop_vals - Array(Tuple(Nullable(Float64), UInt64, Array(String), Array(Int8))) + Array(Tuple(Nullable(Float64), UInt64, UUID, Array(String), Array(Int8))) value JSONEachRow @@ -220,7 +220,7 @@ executable_pool aggregate_funnel_cohort_trends - Array(Tuple(UInt64, Int8, UInt64)) + Array(Tuple(UInt64, Int8, UInt64, UUID)) result UInt8 @@ -247,7 +247,7 @@ prop_vals - Array(Tuple(Nullable(Float64), UInt64, UInt64, Array(Int8))) + Array(Tuple(Nullable(Float64), UInt64, UUID, UInt64, Array(Int8))) value JSONEachRow @@ -285,7 +285,7 @@ prop_vals - Array(Tuple(Nullable(Float64), UInt64, Array(String), Array(Int8))) + Array(Tuple(Nullable(Float64), UInt64, UUID, Array(String), Array(Int8))) value JSONEachRow diff --git a/ee/api/test/__snapshots__/test_organization_resource_access.ambr b/ee/api/test/__snapshots__/test_organization_resource_access.ambr index 
900c97084a746..d3c0c4dfe421f 100644 --- a/ee/api/test/__snapshots__/test_organization_resource_access.ambr +++ b/ee/api/test/__snapshots__/test_organization_resource_access.ambr @@ -7,7 +7,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -19,6 +18,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -179,7 +179,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -191,6 +190,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", diff --git a/ee/clickhouse/queries/experiments/__init__.py b/ee/clickhouse/queries/experiments/__init__.py index 5d14ceca45be5..89f00352014b5 100644 --- a/ee/clickhouse/queries/experiments/__init__.py +++ b/ee/clickhouse/queries/experiments/__init__.py @@ -11,3 +11,5 @@ # Trends only: If p-value is below this threshold, the results are considered significant P_VALUE_SIGNIFICANCE_LEVEL = 0.05 + +CONTROL_VARIANT_KEY = "control" diff --git a/ee/clickhouse/queries/experiments/funnel_experiment_result.py b/ee/clickhouse/queries/experiments/funnel_experiment_result.py index 0ec82f553d6d2..e311657cc52c7 100644 --- a/ee/clickhouse/queries/experiments/funnel_experiment_result.py +++ b/ee/clickhouse/queries/experiments/funnel_experiment_result.py @@ -6,9 +6,9 @@ from rest_framework.exceptions import ValidationError -from posthog.constants import ExperimentSignificanceCode, ExperimentNoResultsErrorKeys +from posthog.constants import ExperimentNoResultsErrorKeys from posthog.hogql_queries.experiments import CONTROL_VARIANT_KEY -from posthog.hogql_queries.experiments.funnel_statistics import ( +from posthog.hogql_queries.experiments.funnels_statistics import ( are_results_significant, calculate_credible_intervals, calculate_probabilities, @@ -17,6 +17,7 @@ from posthog.models.filters.filter import Filter from posthog.models.team import Team from posthog.queries.funnels import ClickhouseFunnel +from posthog.schema import ExperimentSignificanceCode Probability = float diff --git a/ee/clickhouse/queries/experiments/test_funnel_experiment_result.py b/ee/clickhouse/queries/experiments/test_funnel_experiment_result.py index 4e3ef4373f520..55fca255ed9ca 100644 --- a/ee/clickhouse/queries/experiments/test_funnel_experiment_result.py +++ b/ee/clickhouse/queries/experiments/test_funnel_experiment_result.py @@ -4,14 +4,13 @@ from flaky import flaky -from posthog.constants import ExperimentSignificanceCode -from posthog.hogql_queries.experiments.funnel_statistics import ( +from posthog.hogql_queries.experiments.funnels_statistics import ( are_results_significant, calculate_expected_loss, calculate_probabilities, calculate_credible_intervals as calculate_funnel_credible_intervals, ) -from posthog.schema import ExperimentVariantFunnelResult +from posthog.schema import ExperimentSignificanceCode, ExperimentVariantFunnelsBaseStats Probability = float @@ -26,7 +25,7 @@ def 
logbeta(x: int, y: int) -> float: def calculate_probability_of_winning_for_target( - target_variant: ExperimentVariantFunnelResult, other_variants: list[ExperimentVariantFunnelResult] + target_variant: ExperimentVariantFunnelsBaseStats, other_variants: list[ExperimentVariantFunnelsBaseStats] ) -> Probability: """ Calculates the probability of winning for target variant. @@ -147,8 +146,8 @@ def probability_D_beats_A_B_and_C( @flaky(max_runs=10, min_passes=1) class TestFunnelExperimentCalculator(unittest.TestCase): def test_calculate_results(self): - variant_test = ExperimentVariantFunnelResult(key="A", success_count=100, failure_count=10) - variant_control = ExperimentVariantFunnelResult(key="B", success_count=100, failure_count=18) + variant_test = ExperimentVariantFunnelsBaseStats(key="A", success_count=100, failure_count=10) + variant_control = ExperimentVariantFunnelsBaseStats(key="B", success_count=100, failure_count=18) _, probability = calculate_probabilities(variant_control, [variant_test]) self.assertAlmostEqual(probability, 0.918, places=2) @@ -165,8 +164,8 @@ def test_calculate_results(self): self.assertAlmostEqual(credible_intervals[variant_test.key][1], 0.9494, places=3) def test_simulation_result_is_close_to_closed_form_solution(self): - variant_test = ExperimentVariantFunnelResult(key="A", success_count=100, failure_count=10) - variant_control = ExperimentVariantFunnelResult(key="B", success_count=100, failure_count=18) + variant_test = ExperimentVariantFunnelsBaseStats(key="A", success_count=100, failure_count=10) + variant_control = ExperimentVariantFunnelsBaseStats(key="B", success_count=100, failure_count=18) _, probability = calculate_probabilities(variant_control, [variant_test]) self.assertAlmostEqual(probability, 0.918, places=1) @@ -175,9 +174,9 @@ def test_simulation_result_is_close_to_closed_form_solution(self): self.assertAlmostEqual(probability, alternative_probability, places=1) def test_calculate_results_for_two_test_variants(self): - variant_test_1 = ExperimentVariantFunnelResult(key="A", success_count=100, failure_count=10) - variant_test_2 = ExperimentVariantFunnelResult(key="B", success_count=100, failure_count=3) - variant_control = ExperimentVariantFunnelResult(key="C", success_count=100, failure_count=18) + variant_test_1 = ExperimentVariantFunnelsBaseStats(key="A", success_count=100, failure_count=10) + variant_test_2 = ExperimentVariantFunnelsBaseStats(key="B", success_count=100, failure_count=3) + variant_control = ExperimentVariantFunnelsBaseStats(key="C", success_count=100, failure_count=18) probabilities = calculate_probabilities(variant_control, [variant_test_1, variant_test_2]) self.assertAlmostEqual(sum(probabilities), 1) @@ -211,9 +210,9 @@ def test_calculate_results_for_two_test_variants(self): self.assertAlmostEqual(credible_intervals[variant_test_2.key][1], 0.9894, places=3) def test_calculate_results_for_two_test_variants_almost_equal(self): - variant_test_1 = ExperimentVariantFunnelResult(key="A", success_count=120, failure_count=60) - variant_test_2 = ExperimentVariantFunnelResult(key="B", success_count=110, failure_count=52) - variant_control = ExperimentVariantFunnelResult(key="C", success_count=130, failure_count=65) + variant_test_1 = ExperimentVariantFunnelsBaseStats(key="A", success_count=120, failure_count=60) + variant_test_2 = ExperimentVariantFunnelsBaseStats(key="B", success_count=110, failure_count=52) + variant_control = ExperimentVariantFunnelsBaseStats(key="C", success_count=130, failure_count=65) probabilities = 
calculate_probabilities(variant_control, [variant_test_1, variant_test_2]) self.assertAlmostEqual(sum(probabilities), 1) @@ -246,8 +245,8 @@ def test_calculate_results_for_two_test_variants_almost_equal(self): self.assertAlmostEqual(credible_intervals[variant_test_2.key][1], 0.7460, places=3) def test_absolute_loss_less_than_one_percent_but_not_significant(self): - variant_test_1 = ExperimentVariantFunnelResult(key="A", success_count=286, failure_count=2014) - variant_control = ExperimentVariantFunnelResult(key="B", success_count=267, failure_count=2031) + variant_test_1 = ExperimentVariantFunnelsBaseStats(key="A", success_count=286, failure_count=2014) + variant_control = ExperimentVariantFunnelsBaseStats(key="B", success_count=267, failure_count=2031) probabilities = calculate_probabilities(variant_control, [variant_test_1]) self.assertAlmostEqual(sum(probabilities), 1) @@ -268,10 +267,10 @@ def test_absolute_loss_less_than_one_percent_but_not_significant(self): self.assertAlmostEqual(credible_intervals[variant_test_1.key][1], 0.1384, places=3) def test_calculate_results_for_three_test_variants(self): - variant_test_1 = ExperimentVariantFunnelResult(key="A", success_count=100, failure_count=10) - variant_test_2 = ExperimentVariantFunnelResult(key="B", success_count=100, failure_count=3) - variant_test_3 = ExperimentVariantFunnelResult(key="C", success_count=100, failure_count=30) - variant_control = ExperimentVariantFunnelResult(key="D", success_count=100, failure_count=18) + variant_test_1 = ExperimentVariantFunnelsBaseStats(key="A", success_count=100, failure_count=10) + variant_test_2 = ExperimentVariantFunnelsBaseStats(key="B", success_count=100, failure_count=3) + variant_test_3 = ExperimentVariantFunnelsBaseStats(key="C", success_count=100, failure_count=30) + variant_control = ExperimentVariantFunnelsBaseStats(key="D", success_count=100, failure_count=18) probabilities = calculate_probabilities(variant_control, [variant_test_1, variant_test_2, variant_test_3]) self.assertAlmostEqual(sum(probabilities), 1) @@ -314,10 +313,10 @@ def test_calculate_results_for_three_test_variants(self): self.assertAlmostEqual(credible_intervals[variant_test_3.key][1], 0.8332, places=3) def test_calculate_results_for_three_test_variants_almost_equal(self): - variant_test_1 = ExperimentVariantFunnelResult(key="A", success_count=120, failure_count=60) - variant_test_2 = ExperimentVariantFunnelResult(key="B", success_count=110, failure_count=52) - variant_test_3 = ExperimentVariantFunnelResult(key="C", success_count=100, failure_count=46) - variant_control = ExperimentVariantFunnelResult(key="D", success_count=130, failure_count=65) + variant_test_1 = ExperimentVariantFunnelsBaseStats(key="A", success_count=120, failure_count=60) + variant_test_2 = ExperimentVariantFunnelsBaseStats(key="B", success_count=110, failure_count=52) + variant_test_3 = ExperimentVariantFunnelsBaseStats(key="C", success_count=100, failure_count=46) + variant_control = ExperimentVariantFunnelsBaseStats(key="D", success_count=130, failure_count=65) probabilities = calculate_probabilities(variant_control, [variant_test_1, variant_test_2, variant_test_3]) self.assertAlmostEqual(sum(probabilities), 1) @@ -358,10 +357,10 @@ def test_calculate_results_for_three_test_variants_almost_equal(self): self.assertAlmostEqual(credible_intervals[variant_test_3.key][1], 0.7547, places=3) def test_calculate_results_for_three_test_variants_much_better_than_control(self): - variant_test_1 = ExperimentVariantFunnelResult(key="A", success_count=130, 
failure_count=60) - variant_test_2 = ExperimentVariantFunnelResult(key="B", success_count=135, failure_count=62) - variant_test_3 = ExperimentVariantFunnelResult(key="C", success_count=132, failure_count=60) - variant_control = ExperimentVariantFunnelResult(key="D", success_count=80, failure_count=65) + variant_test_1 = ExperimentVariantFunnelsBaseStats(key="A", success_count=130, failure_count=60) + variant_test_2 = ExperimentVariantFunnelsBaseStats(key="B", success_count=135, failure_count=62) + variant_test_3 = ExperimentVariantFunnelsBaseStats(key="C", success_count=132, failure_count=60) + variant_control = ExperimentVariantFunnelsBaseStats(key="D", success_count=80, failure_count=65) probabilities = calculate_probabilities(variant_control, [variant_test_1, variant_test_2, variant_test_3]) self.assertAlmostEqual(sum(probabilities), 1) @@ -393,14 +392,14 @@ def test_calculate_results_for_three_test_variants_much_better_than_control(self self.assertAlmostEqual(credible_intervals[variant_test_3.key][1], 0.7488, places=3) def test_calculate_results_for_seven_test_variants(self): - variant_test_1 = ExperimentVariantFunnelResult(key="A", success_count=100, failure_count=17) - variant_test_2 = ExperimentVariantFunnelResult(key="B", success_count=100, failure_count=16) - variant_test_3 = ExperimentVariantFunnelResult(key="C", success_count=100, failure_count=30) - variant_test_4 = ExperimentVariantFunnelResult(key="D", success_count=100, failure_count=31) - variant_test_5 = ExperimentVariantFunnelResult(key="E", success_count=100, failure_count=29) - variant_test_6 = ExperimentVariantFunnelResult(key="F", success_count=100, failure_count=32) - variant_test_7 = ExperimentVariantFunnelResult(key="G", success_count=100, failure_count=33) - variant_control = ExperimentVariantFunnelResult(key="H", success_count=100, failure_count=18) + variant_test_1 = ExperimentVariantFunnelsBaseStats(key="A", success_count=100, failure_count=17) + variant_test_2 = ExperimentVariantFunnelsBaseStats(key="B", success_count=100, failure_count=16) + variant_test_3 = ExperimentVariantFunnelsBaseStats(key="C", success_count=100, failure_count=30) + variant_test_4 = ExperimentVariantFunnelsBaseStats(key="D", success_count=100, failure_count=31) + variant_test_5 = ExperimentVariantFunnelsBaseStats(key="E", success_count=100, failure_count=29) + variant_test_6 = ExperimentVariantFunnelsBaseStats(key="F", success_count=100, failure_count=32) + variant_test_7 = ExperimentVariantFunnelsBaseStats(key="G", success_count=100, failure_count=33) + variant_control = ExperimentVariantFunnelsBaseStats(key="H", success_count=100, failure_count=18) probabilities = calculate_probabilities( variant_control, @@ -488,8 +487,8 @@ def test_calculate_results_for_seven_test_variants(self): self.assertAlmostEqual(credible_intervals[variant_test_7.key][1], 0.8174, places=3) def test_calculate_results_control_is_significant(self): - variant_test = ExperimentVariantFunnelResult(key="test", success_count=100, failure_count=18) - variant_control = ExperimentVariantFunnelResult(key="control", success_count=100, failure_count=10) + variant_test = ExperimentVariantFunnelsBaseStats(key="test", success_count=100, failure_count=18) + variant_control = ExperimentVariantFunnelsBaseStats(key="control", success_count=100, failure_count=10) probabilities = calculate_probabilities(variant_control, [variant_test]) @@ -508,13 +507,13 @@ def test_calculate_results_control_is_significant(self): self.assertAlmostEqual(credible_intervals[variant_test.key][1], 
0.9010, places=3) def test_calculate_results_many_variants_control_is_significant(self): - variant_test_1 = ExperimentVariantFunnelResult(key="test_1", success_count=100, failure_count=20) - variant_test_2 = ExperimentVariantFunnelResult(key="test_2", success_count=100, failure_count=21) - variant_test_3 = ExperimentVariantFunnelResult(key="test_3", success_count=100, failure_count=22) - variant_test_4 = ExperimentVariantFunnelResult(key="test_4", success_count=100, failure_count=23) - variant_test_5 = ExperimentVariantFunnelResult(key="test_5", success_count=100, failure_count=24) - variant_test_6 = ExperimentVariantFunnelResult(key="test_6", success_count=100, failure_count=25) - variant_control = ExperimentVariantFunnelResult(key="control", success_count=100, failure_count=10) + variant_test_1 = ExperimentVariantFunnelsBaseStats(key="test_1", success_count=100, failure_count=20) + variant_test_2 = ExperimentVariantFunnelsBaseStats(key="test_2", success_count=100, failure_count=21) + variant_test_3 = ExperimentVariantFunnelsBaseStats(key="test_3", success_count=100, failure_count=22) + variant_test_4 = ExperimentVariantFunnelsBaseStats(key="test_4", success_count=100, failure_count=23) + variant_test_5 = ExperimentVariantFunnelsBaseStats(key="test_5", success_count=100, failure_count=24) + variant_test_6 = ExperimentVariantFunnelsBaseStats(key="test_6", success_count=100, failure_count=25) + variant_control = ExperimentVariantFunnelsBaseStats(key="control", success_count=100, failure_count=10) variants_test = [ variant_test_1, diff --git a/ee/clickhouse/queries/experiments/test_trend_experiment_result.py b/ee/clickhouse/queries/experiments/test_trend_experiment_result.py index e2ee634812e65..de983e6f1496c 100644 --- a/ee/clickhouse/queries/experiments/test_trend_experiment_result.py +++ b/ee/clickhouse/queries/experiments/test_trend_experiment_result.py @@ -4,14 +4,13 @@ from flaky import flaky -from posthog.constants import ExperimentSignificanceCode -from posthog.hogql_queries.experiments.trend_statistics import ( +from posthog.hogql_queries.experiments.trends_statistics import ( are_results_significant, calculate_credible_intervals, calculate_p_value, calculate_probabilities, ) -from posthog.schema import ExperimentVariantTrendResult +from posthog.schema import ExperimentSignificanceCode, ExperimentVariantTrendsBaseStats Probability = float @@ -24,7 +23,7 @@ def logbeta(x: float, y: float) -> float: # Helper function to calculate probability using a different method than the one used in actual code # calculation: https://www.evanmiller.org/bayesian-ab-testing.html#count_ab def calculate_probability_of_winning_for_target_count_data( - target_variant: ExperimentVariantTrendResult, other_variants: list[ExperimentVariantTrendResult] + target_variant: ExperimentVariantTrendsBaseStats, other_variants: list[ExperimentVariantTrendsBaseStats] ) -> Probability: """ Calculates the probability of winning for target variant. 
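For orientation, the hunks above and below reduce to the following usage pattern for the renamed trends statistics helpers: ExperimentVariantTrendsBaseStats (formerly ExperimentVariantTrendResult) and ExperimentSignificanceCode are now imported from posthog.schema, and the statistics functions live in trends_statistics (formerly trend_statistics). This is a minimal sketch based only on the calls visible in these tests; the argument shape of calculate_credible_intervals is assumed from context rather than shown verbatim in this diff.

    from posthog.hogql_queries.experiments.trends_statistics import (
        are_results_significant,
        calculate_credible_intervals,
        calculate_probabilities,
    )
    from posthog.schema import ExperimentSignificanceCode, ExperimentVariantTrendsBaseStats

    # Variant stats mirror test_calculate_results above: key, count, exposure, absolute_exposure.
    control = ExperimentVariantTrendsBaseStats(key="A", count=20, exposure=1, absolute_exposure=200)
    test = ExperimentVariantTrendsBaseStats(key="B", count=30, exposure=1, absolute_exposure=200)

    # calculate_probabilities returns one win probability per variant, control first.
    probabilities = calculate_probabilities(control, [test])
    # are_results_significant takes the precomputed probabilities as its third argument,
    # as in are_results_significant(variant_control, [variant_test_1, variant_test_2], [...]) above.
    significant, p_value = are_results_significant(control, [test], probabilities)
    # Assumption: credible intervals are computed over the full variant list and keyed by variant key,
    # matching the credible_intervals[variant_test.key] lookups in these tests.
    credible_intervals = calculate_credible_intervals([control, test])

    assert isinstance(significant, ExperimentSignificanceCode)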
@@ -98,8 +97,8 @@ def probability_C_beats_A_and_B_count_data( @flaky(max_runs=10, min_passes=1) class TestTrendExperimentCalculator(unittest.TestCase): def test_calculate_results(self): - variant_control = ExperimentVariantTrendResult(key="A", count=20, exposure=1, absolute_exposure=200) - variant_test = ExperimentVariantTrendResult(key="B", count=30, exposure=1, absolute_exposure=200) + variant_control = ExperimentVariantTrendsBaseStats(key="A", count=20, exposure=1, absolute_exposure=200) + variant_test = ExperimentVariantTrendsBaseStats(key="B", count=30, exposure=1, absolute_exposure=200) probabilities = calculate_probabilities(variant_control, [variant_test]) self.assertAlmostEqual(probabilities[1], 0.92, places=1) @@ -118,8 +117,8 @@ def test_calculate_results(self): self.assertAlmostEqual(credible_intervals[variant_test.key][1], 0.2141, places=3) def test_calculate_results_small_numbers(self): - variant_control = ExperimentVariantTrendResult(key="A", count=2, exposure=1, absolute_exposure=200) - variant_test = ExperimentVariantTrendResult(key="B", count=1, exposure=1, absolute_exposure=200) + variant_control = ExperimentVariantTrendsBaseStats(key="A", count=2, exposure=1, absolute_exposure=200) + variant_test = ExperimentVariantTrendsBaseStats(key="B", count=1, exposure=1, absolute_exposure=200) probabilities = calculate_probabilities(variant_control, [variant_test]) self.assertAlmostEqual(probabilities[1], 0.31, places=1) @@ -146,9 +145,9 @@ def test_calculate_count_data_probability(self): self.assertAlmostEqual(probability, probability2) def test_calculate_results_with_three_variants(self): - variant_control = ExperimentVariantTrendResult(key="A", count=20, exposure=1, absolute_exposure=200) - variant_test_1 = ExperimentVariantTrendResult(key="B", count=26, exposure=1, absolute_exposure=200) - variant_test_2 = ExperimentVariantTrendResult(key="C", count=19, exposure=1, absolute_exposure=200) + variant_control = ExperimentVariantTrendsBaseStats(key="A", count=20, exposure=1, absolute_exposure=200) + variant_test_1 = ExperimentVariantTrendsBaseStats(key="B", count=26, exposure=1, absolute_exposure=200) + variant_test_2 = ExperimentVariantTrendsBaseStats(key="C", count=19, exposure=1, absolute_exposure=200) probabilities = calculate_probabilities(variant_control, [variant_test_1, variant_test_2]) self.assertAlmostEqual(probabilities[0], 0.16, places=1) @@ -172,9 +171,9 @@ def test_calculate_results_with_three_variants(self): self.assertAlmostEqual(credible_intervals[variant_test_2.key][1], 0.1484, places=3) def test_calculate_significance_when_target_variants_underperform(self): - variant_control = ExperimentVariantTrendResult(key="A", count=250, exposure=1, absolute_exposure=200) - variant_test_1 = ExperimentVariantTrendResult(key="B", count=180, exposure=1, absolute_exposure=200) - variant_test_2 = ExperimentVariantTrendResult(key="C", count=50, exposure=1, absolute_exposure=200) + variant_control = ExperimentVariantTrendsBaseStats(key="A", count=250, exposure=1, absolute_exposure=200) + variant_test_1 = ExperimentVariantTrendsBaseStats(key="B", count=180, exposure=1, absolute_exposure=200) + variant_test_2 = ExperimentVariantTrendsBaseStats(key="C", count=50, exposure=1, absolute_exposure=200) # in this case, should choose B as best test variant p_value = calculate_p_value(variant_control, [variant_test_1, variant_test_2]) @@ -188,7 +187,7 @@ def test_calculate_significance_when_target_variants_underperform(self): self.assertEqual(significant, 
ExperimentSignificanceCode.LOW_WIN_PROBABILITY) # new B variant is worse, such that control probability ought to be high enough - variant_test_1 = ExperimentVariantTrendResult(key="B", count=100, exposure=1, absolute_exposure=200) + variant_test_1 = ExperimentVariantTrendsBaseStats(key="B", count=100, exposure=1, absolute_exposure=200) significant, p_value = are_results_significant( variant_control, [variant_test_1, variant_test_2], [0.95, 0.03, 0.02] @@ -205,9 +204,9 @@ def test_calculate_significance_when_target_variants_underperform(self): self.assertAlmostEqual(credible_intervals[variant_test_2.key][1], 0.3295, places=3) def test_results_with_different_exposures(self): - variant_control = ExperimentVariantTrendResult(key="A", count=50, exposure=1.3, absolute_exposure=260) - variant_test_1 = ExperimentVariantTrendResult(key="B", count=30, exposure=1.8, absolute_exposure=360) - variant_test_2 = ExperimentVariantTrendResult(key="C", count=20, exposure=0.7, absolute_exposure=140) + variant_control = ExperimentVariantTrendsBaseStats(key="A", count=50, exposure=1.3, absolute_exposure=260) + variant_test_1 = ExperimentVariantTrendsBaseStats(key="B", count=30, exposure=1.8, absolute_exposure=360) + variant_test_2 = ExperimentVariantTrendsBaseStats(key="C", count=20, exposure=0.7, absolute_exposure=140) probabilities = calculate_probabilities(variant_control, [variant_test_1, variant_test_2]) # a is control self.assertAlmostEqual(probabilities[0], 0.86, places=1) diff --git a/ee/clickhouse/queries/experiments/trend_experiment_result.py b/ee/clickhouse/queries/experiments/trend_experiment_result.py index 74dcd89ccf48d..0971120f2366a 100644 --- a/ee/clickhouse/queries/experiments/trend_experiment_result.py +++ b/ee/clickhouse/queries/experiments/trend_experiment_result.py @@ -15,10 +15,9 @@ TRENDS_CUMULATIVE, TRENDS_LINEAR, UNIQUE_USERS, - ExperimentSignificanceCode, ExperimentNoResultsErrorKeys, ) -from posthog.hogql_queries.experiments.trend_statistics import ( +from posthog.hogql_queries.experiments.trends_statistics import ( are_results_significant, calculate_credible_intervals, calculate_probabilities, @@ -28,6 +27,7 @@ from posthog.models.team import Team from posthog.queries.trends.trends import Trends from posthog.queries.trends.util import ALL_SUPPORTED_MATH_FUNCTIONS +from posthog.schema import ExperimentSignificanceCode Probability = float diff --git a/ee/clickhouse/views/groups.py b/ee/clickhouse/views/groups.py index bfbb375e70990..4970a770854a2 100644 --- a/ee/clickhouse/views/groups.py +++ b/ee/clickhouse/views/groups.py @@ -177,38 +177,32 @@ def property_definitions(self, request: request.Request, **kw): return response.Response(group_type_index_to_properties) - @extend_schema( - parameters=[ - OpenApiParameter( - "group_type_index", - OpenApiTypes.INT, - description="Specify the group type to find property values of", - required=True, - ), - OpenApiParameter( - "key", - OpenApiTypes.STR, - description="Specify the property key to find values for", - required=True, - ), - ] - ) @action(methods=["GET"], detail=False) def property_values(self, request: request.Request, **kw): - rows = sync_execute( - f""" - SELECT {trim_quotes_expr("tupleElement(keysAndValues, 2)")} as value + value_filter = request.GET.get("value") + + query = f""" + SELECT {trim_quotes_expr("tupleElement(keysAndValues, 2)")} as value, count(*) as count FROM groups ARRAY JOIN JSONExtractKeysAndValuesRaw(group_properties) as keysAndValues - WHERE team_id = %(team_id)s AND group_type_index = %(group_type_index)s AND 
tupleElement(keysAndValues, 1) = %(key)s - GROUP BY tupleElement(keysAndValues, 2) - ORDER BY value ASC - """, - { - "team_id": self.team.pk, - "group_type_index": request.GET["group_type_index"], - "key": request.GET["key"], - }, - ) + WHERE team_id = %(team_id)s + AND group_type_index = %(group_type_index)s + AND tupleElement(keysAndValues, 1) = %(key)s + {f"AND {trim_quotes_expr('tupleElement(keysAndValues, 2)')} ILIKE %(value_filter)s" if value_filter else ""} + GROUP BY value + ORDER BY count DESC, value ASC + LIMIT 20 + """ + + params = { + "team_id": self.team.pk, + "group_type_index": request.GET["group_type_index"], + "key": request.GET["key"], + } + + if value_filter: + params["value_filter"] = f"%{value_filter}%" + + rows = sync_execute(query, params) - return response.Response([{"name": name[0]} for name in rows]) + return response.Response([{"name": name, "count": count} for name, count in rows]) diff --git a/ee/clickhouse/views/test/test_clickhouse_experiments.py b/ee/clickhouse/views/test/test_clickhouse_experiments.py index 50bd46c5ac43e..99b8fdc7b75e1 100644 --- a/ee/clickhouse/views/test/test_clickhouse_experiments.py +++ b/ee/clickhouse/views/test/test_clickhouse_experiments.py @@ -5,11 +5,11 @@ from ee.api.test.base import APILicensedTest from dateutil import parser -from posthog.constants import ExperimentSignificanceCode from posthog.models.action.action import Action from posthog.models.cohort.cohort import Cohort from posthog.models.experiment import Experiment from posthog.models.feature_flag import FeatureFlag, get_feature_flags_for_team_in_cache +from posthog.schema import ExperimentSignificanceCode from posthog.test.base import ( ClickhouseTestMixin, _create_event, diff --git a/ee/clickhouse/views/test/test_clickhouse_groups.py b/ee/clickhouse/views/test/test_clickhouse_groups.py index 10e064095c421..22e0d6e21b5ae 100644 --- a/ee/clickhouse/views/test/test_clickhouse_groups.py +++ b/ee/clickhouse/views/test/test_clickhouse_groups.py @@ -309,17 +309,71 @@ def test_property_values(self): group_key="org:6", properties={"industry": "technology"}, ) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:7", + properties={"industry": "finance-technology"}, + ) create_group( team_id=self.team.pk, group_type_index=1, group_key="org:1", properties={"industry": "finance"}, ) + + # Test without query parameter response_data = self.client.get( f"/api/projects/{self.team.id}/groups/property_values/?key=industry&group_type_index=0" ).json() + self.assertEqual(len(response_data), 3) + self.assertEqual( + response_data, + [ + {"name": "finance", "count": 1}, + {"name": "finance-technology", "count": 1}, + {"name": "technology", "count": 1}, + ], + ) + + # Test with query parameter + response_data = self.client.get( + f"/api/projects/{self.team.id}/groups/property_values/?key=industry&group_type_index=0&value=fin" + ).json() + self.assertEqual(len(response_data), 2) + self.assertEqual(response_data, [{"name": "finance", "count": 1}, {"name": "finance-technology", "count": 1}]) + + # Test with query parameter - case insensitive + response_data = self.client.get( + f"/api/projects/{self.team.id}/groups/property_values/?key=industry&group_type_index=0&value=TECH" + ).json() self.assertEqual(len(response_data), 2) - self.assertEqual(response_data, [{"name": "finance"}, {"name": "technology"}]) + self.assertEqual( + response_data, [{"name": "finance-technology", "count": 1}, {"name": "technology", "count": 1}] + ) + + # Test with query parameter - no matches + 
response_data = self.client.get( + f"/api/projects/{self.team.id}/groups/property_values/?key=industry&group_type_index=0&value=healthcare" + ).json() + self.assertEqual(len(response_data), 0) + self.assertEqual(response_data, []) + + # Test with query parameter - exact match + response_data = self.client.get( + f"/api/projects/{self.team.id}/groups/property_values/?key=industry&group_type_index=0&value=technology" + ).json() + self.assertEqual(len(response_data), 2) + self.assertEqual( + response_data, [{"name": "finance-technology", "count": 1}, {"name": "technology", "count": 1}] + ) + + # Test with different group_type_index + response_data = self.client.get( + f"/api/projects/{self.team.id}/groups/property_values/?key=industry&group_type_index=1&value=fin" + ).json() + self.assertEqual(len(response_data), 1) + self.assertEqual(response_data, [{"name": "finance", "count": 1}]) def test_empty_property_values(self): create_group( diff --git a/ee/urls.py b/ee/urls.py index 633766add1439..f0cf168acffb0 100644 --- a/ee/urls.py +++ b/ee/urls.py @@ -30,7 +30,8 @@ def extend_api_router() -> None: projects_router, organizations_router, project_feature_flags_router, - project_dashboards_router, + environment_dashboards_router, + legacy_project_dashboards_router, ) root_router.register(r"billing", billing.BillingViewset, "billing") @@ -67,7 +68,14 @@ def extend_api_router() -> None: "environment_explicit_members", ["team_id"], ) - project_dashboards_router.register( + + environment_dashboards_router.register( + r"collaborators", + dashboard_collaborator.DashboardCollaboratorViewSet, + "environment_dashboard_collaborators", + ["project_id", "dashboard_id"], + ) + legacy_project_dashboards_router.register( r"collaborators", dashboard_collaborator.DashboardCollaboratorViewSet, "project_dashboard_collaborators", diff --git a/frontend/__snapshots__/components-activitylog--insight-activity--dark.png b/frontend/__snapshots__/components-activitylog--insight-activity--dark.png index 02301a7432364..9fa6256d0c217 100644 Binary files a/frontend/__snapshots__/components-activitylog--insight-activity--dark.png and b/frontend/__snapshots__/components-activitylog--insight-activity--dark.png differ diff --git a/frontend/__snapshots__/components-activitylog--insight-activity--light.png b/frontend/__snapshots__/components-activitylog--insight-activity--light.png index 38e6385740f05..bc40839af4c33 100644 Binary files a/frontend/__snapshots__/components-activitylog--insight-activity--light.png and b/frontend/__snapshots__/components-activitylog--insight-activity--light.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--funnel--dark.png b/frontend/__snapshots__/components-cards-insight-details--funnel--dark.png index 4b3bbfb7ef31a..b06167e769fdd 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--funnel--dark.png and b/frontend/__snapshots__/components-cards-insight-details--funnel--dark.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--funnel--light.png b/frontend/__snapshots__/components-cards-insight-details--funnel--light.png index 7b27a2a43b0bc..5d111346c0470 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--funnel--light.png and b/frontend/__snapshots__/components-cards-insight-details--funnel--light.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--lifecycle--dark.png b/frontend/__snapshots__/components-cards-insight-details--lifecycle--dark.png index 
dd7cf8f8f0bab..2e76b7bc519d0 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--lifecycle--dark.png and b/frontend/__snapshots__/components-cards-insight-details--lifecycle--dark.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--lifecycle--light.png b/frontend/__snapshots__/components-cards-insight-details--lifecycle--light.png index 99bc9098d4009..f3469de9d12a5 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--lifecycle--light.png and b/frontend/__snapshots__/components-cards-insight-details--lifecycle--light.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--stickiness--dark.png b/frontend/__snapshots__/components-cards-insight-details--stickiness--dark.png index 830631927cce0..2e76b7bc519d0 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--stickiness--dark.png and b/frontend/__snapshots__/components-cards-insight-details--stickiness--dark.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--stickiness--light.png b/frontend/__snapshots__/components-cards-insight-details--stickiness--light.png index 890ece473187e..f3469de9d12a5 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--stickiness--light.png and b/frontend/__snapshots__/components-cards-insight-details--stickiness--light.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--trends--dark.png b/frontend/__snapshots__/components-cards-insight-details--trends--dark.png index 03aec116ddc62..0c52887653c4d 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--trends--dark.png and b/frontend/__snapshots__/components-cards-insight-details--trends--dark.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--trends--light.png b/frontend/__snapshots__/components-cards-insight-details--trends--light.png index 466db9c1347eb..d6e587e87dad6 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--trends--light.png and b/frontend/__snapshots__/components-cards-insight-details--trends--light.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--trends-horizontal-bar--dark.png b/frontend/__snapshots__/components-cards-insight-details--trends-horizontal-bar--dark.png index 97bec961e91cb..a0f124d36649d 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--trends-horizontal-bar--dark.png and b/frontend/__snapshots__/components-cards-insight-details--trends-horizontal-bar--dark.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--trends-horizontal-bar--light.png b/frontend/__snapshots__/components-cards-insight-details--trends-horizontal-bar--light.png index fad0cd75a960d..b2529fdabb056 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--trends-horizontal-bar--light.png and b/frontend/__snapshots__/components-cards-insight-details--trends-horizontal-bar--light.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--trends-multi--dark.png b/frontend/__snapshots__/components-cards-insight-details--trends-multi--dark.png index 331ace08cbf58..d5687670ca845 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--trends-multi--dark.png and b/frontend/__snapshots__/components-cards-insight-details--trends-multi--dark.png differ diff --git 
a/frontend/__snapshots__/components-cards-insight-details--trends-multi--light.png b/frontend/__snapshots__/components-cards-insight-details--trends-multi--light.png index ab1312b31681b..d40e270309c25 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--trends-multi--light.png and b/frontend/__snapshots__/components-cards-insight-details--trends-multi--light.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--trends-pie--dark.png b/frontend/__snapshots__/components-cards-insight-details--trends-pie--dark.png index 97bec961e91cb..a0f124d36649d 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--trends-pie--dark.png and b/frontend/__snapshots__/components-cards-insight-details--trends-pie--dark.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--trends-pie--light.png b/frontend/__snapshots__/components-cards-insight-details--trends-pie--light.png index fad0cd75a960d..b2529fdabb056 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--trends-pie--light.png and b/frontend/__snapshots__/components-cards-insight-details--trends-pie--light.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--trends-table--dark.png b/frontend/__snapshots__/components-cards-insight-details--trends-table--dark.png index 97bec961e91cb..a0f124d36649d 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--trends-table--dark.png and b/frontend/__snapshots__/components-cards-insight-details--trends-table--dark.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--trends-table--light.png b/frontend/__snapshots__/components-cards-insight-details--trends-table--light.png index fad0cd75a960d..b2529fdabb056 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--trends-table--light.png and b/frontend/__snapshots__/components-cards-insight-details--trends-table--light.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--trends-world-map--dark.png b/frontend/__snapshots__/components-cards-insight-details--trends-world-map--dark.png index 6b08cf6fd3d08..2d14d0f3f17d7 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--trends-world-map--dark.png and b/frontend/__snapshots__/components-cards-insight-details--trends-world-map--dark.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--trends-world-map--light.png b/frontend/__snapshots__/components-cards-insight-details--trends-world-map--light.png index 975a66aad5e55..c2afe2d05cbb1 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--trends-world-map--light.png and b/frontend/__snapshots__/components-cards-insight-details--trends-world-map--light.png differ diff --git a/frontend/__snapshots__/components-errors-error-display--anonymous-error-with-stack-trace--dark.png b/frontend/__snapshots__/components-errors-error-display--anonymous-error-with-stack-trace--dark.png index 4cb00e42bedf4..202f1a5b6cbf9 100644 Binary files a/frontend/__snapshots__/components-errors-error-display--anonymous-error-with-stack-trace--dark.png and b/frontend/__snapshots__/components-errors-error-display--anonymous-error-with-stack-trace--dark.png differ diff --git a/frontend/__snapshots__/components-errors-error-display--anonymous-error-with-stack-trace--light.png b/frontend/__snapshots__/components-errors-error-display--anonymous-error-with-stack-trace--light.png 
index e7e1ce404dc86..acf86648a2db9 100644 Binary files a/frontend/__snapshots__/components-errors-error-display--anonymous-error-with-stack-trace--light.png and b/frontend/__snapshots__/components-errors-error-display--anonymous-error-with-stack-trace--light.png differ diff --git a/frontend/__snapshots__/components-errors-error-display--importing-module--dark.png b/frontend/__snapshots__/components-errors-error-display--importing-module--dark.png index 49606ec8282b8..80e85acfb3a2f 100644 Binary files a/frontend/__snapshots__/components-errors-error-display--importing-module--dark.png and b/frontend/__snapshots__/components-errors-error-display--importing-module--dark.png differ diff --git a/frontend/__snapshots__/components-errors-error-display--importing-module--light.png b/frontend/__snapshots__/components-errors-error-display--importing-module--light.png index 9ef59796d43d1..bcb070820baf4 100644 Binary files a/frontend/__snapshots__/components-errors-error-display--importing-module--light.png and b/frontend/__snapshots__/components-errors-error-display--importing-module--light.png differ diff --git a/frontend/__snapshots__/components-errors-error-display--resize-observer-loop-limit-exceeded--dark.png b/frontend/__snapshots__/components-errors-error-display--resize-observer-loop-limit-exceeded--dark.png index 655e8fe709e2f..ea3e5b099ec89 100644 Binary files a/frontend/__snapshots__/components-errors-error-display--resize-observer-loop-limit-exceeded--dark.png and b/frontend/__snapshots__/components-errors-error-display--resize-observer-loop-limit-exceeded--dark.png differ diff --git a/frontend/__snapshots__/components-errors-error-display--resize-observer-loop-limit-exceeded--light.png b/frontend/__snapshots__/components-errors-error-display--resize-observer-loop-limit-exceeded--light.png index 6e76d42c30f72..540d3972b0895 100644 Binary files a/frontend/__snapshots__/components-errors-error-display--resize-observer-loop-limit-exceeded--light.png and b/frontend/__snapshots__/components-errors-error-display--resize-observer-loop-limit-exceeded--light.png differ diff --git a/frontend/__snapshots__/components-errors-error-display--safari-script-error--dark.png b/frontend/__snapshots__/components-errors-error-display--safari-script-error--dark.png index 624e96d22bdcc..fa4a28066135f 100644 Binary files a/frontend/__snapshots__/components-errors-error-display--safari-script-error--dark.png and b/frontend/__snapshots__/components-errors-error-display--safari-script-error--dark.png differ diff --git a/frontend/__snapshots__/components-errors-error-display--safari-script-error--light.png b/frontend/__snapshots__/components-errors-error-display--safari-script-error--light.png index 5e0992349e529..ef06a5729bc58 100644 Binary files a/frontend/__snapshots__/components-errors-error-display--safari-script-error--light.png and b/frontend/__snapshots__/components-errors-error-display--safari-script-error--light.png differ diff --git a/frontend/__snapshots__/components-properties-table--basic--dark.png b/frontend/__snapshots__/components-properties-table--basic--dark.png index 3890636e34f8a..a561e6ed86b77 100644 Binary files a/frontend/__snapshots__/components-properties-table--basic--dark.png and b/frontend/__snapshots__/components-properties-table--basic--dark.png differ diff --git a/frontend/__snapshots__/components-properties-table--basic--light.png b/frontend/__snapshots__/components-properties-table--basic--light.png index de0f90740eb37..43498d9e17326 100644 Binary files 
a/frontend/__snapshots__/components-properties-table--basic--light.png and b/frontend/__snapshots__/components-properties-table--basic--light.png differ diff --git a/frontend/__snapshots__/components-properties-table--dollar-properties-on-event--dark.png b/frontend/__snapshots__/components-properties-table--dollar-properties-on-event--dark.png index fdec449544189..80868ba79c924 100644 Binary files a/frontend/__snapshots__/components-properties-table--dollar-properties-on-event--dark.png and b/frontend/__snapshots__/components-properties-table--dollar-properties-on-event--dark.png differ diff --git a/frontend/__snapshots__/components-properties-table--dollar-properties-on-event--light.png b/frontend/__snapshots__/components-properties-table--dollar-properties-on-event--light.png index 4208f3bc30e2e..37d723ef28b8c 100644 Binary files a/frontend/__snapshots__/components-properties-table--dollar-properties-on-event--light.png and b/frontend/__snapshots__/components-properties-table--dollar-properties-on-event--light.png differ diff --git a/frontend/__snapshots__/components-properties-table--dollar-properties-on-person--dark.png b/frontend/__snapshots__/components-properties-table--dollar-properties-on-person--dark.png index e6488189d84e4..d13cdb0246334 100644 Binary files a/frontend/__snapshots__/components-properties-table--dollar-properties-on-person--dark.png and b/frontend/__snapshots__/components-properties-table--dollar-properties-on-person--dark.png differ diff --git a/frontend/__snapshots__/components-properties-table--dollar-properties-on-person--light.png b/frontend/__snapshots__/components-properties-table--dollar-properties-on-person--light.png index 84d14a856b752..d1064c7d548bf 100644 Binary files a/frontend/__snapshots__/components-properties-table--dollar-properties-on-person--light.png and b/frontend/__snapshots__/components-properties-table--dollar-properties-on-person--light.png differ diff --git a/frontend/__snapshots__/exporter-exporter--event-table-insight--dark.png b/frontend/__snapshots__/exporter-exporter--event-table-insight--dark.png index cf6449f3311d3..27f9ce425952e 100644 Binary files a/frontend/__snapshots__/exporter-exporter--event-table-insight--dark.png and b/frontend/__snapshots__/exporter-exporter--event-table-insight--dark.png differ diff --git a/frontend/__snapshots__/exporter-exporter--event-table-insight--light.png b/frontend/__snapshots__/exporter-exporter--event-table-insight--light.png index a103d51e26771..c770b9ea51123 100644 Binary files a/frontend/__snapshots__/exporter-exporter--event-table-insight--light.png and b/frontend/__snapshots__/exporter-exporter--event-table-insight--light.png differ diff --git a/frontend/__snapshots__/exporter-exporter--sql-insight-no-results--dark.png b/frontend/__snapshots__/exporter-exporter--sql-insight-no-results--dark.png index 116e773e02c4e..72e68a0e0126a 100644 Binary files a/frontend/__snapshots__/exporter-exporter--sql-insight-no-results--dark.png and b/frontend/__snapshots__/exporter-exporter--sql-insight-no-results--dark.png differ diff --git a/frontend/__snapshots__/exporter-exporter--sql-insight-no-results--light.png b/frontend/__snapshots__/exporter-exporter--sql-insight-no-results--light.png index 50bbf12ca4b16..5334f7abc4f78 100644 Binary files a/frontend/__snapshots__/exporter-exporter--sql-insight-no-results--light.png and b/frontend/__snapshots__/exporter-exporter--sql-insight-no-results--light.png differ diff --git a/frontend/__snapshots__/exporter-exporter--trends-line-insight-detailed--dark.png 
b/frontend/__snapshots__/exporter-exporter--trends-line-insight-detailed--dark.png index b54d4facb0bfe..e3e93b3dd9678 100644 Binary files a/frontend/__snapshots__/exporter-exporter--trends-line-insight-detailed--dark.png and b/frontend/__snapshots__/exporter-exporter--trends-line-insight-detailed--dark.png differ diff --git a/frontend/__snapshots__/exporter-exporter--trends-line-insight-detailed--light.png b/frontend/__snapshots__/exporter-exporter--trends-line-insight-detailed--light.png index 332ea24ce3084..d4377362d9270 100644 Binary files a/frontend/__snapshots__/exporter-exporter--trends-line-insight-detailed--light.png and b/frontend/__snapshots__/exporter-exporter--trends-line-insight-detailed--light.png differ diff --git a/frontend/__snapshots__/exporter-exporter--trends-line-insight-legend--dark.png b/frontend/__snapshots__/exporter-exporter--trends-line-insight-legend--dark.png index c2c9bf4b33166..8ac461d15d0bb 100644 Binary files a/frontend/__snapshots__/exporter-exporter--trends-line-insight-legend--dark.png and b/frontend/__snapshots__/exporter-exporter--trends-line-insight-legend--dark.png differ diff --git a/frontend/__snapshots__/exporter-exporter--trends-line-insight-legend--light.png b/frontend/__snapshots__/exporter-exporter--trends-line-insight-legend--light.png index 74ac1c339682a..bbb59b784ba62 100644 Binary files a/frontend/__snapshots__/exporter-exporter--trends-line-insight-legend--light.png and b/frontend/__snapshots__/exporter-exporter--trends-line-insight-legend--light.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--experiment-not-found--dark.png b/frontend/__snapshots__/scenes-app-experiments--experiment-not-found--dark.png index 0d5d3ebcc36d0..98d4a882ab5d5 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--experiment-not-found--dark.png and b/frontend/__snapshots__/scenes-app-experiments--experiment-not-found--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--experiment-not-found--light.png b/frontend/__snapshots__/scenes-app-experiments--experiment-not-found--light.png index 7f58cbcc0104b..5336f30852c7d 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--experiment-not-found--light.png and b/frontend/__snapshots__/scenes-app-experiments--experiment-not-found--light.png differ diff --git a/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--dark.png b/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--dark.png index 62418d8fb584a..b4b8f4cfac4ae 100644 Binary files a/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--dark.png and b/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--light.png b/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--light.png index 404b840975200..5e27a455721a6 100644 Binary files a/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--light.png and b/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png index 74e8409c16b8b..78ea79ea3f745 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png and 
b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--light.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--thread--dark.png b/frontend/__snapshots__/scenes-app-max-ai--thread--dark.png index b6706129af789..d0f525ffb382c 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--thread--dark.png and b/frontend/__snapshots__/scenes-app-max-ai--thread--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--thread--light.png b/frontend/__snapshots__/scenes-app-max-ai--thread--light.png index c28a32b736a5e..f8d64397cb918 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--thread--light.png and b/frontend/__snapshots__/scenes-app-max-ai--thread--light.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--welcome--dark.png b/frontend/__snapshots__/scenes-app-max-ai--welcome--dark.png index a439dd9822a19..72976d20f6979 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--welcome--dark.png and b/frontend/__snapshots__/scenes-app-max-ai--welcome--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--welcome--light.png b/frontend/__snapshots__/scenes-app-max-ai--welcome--light.png index e46c5b1b9ded0..6047a48da0df7 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--welcome--light.png and b/frontend/__snapshots__/scenes-app-max-ai--welcome--light.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--welcome-loading-suggestions--dark.png b/frontend/__snapshots__/scenes-app-max-ai--welcome-loading-suggestions--dark.png index 8fd19ae835a64..72976d20f6979 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--welcome-loading-suggestions--dark.png and b/frontend/__snapshots__/scenes-app-max-ai--welcome-loading-suggestions--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--welcome-loading-suggestions--light.png b/frontend/__snapshots__/scenes-app-max-ai--welcome-loading-suggestions--light.png index 96b45dc3059b4..64ba4b1cac7bb 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--welcome-loading-suggestions--light.png and b/frontend/__snapshots__/scenes-app-max-ai--welcome-loading-suggestions--light.png differ diff --git a/frontend/__snapshots__/scenes-app-persons-modal--empty--dark.png b/frontend/__snapshots__/scenes-app-persons-modal--empty--dark.png index 4643f3192e890..22afc5bd775e2 100644 Binary files a/frontend/__snapshots__/scenes-app-persons-modal--empty--dark.png and b/frontend/__snapshots__/scenes-app-persons-modal--empty--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-persons-modal--empty--light.png b/frontend/__snapshots__/scenes-app-persons-modal--empty--light.png index f651a76388281..080e592257657 100644 Binary files a/frontend/__snapshots__/scenes-app-persons-modal--empty--light.png and b/frontend/__snapshots__/scenes-app-persons-modal--empty--light.png differ diff --git a/frontend/__snapshots__/scenes-app-persons-modal--server-error--dark.png b/frontend/__snapshots__/scenes-app-persons-modal--server-error--dark.png index be5d0970ca208..ee5eb6d97e9fd 100644 Binary files a/frontend/__snapshots__/scenes-app-persons-modal--server-error--dark.png and b/frontend/__snapshots__/scenes-app-persons-modal--server-error--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-persons-modal--server-error--light.png b/frontend/__snapshots__/scenes-app-persons-modal--server-error--light.png index 495ede9027f52..daa7e4e9adab1 100644 Binary files 
a/frontend/__snapshots__/scenes-app-persons-modal--server-error--light.png and b/frontend/__snapshots__/scenes-app-persons-modal--server-error--light.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--dark.png index 8ef8f708a388a..b8b549ad31a31 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--dark.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--light.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--light.png index 3110451381835..7df6f432cbc78 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--light.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--light.png differ diff --git a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs--dark.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs--dark.png index 12428ce2a81d1..4fe845177fd6b 100644 Binary files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs--dark.png and b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs--light.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs--light.png index cdffe0e290e45..d4cf49f5c030a 100644 Binary files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs--light.png and b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs--light.png differ diff --git a/frontend/__snapshots__/scenes-app-surveys--new-multi-question-survey-section--dark.png b/frontend/__snapshots__/scenes-app-surveys--new-multi-question-survey-section--dark.png index 0a15706cb4e4d..1edab312f850d 100644 Binary files a/frontend/__snapshots__/scenes-app-surveys--new-multi-question-survey-section--dark.png and b/frontend/__snapshots__/scenes-app-surveys--new-multi-question-survey-section--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-surveys--new-multi-question-survey-section--light.png b/frontend/__snapshots__/scenes-app-surveys--new-multi-question-survey-section--light.png index bc01fbb47e885..4f71d1ea9ea70 100644 Binary files a/frontend/__snapshots__/scenes-app-surveys--new-multi-question-survey-section--light.png and b/frontend/__snapshots__/scenes-app-surveys--new-multi-question-survey-section--light.png differ diff --git a/frontend/__snapshots__/scenes-app-surveys--new-survey--dark.png b/frontend/__snapshots__/scenes-app-surveys--new-survey--dark.png index 0a15706cb4e4d..1edab312f850d 100644 Binary files a/frontend/__snapshots__/scenes-app-surveys--new-survey--dark.png and b/frontend/__snapshots__/scenes-app-surveys--new-survey--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-surveys--new-survey--light.png b/frontend/__snapshots__/scenes-app-surveys--new-survey--light.png index bc01fbb47e885..4f71d1ea9ea70 100644 Binary files a/frontend/__snapshots__/scenes-app-surveys--new-survey--light.png and b/frontend/__snapshots__/scenes-app-surveys--new-survey--light.png differ diff --git a/frontend/__snapshots__/scenes-app-surveys--new-survey-customisation-section--dark.png b/frontend/__snapshots__/scenes-app-surveys--new-survey-customisation-section--dark.png index 6314548b93fc9..7d95e9fd8b9fa 100644 
Binary files a/frontend/__snapshots__/scenes-app-surveys--new-survey-customisation-section--dark.png and b/frontend/__snapshots__/scenes-app-surveys--new-survey-customisation-section--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-surveys--new-survey-customisation-section--light.png b/frontend/__snapshots__/scenes-app-surveys--new-survey-customisation-section--light.png index ee49269c03ab0..22ce2f0f5dd0d 100644 Binary files a/frontend/__snapshots__/scenes-app-surveys--new-survey-customisation-section--light.png and b/frontend/__snapshots__/scenes-app-surveys--new-survey-customisation-section--light.png differ diff --git a/frontend/__snapshots__/scenes-app-surveys--new-survey-with-html-question-description--dark.png b/frontend/__snapshots__/scenes-app-surveys--new-survey-with-html-question-description--dark.png index 640ccdf6f8368..2810d2fb0f8b7 100644 Binary files a/frontend/__snapshots__/scenes-app-surveys--new-survey-with-html-question-description--dark.png and b/frontend/__snapshots__/scenes-app-surveys--new-survey-with-html-question-description--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-surveys--new-survey-with-html-question-description--light.png b/frontend/__snapshots__/scenes-app-surveys--new-survey-with-html-question-description--light.png index f028151e1f64c..937b4384bc486 100644 Binary files a/frontend/__snapshots__/scenes-app-surveys--new-survey-with-html-question-description--light.png and b/frontend/__snapshots__/scenes-app-surveys--new-survey-with-html-question-description--light.png differ diff --git a/frontend/__snapshots__/scenes-app-surveys--new-survey-with-text-question-description-that-does-not-render-html--dark.png b/frontend/__snapshots__/scenes-app-surveys--new-survey-with-text-question-description-that-does-not-render-html--dark.png index 03c99a26ad3f4..27fb3f88c9ecd 100644 Binary files a/frontend/__snapshots__/scenes-app-surveys--new-survey-with-text-question-description-that-does-not-render-html--dark.png and b/frontend/__snapshots__/scenes-app-surveys--new-survey-with-text-question-description-that-does-not-render-html--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-surveys--new-survey-with-text-question-description-that-does-not-render-html--light.png b/frontend/__snapshots__/scenes-app-surveys--new-survey-with-text-question-description-that-does-not-render-html--light.png index c61b6af2f9573..9b00b16d952d9 100644 Binary files a/frontend/__snapshots__/scenes-app-surveys--new-survey-with-text-question-description-that-does-not-render-html--light.png and b/frontend/__snapshots__/scenes-app-surveys--new-survey-with-text-question-description-that-does-not-render-html--light.png differ diff --git a/frontend/__snapshots__/scenes-app-surveys--survey-templates--dark.png b/frontend/__snapshots__/scenes-app-surveys--survey-templates--dark.png index 171ff432aab34..78547df6e518c 100644 Binary files a/frontend/__snapshots__/scenes-app-surveys--survey-templates--dark.png and b/frontend/__snapshots__/scenes-app-surveys--survey-templates--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-surveys--survey-templates--light.png b/frontend/__snapshots__/scenes-app-surveys--survey-templates--light.png index 565188d48e86a..810be7de30307 100644 Binary files a/frontend/__snapshots__/scenes-app-surveys--survey-templates--light.png and b/frontend/__snapshots__/scenes-app-surveys--survey-templates--light.png differ diff --git a/frontend/__snapshots__/scenes-app-surveys--surveys-global-settings--dark.png 
b/frontend/__snapshots__/scenes-app-surveys--surveys-global-settings--dark.png new file mode 100644 index 0000000000000..00c3ef784c458 Binary files /dev/null and b/frontend/__snapshots__/scenes-app-surveys--surveys-global-settings--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-surveys--surveys-global-settings--light.png b/frontend/__snapshots__/scenes-app-surveys--surveys-global-settings--light.png new file mode 100644 index 0000000000000..7f1124a16d059 Binary files /dev/null and b/frontend/__snapshots__/scenes-app-surveys--surveys-global-settings--light.png differ diff --git a/frontend/__snapshots__/scenes-app-surveys--surveys-list--dark.png b/frontend/__snapshots__/scenes-app-surveys--surveys-list--dark.png index 991ec12471491..70d439fbf26a9 100644 Binary files a/frontend/__snapshots__/scenes-app-surveys--surveys-list--dark.png and b/frontend/__snapshots__/scenes-app-surveys--surveys-list--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-surveys--surveys-list--light.png b/frontend/__snapshots__/scenes-app-surveys--surveys-list--light.png index cf9dbc52c21f1..0ae472abeef7e 100644 Binary files a/frontend/__snapshots__/scenes-app-surveys--surveys-list--light.png and b/frontend/__snapshots__/scenes-app-surveys--surveys-list--light.png differ diff --git a/frontend/__snapshots__/scenes-other-billing--billing--dark.png b/frontend/__snapshots__/scenes-other-billing--billing--dark.png index 3e80ab51cf847..001bea8f2daab 100644 Binary files a/frontend/__snapshots__/scenes-other-billing--billing--dark.png and b/frontend/__snapshots__/scenes-other-billing--billing--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-billing--billing--light.png b/frontend/__snapshots__/scenes-other-billing--billing--light.png index a3aa8dd6bea18..71fc9742561df 100644 Binary files a/frontend/__snapshots__/scenes-other-billing--billing--light.png and b/frontend/__snapshots__/scenes-other-billing--billing--light.png differ diff --git a/frontend/__snapshots__/scenes-other-billing--billing-with-credit-cta--dark.png b/frontend/__snapshots__/scenes-other-billing--billing-with-credit-cta--dark.png index 0ec07ed31666d..957b54b65e78a 100644 Binary files a/frontend/__snapshots__/scenes-other-billing--billing-with-credit-cta--dark.png and b/frontend/__snapshots__/scenes-other-billing--billing-with-credit-cta--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-billing--billing-with-credit-cta--light.png b/frontend/__snapshots__/scenes-other-billing--billing-with-credit-cta--light.png index e188811658eba..444c0bb0cdc24 100644 Binary files a/frontend/__snapshots__/scenes-other-billing--billing-with-credit-cta--light.png and b/frontend/__snapshots__/scenes-other-billing--billing-with-credit-cta--light.png differ diff --git a/frontend/__snapshots__/scenes-other-billing--billing-with-credits--dark.png b/frontend/__snapshots__/scenes-other-billing--billing-with-credits--dark.png index 2c453959c6511..f8700371c72c1 100644 Binary files a/frontend/__snapshots__/scenes-other-billing--billing-with-credits--dark.png and b/frontend/__snapshots__/scenes-other-billing--billing-with-credits--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-billing--billing-with-credits--light.png b/frontend/__snapshots__/scenes-other-billing--billing-with-credits--light.png index 443fb7dbdba2f..cc9f75044b5b4 100644 Binary files a/frontend/__snapshots__/scenes-other-billing--billing-with-credits--light.png and b/frontend/__snapshots__/scenes-other-billing--billing-with-credits--light.png 
differ diff --git a/frontend/__snapshots__/scenes-other-billing--billing-with-discount--dark.png b/frontend/__snapshots__/scenes-other-billing--billing-with-discount--dark.png index aa362bf427456..dc8947e11bfac 100644 Binary files a/frontend/__snapshots__/scenes-other-billing--billing-with-discount--dark.png and b/frontend/__snapshots__/scenes-other-billing--billing-with-discount--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-billing--billing-with-discount--light.png b/frontend/__snapshots__/scenes-other-billing--billing-with-discount--light.png index d5ad94b2cc832..229c85c75480a 100644 Binary files a/frontend/__snapshots__/scenes-other-billing--billing-with-discount--light.png and b/frontend/__snapshots__/scenes-other-billing--billing-with-discount--light.png differ diff --git a/frontend/__snapshots__/scenes-other-billing--billing-with-limit-and-100-percent-discount--dark.png b/frontend/__snapshots__/scenes-other-billing--billing-with-limit-and-100-percent-discount--dark.png index 3d7c9ce392160..8a0036d475bfa 100644 Binary files a/frontend/__snapshots__/scenes-other-billing--billing-with-limit-and-100-percent-discount--dark.png and b/frontend/__snapshots__/scenes-other-billing--billing-with-limit-and-100-percent-discount--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-billing--billing-with-limit-and-100-percent-discount--light.png b/frontend/__snapshots__/scenes-other-billing--billing-with-limit-and-100-percent-discount--light.png index 1c76a3c15621d..3410f9cbfbcf0 100644 Binary files a/frontend/__snapshots__/scenes-other-billing--billing-with-limit-and-100-percent-discount--light.png and b/frontend/__snapshots__/scenes-other-billing--billing-with-limit-and-100-percent-discount--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png index 2a139e2120b78..e97345976d3d7 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png index 8de50aeef6858..a61ad424b9c7f 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-user--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-user--dark.png index de64e98db2029..b76714d7b9006 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-user--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-user--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-user--light.png b/frontend/__snapshots__/scenes-other-settings--settings-user--light.png index 85dda5743b0b1..5d1b0d30a4d1c 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-user--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-user--light.png differ diff --git a/frontend/public/services/airtable.png b/frontend/public/services/airtable.png new file mode 100644 index 
0000000000000..4d496006b1cc2 Binary files /dev/null and b/frontend/public/services/airtable.png differ diff --git a/frontend/public/services/attio.png b/frontend/public/services/attio.png new file mode 100644 index 0000000000000..6065fbe8977e1 Binary files /dev/null and b/frontend/public/services/attio.png differ diff --git a/frontend/public/services/discord.png b/frontend/public/services/discord.png new file mode 100644 index 0000000000000..d21ba297b0d9c Binary files /dev/null and b/frontend/public/services/discord.png differ diff --git a/frontend/public/services/google-ads.png b/frontend/public/services/google-ads.png new file mode 100644 index 0000000000000..04cb313d1a6a6 Binary files /dev/null and b/frontend/public/services/google-ads.png differ diff --git a/frontend/public/services/klaviyo.png b/frontend/public/services/klaviyo.png new file mode 100644 index 0000000000000..07c9f2764489a Binary files /dev/null and b/frontend/public/services/klaviyo.png differ diff --git a/frontend/src/exporter/ExportedInsight/ExportedInsight.tsx b/frontend/src/exporter/ExportedInsight/ExportedInsight.tsx index 8e37d85ccee66..0a287abee317f 100644 --- a/frontend/src/exporter/ExportedInsight/ExportedInsight.tsx +++ b/frontend/src/exporter/ExportedInsight/ExportedInsight.tsx @@ -74,7 +74,7 @@ export function ExportedInsight({
-
+

{name || derived_name} diff --git a/frontend/src/layout.ejs b/frontend/src/layout.ejs index fe1f77f7731cb..b9649a9934ad4 100644 --- a/frontend/src/layout.ejs +++ b/frontend/src/layout.ejs @@ -7,7 +7,7 @@ {% include "head.html" %} <%= htmlWebpackPlugin.tags.headTags %><%/* This adds the main.css file! */%> diff --git a/frontend/src/layout.html b/frontend/src/layout.html index 688b942f5b217..a07ce3034f1a8 100644 --- a/frontend/src/layout.html +++ b/frontend/src/layout.html @@ -7,7 +7,7 @@ {% include "head.html" %} { title: (
Rollout Percentage
-
- Redistribute
+
+
), @@ -121,6 +125,7 @@ export const SidePanelExperimentFeatureFlag = (): JSX.Element => { }} min={0} max={100} + suffix={%} /> ), }, diff --git a/frontend/src/layout/navigation/EnvironmentSwitcher.tsx b/frontend/src/layout/navigation/EnvironmentSwitcher.tsx index 4744ed1571aa3..2432cb72c63f7 100644 --- a/frontend/src/layout/navigation/EnvironmentSwitcher.tsx +++ b/frontend/src/layout/navigation/EnvironmentSwitcher.tsx @@ -16,6 +16,14 @@ import { AvailableFeature } from '~/types' import { globalModalsLogic } from '../GlobalModals' import { environmentSwitcherLogic } from './environmentsSwitcherLogic' +/** + * Regex matching a possible emoji (any emoji) at the beginning of the string. + * Examples: In "👋 Hello", match group 1 is "👋". In "Hello" or "Hello 👋", there are no matches. + * From https://stackoverflow.com/a/67705964/351526 + */ +const EMOJI_INITIAL_REGEX = + /^(\u00a9|\u00ae|[\u25a0-\u27bf]|\ud83c[\ud000-\udfff]|\ud83d[\ud000-\udfff]|\ud83e[\ud000-\udfff]) / + export function EnvironmentSwitcherOverlay({ onClickInside }: { onClickInside?: () => void }): JSX.Element { const { sortedProjectsMap } = useValues(environmentSwitcherLogic) const { currentOrganization, projectCreationForbiddenReason } = useValues(organizationLogic) @@ -31,10 +39,16 @@ export function EnvironmentSwitcherOverlay({ onClickInside }: { onClickInside?: const projectSectionsResult: LemonMenuSection[] = [] for (const [projectId, [projectName, projectTeams]] of sortedProjectsMap.entries()) { + const projectNameWithoutEmoji = projectName.replace(EMOJI_INITIAL_REGEX, '').trim() + const projectNameEmojiMatch = projectName.match(EMOJI_INITIAL_REGEX)?.[1] const projectItems: LemonMenuItem[] = [ { - label: projectName, - icon: , + label: {projectNameWithoutEmoji}, + icon: projectNameEmojiMatch ? ( +
{projectNameEmojiMatch}
+ ) : ( + + ), disabledReason: 'Select an environment of this project below', onClick: () => {}, sideAction: { @@ -50,6 +64,7 @@ export function EnvironmentSwitcherOverlay({ onClickInside }: { onClickInside?: }, 'data-attr': 'new-environment-button', }, + className: 'opacity-100', }, ] for (const team of projectTeams) { @@ -154,6 +169,7 @@ function EnvironmentSwitcherSearch(): JSX.Element { value={environmentSwitcherSearch} onChange={setEnvironmentSwitcherSearch} type="search" + fullWidth autoFocus placeholder="Search projects & environments" className="min-w-64" diff --git a/frontend/src/layout/navigation/ProjectSwitcher.tsx b/frontend/src/layout/navigation/ProjectSwitcher.tsx index 60828a1b34059..03760245c9d32 100644 --- a/frontend/src/layout/navigation/ProjectSwitcher.tsx +++ b/frontend/src/layout/navigation/ProjectSwitcher.tsx @@ -101,6 +101,17 @@ function OtherProjectButton({ team }: { team: TeamBasicType; onClickInside?: () // and after switching is on a different page than before. let route = removeProjectIdIfPresent(location.pathname) route = removeFlagIdIfPresent(route) + + // List of routes that should redirect to project home + // instead of keeping the current path. + const redirectToHomeRoutes = ['/products', '/onboarding'] + + const shouldRedirectToHome = redirectToHomeRoutes.some((redirectRoute) => route.includes(redirectRoute)) + + if (shouldRedirectToHome) { + return urls.project(team.id) // Go to project home + } + return urls.project(team.id, route) }, [location.pathname]) diff --git a/frontend/src/lib/api.test.ts b/frontend/src/lib/api.test.ts index bea56e12d350f..a7da16722813c 100644 --- a/frontend/src/lib/api.test.ts +++ b/frontend/src/lib/api.test.ts @@ -37,7 +37,7 @@ describe('API helper', () => { ) expect(fakeFetch).toHaveBeenCalledWith( - '/api/projects/2/events?properties=%5B%7B%22key%22%3A%22something%22%2C%22value%22%3A%22is_set%22%2C%22operator%22%3A%22is_set%22%2C%22type%22%3A%22event%22%7D%5D&limit=10&orderBy=%5B%22-timestamp%22%5D', + '/api/environments/2/events?properties=%5B%7B%22key%22%3A%22something%22%2C%22value%22%3A%22is_set%22%2C%22operator%22%3A%22is_set%22%2C%22type%22%3A%22event%22%7D%5D&limit=10&orderBy=%5B%22-timestamp%22%5D', { signal: undefined, headers: { diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts index b9b627f28953b..31933112d7b9a 100644 --- a/frontend/src/lib/api.ts +++ b/frontend/src/lib/api.ts @@ -16,6 +16,7 @@ import { DatabaseSerializedFieldType, ErrorTrackingGroup, HogCompileResponse, + HogQLVariable, QuerySchema, QueryStatusResponse, RecordingsQuery, @@ -49,11 +50,11 @@ import { Experiment, ExportedAssetType, ExternalDataJob, + ExternalDataSource, ExternalDataSourceCreatePayload, ExternalDataSourceSchema, ExternalDataSourceSyncSchema, ExternalDataSourceType, - ExternalDataStripeSource, FeatureFlagAssociatedRoleType, FeatureFlagType, Group, @@ -83,6 +84,7 @@ import { PluginConfigTypeNew, PluginConfigWithPluginInfoNew, PluginLogEntry, + ProjectType, PropertyDefinition, PropertyDefinitionType, QueryBasedInsightModel, @@ -138,7 +140,7 @@ export interface CountedPaginatedResponse extends PaginatedResponse { } export interface ActivityLogPaginatedResponse extends PaginatedResponse { - total_count: number // FIXME: This is non-standard naming, DRF uses `count` and we should use that consistently + count: number } export interface ApiMethodOptions { @@ -195,6 +197,7 @@ export async function getJSONOrNull(response: Response): Promise { export class ApiConfig { private static _currentOrganizationId: OrganizationType['id'] | 
null = null + private static _currentProjectId: ProjectType['id'] | null = null private static _currentTeamId: TeamType['id'] | null = null static getCurrentOrganizationId(): OrganizationType['id'] { @@ -218,6 +221,17 @@ export class ApiConfig { static setCurrentTeamId(id: TeamType['id']): void { this._currentTeamId = id } + + static getCurrentProjectId(): ProjectType['id'] { + if (!this._currentProjectId) { + throw new Error('Project ID is not known.') + } + return this._currentProjectId + } + + static setCurrentProjectId(id: ProjectType['id']): void { + this._currentProjectId = id + } } class ApiRequest { @@ -304,13 +318,22 @@ class ApiRequest { return this.addPathComponent('projects') } - public projectsDetail(id: TeamType['id'] = ApiConfig.getCurrentTeamId()): ApiRequest { + public projectsDetail(id: ProjectType['id'] = ApiConfig.getCurrentProjectId()): ApiRequest { return this.projects().addPathComponent(id) } + // # Projects + public environments(): ApiRequest { + return this.addPathComponent('environments') + } + + public environmentsDetail(id: TeamType['id'] = ApiConfig.getCurrentTeamId()): ApiRequest { + return this.environments().addPathComponent(id) + } + // # Insights public insights(teamId?: TeamType['id']): ApiRequest { - return this.projectsDetail(teamId).addPathComponent('insights') + return this.environmentsDetail(teamId).addPathComponent('insights') } public insight(id: QueryBasedInsightModel['id'], teamId?: TeamType['id']): ApiRequest { @@ -335,7 +358,7 @@ class ApiRequest { } public pluginConfigs(teamId?: TeamType['id']): ApiRequest { - return this.projectsDetail(teamId).addPathComponent('plugin_configs') + return this.environmentsDetail(teamId).addPathComponent('plugin_configs') } public pluginConfig(id: number, teamId?: TeamType['id']): ApiRequest { @@ -381,7 +404,7 @@ class ApiRequest { // # Exports public exports(teamId?: TeamType['id']): ApiRequest { - return this.projectsDetail(teamId).addPathComponent('exports') + return this.environmentsDetail(teamId).addPathComponent('exports') } public export(id: number, teamId?: TeamType['id']): ApiRequest { @@ -390,7 +413,7 @@ class ApiRequest { // # Events public events(teamId?: TeamType['id']): ApiRequest { - return this.projectsDetail(teamId).addPathComponent('events') + return this.environmentsDetail(teamId).addPathComponent('events') } public event(id: EventType['id'], teamId?: TeamType['id']): ApiRequest { @@ -402,16 +425,16 @@ class ApiRequest { } // # Data management - public eventDefinitions(teamId?: TeamType['id']): ApiRequest { - return this.projectsDetail(teamId).addPathComponent('event_definitions') + public eventDefinitions(projectId?: ProjectType['id']): ApiRequest { + return this.projectsDetail(projectId).addPathComponent('event_definitions') } - public eventDefinitionDetail(eventDefinitionId: EventDefinition['id'], teamId?: TeamType['id']): ApiRequest { - return this.projectsDetail(teamId).addPathComponent('event_definitions').addPathComponent(eventDefinitionId) + public eventDefinitionDetail(eventDefinitionId: EventDefinition['id'], projectId?: ProjectType['id']): ApiRequest { + return this.projectsDetail(projectId).addPathComponent('event_definitions').addPathComponent(eventDefinitionId) } - public propertyDefinitions(teamId?: TeamType['id']): ApiRequest { - return this.projectsDetail(teamId).addPathComponent('property_definitions') + public propertyDefinitions(projectId?: ProjectType['id']): ApiRequest { + return this.projectsDetail(projectId).addPathComponent('property_definitions') } public 
propertyDefinitionDetail( @@ -458,13 +481,15 @@ class ApiRequest { // Recordings public recording(recordingId: SessionRecordingType['id'], teamId?: TeamType['id']): ApiRequest { - return this.projectsDetail(teamId).addPathComponent('session_recordings').addPathComponent(recordingId) + return this.environmentsDetail(teamId).addPathComponent('session_recordings').addPathComponent(recordingId) } public recordings(teamId?: TeamType['id']): ApiRequest { - return this.projectsDetail(teamId).addPathComponent('session_recordings') + return this.environmentsDetail(teamId).addPathComponent('session_recordings') } public recordingMatchingEvents(teamId?: TeamType['id']): ApiRequest { - return this.projectsDetail(teamId).addPathComponent('session_recordings').addPathComponent('matching_events') + return this.environmentsDetail(teamId) + .addPathComponent('session_recordings') + .addPathComponent('matching_events') } public recordingPlaylists(teamId?: TeamType['id']): ApiRequest { return this.projectsDetail(teamId).addPathComponent('session_recording_playlists') @@ -484,7 +509,7 @@ class ApiRequest { // # Dashboards public dashboards(teamId?: TeamType['id']): ApiRequest { - return this.projectsDetail(teamId).addPathComponent('dashboards') + return this.environmentsDetail(teamId).addPathComponent('dashboards') } public dashboardsDetail(dashboardId: DashboardType['id'], teamId?: TeamType['id']): ApiRequest { @@ -564,7 +589,7 @@ class ApiRequest { // # Persons public persons(teamId?: TeamType['id']): ApiRequest { - return this.projectsDetail(teamId).addPathComponent('persons') + return this.environmentsDetail(teamId).addPathComponent('persons') } public person(id: string | number, teamId?: TeamType['id']): ApiRequest { @@ -580,7 +605,7 @@ class ApiRequest { // # Groups public groups(teamId?: TeamType['id']): ApiRequest { - return this.projectsDetail(teamId).addPathComponent('groups') + return this.environmentsDetail(teamId).addPathComponent('groups') } // # Search @@ -719,11 +744,11 @@ class ApiRequest { // # Subscriptions public subscriptions(teamId?: TeamType['id']): ApiRequest { - return this.projectsDetail(teamId).addPathComponent('subscriptions') + return this.environmentsDetail(teamId).addPathComponent('subscriptions') } public subscription(id: SubscriptionType['id'], teamId?: TeamType['id']): ApiRequest { - return this.subscriptions(teamId).addPathComponent(id) + return this.environmentsDetail(teamId).addPathComponent(id) } // # Integrations @@ -746,12 +771,15 @@ class ApiRequest { // # Alerts public alerts(alertId?: AlertType['id'], insightId?: InsightModel['id'], teamId?: TeamType['id']): ApiRequest { if (alertId) { - return this.projectsDetail(teamId).addPathComponent('alerts').addPathComponent(alertId).withQueryString({ - insight_id: insightId, - }) + return this.environmentsDetail(teamId) + .addPathComponent('alerts') + .addPathComponent(alertId) + .withQueryString({ + insight_id: insightId, + }) } - return this.projectsDetail(teamId).addPathComponent('alerts').withQueryString({ + return this.environmentsDetail(teamId).addPathComponent('alerts').withQueryString({ insight_id: insightId, }) } @@ -775,7 +803,7 @@ class ApiRequest { // # Queries public query(teamId?: TeamType['id']): ApiRequest { - return this.projectsDetail(teamId).addPathComponent('query') + return this.environmentsDetail(teamId).addPathComponent('query') } public queryStatus(queryId: string, showProgress: boolean, teamId?: TeamType['id']): ApiRequest { @@ -788,7 +816,7 @@ class ApiRequest { // Chat public chat(teamId?: 
TeamType['id']): ApiRequest { - return this.projectsDetail(teamId).addPathComponent('query').addPathComponent('chat') + return this.environmentsDetail(teamId).addPathComponent('query').addPathComponent('chat') } // Notebooks @@ -802,7 +830,7 @@ class ApiRequest { // Batch Exports public batchExports(teamId?: TeamType['id']): ApiRequest { - return this.projectsDetail(teamId).addPathComponent('batch_exports') + return this.environmentsDetail(teamId).addPathComponent('batch_exports') } public batchExport(id: BatchExportConfiguration['id'], teamId?: TeamType['id']): ApiRequest { @@ -826,7 +854,7 @@ class ApiRequest { return this.projectsDetail(teamId).addPathComponent('external_data_sources') } - public externalDataSource(sourceId: ExternalDataStripeSource['id'], teamId?: TeamType['id']): ApiRequest { + public externalDataSource(sourceId: ExternalDataSource['id'], teamId?: TeamType['id']): ApiRequest { return this.externalDataSources(teamId).addPathComponent(sourceId) } @@ -842,6 +870,9 @@ class ApiRequest { public insightVariables(teamId?: TeamType['id']): ApiRequest { return this.projectsDetail(teamId).addPathComponent('insight_variables') } + public insightVariable(variableId: string, teamId?: TeamType['id']): ApiRequest { + return this.insightVariables(teamId).addPathComponent(variableId) + } // ActivityLog public activity_log(teamId?: TeamType['id']): ApiRequest { @@ -907,14 +938,14 @@ const prepareUrl = (url: string): string => { return output } -const PROJECT_ID_REGEX = /\/api\/projects\/(\w+)(?:$|[/?#])/ +const PROJECT_ID_REGEX = /\/api\/(project|environment)s\/(\w+)(?:$|[/?#])/ const ensureProjectIdNotInvalid = (url: string): void => { const projectIdMatch = PROJECT_ID_REGEX.exec(url) if (projectIdMatch) { - const projectId = projectIdMatch[1].trim() + const projectId = projectIdMatch[2].trim() if (projectId === 'null' || projectId === 'undefined') { - throw { status: 0, detail: 'Cannot make request - project ID is unknown.' } + throw { status: 0, detail: `Cannot make request - ${projectIdMatch[1]} ID is unknown.` } } } } @@ -935,7 +966,8 @@ const api = { shortId: InsightModel['short_id'], basic?: boolean, refresh?: RefreshType, - filtersOverride?: DashboardFilter | null + filtersOverride?: DashboardFilter | null, + variablesOverride?: Record | null ): Promise>> { return new ApiRequest() .insights() @@ -945,6 +977,7 @@ const api = { basic, refresh, filters_override: filtersOverride, + variables_override: variablesOverride, }) ) .get() @@ -1049,33 +1082,59 @@ const api = { filters: Partial & { user?: UserBasicType['id'] }>, teamId: TeamType['id'] = ApiConfig.getCurrentTeamId() ): Promise> { - return new ApiRequest().activity_log(teamId).withQueryString(toParams(filters)).get() + return api.activity.listRequest(filters, teamId).get() + }, + + listRequest( + filters: Partial<{ + scope?: ActivityScope + scopes?: ActivityScope[] | string + user?: UserBasicType['id'] + page?: number + page_size?: number + item_id?: number | string + }>, + teamId: TeamType['id'] = ApiConfig.getCurrentTeamId() + ): ApiRequest { + if (Array.isArray(filters.scopes)) { + filters.scopes = filters.scopes.join(',') + } + return new ApiRequest().activity_log(teamId).withQueryString(toParams(filters)) }, listLegacy( - activityLogProps: ActivityLogProps, + props: ActivityLogProps, page: number = 1, teamId: TeamType['id'] = ApiConfig.getCurrentTeamId() ): Promise> { + const scopes = Array.isArray(props.scope) ? 
[...props.scope] : [props.scope] + + // Opt into the new /activity_log API + if ([ActivityScope.PLUGIN, ActivityScope.HOG_FUNCTION].includes(scopes[0]) || scopes.length > 1) { + return api.activity + .listRequest({ + scopes, + ...(props.id ? { item_id: props.id } : {}), + page: page || 1, + page_size: ACTIVITY_PAGE_SIZE, + }) + .get() + } + // TODO: Can we replace all these endpoint specific implementations with the generic REST endpoint above? - const requestForScope: { [key in ActivityScope]?: (props: ActivityLogProps) => ApiRequest | null } = { - [ActivityScope.FEATURE_FLAG]: (props) => { + const requestForScope: { [key in ActivityScope]?: () => ApiRequest | null } = { + [ActivityScope.FEATURE_FLAG]: () => { return new ApiRequest().featureFlagsActivity((props.id ?? null) as number | null, teamId) }, - [ActivityScope.PERSON]: (props) => { + [ActivityScope.PERSON]: () => { return new ApiRequest().personActivity(props.id) }, [ActivityScope.INSIGHT]: () => { return new ApiRequest().insightsActivity(teamId) }, - [ActivityScope.PLUGIN]: () => { - return activityLogProps.id - ? new ApiRequest().pluginConfig(activityLogProps.id as number, teamId).withAction('activity') - : new ApiRequest().plugins().withAction('activity') - }, [ActivityScope.PLUGIN_CONFIG]: () => { - return activityLogProps.id - ? new ApiRequest().pluginConfig(activityLogProps.id as number, teamId).withAction('activity') + return props.id + ? new ApiRequest().pluginConfig(props.id as number, teamId).withAction('activity') : new ApiRequest().plugins().withAction('activity') }, [ActivityScope.DATA_MANAGEMENT]: () => { @@ -1090,21 +1149,21 @@ const api = { return new ApiRequest().dataManagementActivity() }, [ActivityScope.NOTEBOOK]: () => { - return activityLogProps.id - ? new ApiRequest().notebook(`${activityLogProps.id}`).withAction('activity') + return props.id + ? new ApiRequest().notebook(`${props.id}`).withAction('activity') : new ApiRequest().notebooks().withAction('activity') }, [ActivityScope.TEAM]: () => { return new ApiRequest().projectsDetail().withAction('activity') }, - [ActivityScope.SURVEY]: (props) => { + [ActivityScope.SURVEY]: () => { return new ApiRequest().surveyActivity((props.id ?? null) as string, teamId) }, } const pagingParameters = { page: page || 1, limit: ACTIVITY_PAGE_SIZE } - const request = requestForScope[activityLogProps.scope]?.(activityLogProps) - return request && request !== null + const request = requestForScope[scopes[0]]?.() + return request ? 
request.withQueryString(toParams(pagingParameters)).get() : Promise.resolve({ results: [], count: 0 }) }, @@ -1223,7 +1282,7 @@ const api = { }, async list({ limit = EVENT_DEFINITIONS_PER_PAGE, - teamId = ApiConfig.getCurrentTeamId(), + teamId, ...params }: { limit?: number @@ -1239,7 +1298,7 @@ const api = { }, determineListEndpoint({ limit = EVENT_DEFINITIONS_PER_PAGE, - teamId = ApiConfig.getCurrentTeamId(), + teamId, ...params }: { limit?: number @@ -1288,7 +1347,7 @@ const api = { }, async list({ limit = EVENT_PROPERTY_DEFINITIONS_PER_PAGE, - teamId = ApiConfig.getCurrentTeamId(), + teamId, ...params }: { event_names?: string[] @@ -1314,7 +1373,7 @@ const api = { }, determineListEndpoint({ limit = EVENT_PROPERTY_DEFINITIONS_PER_PAGE, - teamId = ApiConfig.getCurrentTeamId(), + teamId, ...params }: { event_names?: string[] @@ -1342,7 +1401,7 @@ const api = { sessions: { async propertyDefinitions({ - teamId = ApiConfig.getCurrentTeamId(), + teamId, search, properties, }: { @@ -2024,6 +2083,13 @@ const api = { ): Promise { return await new ApiRequest().batchExportRun(id, runId, teamId).withAction('retry').create() }, + async cancelRun( + id: BatchExportConfiguration['id'], + runId: BatchExportRun['id'], + teamId?: TeamType['id'] + ): Promise { + return await new ApiRequest().batchExportRun(id, runId, teamId).withAction('cancel').create() + }, async logs( id: BatchExportConfiguration['id'], params: LogEntryRequestParams = {} @@ -2140,25 +2206,25 @@ const api = { }, }, externalDataSources: { - async list(options?: ApiMethodOptions | undefined): Promise> { + async list(options?: ApiMethodOptions | undefined): Promise> { return await new ApiRequest().externalDataSources().get(options) }, - async get(sourceId: ExternalDataStripeSource['id']): Promise { + async get(sourceId: ExternalDataSource['id']): Promise { return await new ApiRequest().externalDataSource(sourceId).get() }, async create(data: Partial): Promise<{ id: string }> { return await new ApiRequest().externalDataSources().create({ data }) }, - async delete(sourceId: ExternalDataStripeSource['id']): Promise { + async delete(sourceId: ExternalDataSource['id']): Promise { await new ApiRequest().externalDataSource(sourceId).delete() }, - async reload(sourceId: ExternalDataStripeSource['id']): Promise { + async reload(sourceId: ExternalDataSource['id']): Promise { await new ApiRequest().externalDataSource(sourceId).withAction('reload').create() }, async update( - sourceId: ExternalDataStripeSource['id'], - data: Partial - ): Promise { + sourceId: ExternalDataSource['id'], + data: Partial + ): Promise { return await new ApiRequest().externalDataSource(sourceId).update({ data }) }, async database_schema( @@ -2180,7 +2246,7 @@ const api = { .create({ data: { source_type, prefix } }) }, async jobs( - sourceId: ExternalDataStripeSource['id'], + sourceId: ExternalDataSource['id'], before: string | null, after: string | null ): Promise { @@ -2248,9 +2314,12 @@ const api = { async list(options?: ApiMethodOptions | undefined): Promise> { return await new ApiRequest().insightVariables().get(options) }, - async create(data: Partial): Promise { + async create(data: Partial): Promise { return await new ApiRequest().insightVariables().create({ data }) }, + async update(variableId: string, data: Partial): Promise { + return await new ApiRequest().insightVariable(variableId).update({ data }) + }, }, subscriptions: { @@ -2376,7 +2445,8 @@ const api = { queryId?: string, refresh?: boolean, async?: boolean, - filtersOverride?: DashboardFilter | null + 
filtersOverride?: DashboardFilter | null, + variablesOverride?: Record | null ): Promise< T extends { [response: string]: any } ? T['response'] extends infer P | undefined @@ -2387,7 +2457,13 @@ const api = { const refreshParam: RefreshType | undefined = refresh && async ? 'force_async' : async ? 'async' : refresh return await new ApiRequest().query().create({ ...options, - data: { query, client_query_id: queryId, refresh: refreshParam, filters_override: filtersOverride }, + data: { + query, + client_query_id: queryId, + refresh: refreshParam, + filters_override: filtersOverride, + variables_override: variablesOverride, + }, }) }, diff --git a/frontend/src/lib/components/ActivityLog/ActivityLog.stories.tsx b/frontend/src/lib/components/ActivityLog/ActivityLog.stories.tsx index 1a549691f9088..d850cd6258cdf 100644 --- a/frontend/src/lib/components/ActivityLog/ActivityLog.stories.tsx +++ b/frontend/src/lib/components/ActivityLog/ActivityLog.stories.tsx @@ -42,7 +42,7 @@ const meta: Meta = { ctx.status(200), ctx.json({ results: featureFlagsActivityResponseJson }), ], - '/api/projects/:team/insights/activity': (_, __, ctx) => [ + '/api/environments/:team_id/insights/activity': (_, __, ctx) => [ ctx.status(200), ctx.json({ results: insightsActivityResponseJson }), ], diff --git a/frontend/src/lib/components/ActivityLog/ActivityLog.tsx b/frontend/src/lib/components/ActivityLog/ActivityLog.tsx index 384abdf8ee6ff..946630727fbc6 100644 --- a/frontend/src/lib/components/ActivityLog/ActivityLog.tsx +++ b/frontend/src/lib/components/ActivityLog/ActivityLog.tsx @@ -24,8 +24,8 @@ export type ActivityLogProps = ActivityLogLogicProps & { renderSideAction?: (logItem: HumanizedActivityLogItem) => JSX.Element } -const Empty = ({ scope }: { scope: string }): JSX.Element => { - const noun = scope +const Empty = ({ scope }: { scope: string | string[] }): JSX.Element => { + const noun = (Array.isArray(scope) ? 
scope[0] : scope) .replace(/([A-Z])/g, ' $1') .trim() .toLowerCase() diff --git a/frontend/src/lib/components/ActivityLog/activityLogLogic.insight.test.tsx b/frontend/src/lib/components/ActivityLog/activityLogLogic.insight.test.tsx index 2f290332b7e19..a302f831743a2 100644 --- a/frontend/src/lib/components/ActivityLog/activityLogLogic.insight.test.tsx +++ b/frontend/src/lib/components/ActivityLog/activityLogLogic.insight.test.tsx @@ -13,7 +13,7 @@ describe('the activity log logic', () => { describe('humanizing insights', () => { const insightTestSetup = makeTestSetup( ActivityScope.INSIGHT, - `/api/projects/${MOCK_TEAM_ID}/insights/activity/` + `/api/environments/${MOCK_TEAM_ID}/insights/activity/` ) it('can handle change of name', async () => { diff --git a/frontend/src/lib/components/ActivityLog/activityLogLogic.person.test.tsx b/frontend/src/lib/components/ActivityLog/activityLogLogic.person.test.tsx index 8a34afd1c00a6..988b1b741f9c8 100644 --- a/frontend/src/lib/components/ActivityLog/activityLogLogic.person.test.tsx +++ b/frontend/src/lib/components/ActivityLog/activityLogLogic.person.test.tsx @@ -8,7 +8,10 @@ import { ActivityScope } from '~/types' describe('the activity log logic', () => { describe('humanizing persons', () => { - const personTestSetup = makeTestSetup(ActivityScope.PERSON, `/api/projects/${MOCK_TEAM_ID}/persons/7/activity/`) + const personTestSetup = makeTestSetup( + ActivityScope.PERSON, + `/api/environments/${MOCK_TEAM_ID}/persons/7/activity/` + ) it('can handle addition of a property', async () => { const logic = await personTestSetup('test person', 'updated', [ { diff --git a/frontend/src/lib/components/ActivityLog/activityLogLogic.plugin.test.tsx b/frontend/src/lib/components/ActivityLog/activityLogLogic.plugin.test.tsx index 6010f55e044ef..3e4dbf573e399 100644 --- a/frontend/src/lib/components/ActivityLog/activityLogLogic.plugin.test.tsx +++ b/frontend/src/lib/components/ActivityLog/activityLogLogic.plugin.test.tsx @@ -16,10 +16,7 @@ describe('the activity log logic', () => { }) describe('humanizing plugins', () => { - const pluginTestSetup = makeTestSetup( - ActivityScope.PLUGIN, - '/api/projects/:id/plugin_configs/:config_id/activity' - ) + const pluginTestSetup = makeTestSetup(ActivityScope.PLUGIN, '/api/projects/:id/activity_log') it('can handle installation of a plugin', async () => { const logic = await pluginTestSetup('the installed plugin', 'installed', null) const actual = logic.values.humanizedActivity diff --git a/frontend/src/lib/components/ActivityLog/activityLogLogic.tsx b/frontend/src/lib/components/ActivityLog/activityLogLogic.tsx index 0c208293a212a..2e890724f9912 100644 --- a/frontend/src/lib/components/ActivityLog/activityLogLogic.tsx +++ b/frontend/src/lib/components/ActivityLog/activityLogLogic.tsx @@ -16,13 +16,14 @@ import { dataManagementActivityDescriber } from 'scenes/data-management/dataMana import { flagActivityDescriber } from 'scenes/feature-flags/activityDescriptions' import { notebookActivityDescriber } from 'scenes/notebooks/Notebook/notebookActivityDescriber' import { personActivityDescriber } from 'scenes/persons/activityDescriptions' +import { hogFunctionActivityDescriber } from 'scenes/pipeline/hogfunctions/activityDescriptions' import { pluginActivityDescriber } from 'scenes/pipeline/pipelinePluginActivityDescriptions' import { insightActivityDescriber } from 'scenes/saved-insights/activityDescriptions' import { surveyActivityDescriber } from 'scenes/surveys/surveyActivityDescriber' import { teamActivityDescriber } from 
'scenes/teamActivityDescriber' import { urls } from 'scenes/urls' -import { ActivityScope } from '~/types' +import { ActivityScope, PipelineNodeTab, PipelineStage, PipelineTab } from '~/types' import type { activityLogLogicType } from './activityLogLogicType' @@ -38,6 +39,8 @@ export const describerFor = (logItem?: ActivityLogItem): Describer | undefined = case ActivityScope.PLUGIN: case ActivityScope.PLUGIN_CONFIG: return pluginActivityDescriber + case ActivityScope.HOG_FUNCTION: + return hogFunctionActivityDescriber case ActivityScope.COHORT: return cohortActivityDescriber case ActivityScope.INSIGHT: @@ -59,23 +62,26 @@ export const describerFor = (logItem?: ActivityLogItem): Describer | undefined = } export type ActivityLogLogicProps = { - scope: ActivityScope + scope: ActivityScope | ActivityScope[] // if no id is provided, the list is not scoped by id and shows all activity ordered by time id?: number | string } export const activityLogLogic = kea([ props({} as ActivityLogLogicProps), - key(({ scope, id }) => `activity/${scope}/${id || 'all'}`), + key(({ scope, id }) => `activity/${Array.isArray(scope) ? scope.join(',') : scope}/${id || 'all'}`), path((key) => ['lib', 'components', 'ActivityLog', 'activitylog', 'logic', key]), actions({ setPage: (page: number) => ({ page }), }), loaders(({ values, props }) => ({ activity: [ - { results: [], total_count: 0 } as ActivityLogPaginatedResponse, + { results: [], count: 0 } as ActivityLogPaginatedResponse, { - fetchActivity: async () => await api.activity.listLegacy(props, values.page), + fetchActivity: async () => { + const response = await api.activity.listLegacy(props, values.page) + return { results: response.results, count: (response as any).total_count ?? response.count } + }, }, ], })), @@ -110,7 +116,7 @@ export const activityLogLogic = kea([ totalCount: [ (s) => [s.activity], (activity): number | null => { - return activity.total_count ?? null + return activity.count ?? null }, ], })), @@ -128,6 +134,7 @@ export const activityLogLogic = kea([ forceUsePageParam?: boolean ): void => { const pageInURL = searchParams['page'] + const firstScope = Array.isArray(props.scope) ? 
props.scope[0] : props.scope const shouldPage = forceUsePageParam || @@ -135,7 +142,7 @@ export const activityLogLogic = kea([ ([ActivityScope.FEATURE_FLAG, ActivityScope.INSIGHT, ActivityScope.PLUGIN].includes(pageScope) && searchParams['tab'] === 'history') - if (shouldPage && pageInURL && pageInURL !== values.page && pageScope === props.scope) { + if (shouldPage && pageInURL && pageInURL !== values.page && pageScope === firstScope) { actions.setPage(pageInURL) } @@ -161,6 +168,13 @@ export const activityLogLogic = kea([ onPageChange(searchParams, hashParams, ActivityScope.INSIGHT), [urls.featureFlag(':id')]: (_, searchParams, hashParams) => onPageChange(searchParams, hashParams, ActivityScope.FEATURE_FLAG, true), + [urls.pipelineNode(PipelineStage.Destination, ':id', PipelineNodeTab.History)]: ( + _, + searchParams, + hashParams + ) => onPageChange(searchParams, hashParams, ActivityScope.HOG_FUNCTION), + [urls.pipeline(PipelineTab.History)]: (_, searchParams, hashParams) => + onPageChange(searchParams, hashParams, ActivityScope.PLUGIN), } }), events(({ actions }) => ({ diff --git a/frontend/src/lib/components/Alerts/SnoozeButton.tsx b/frontend/src/lib/components/Alerts/SnoozeButton.tsx new file mode 100644 index 0000000000000..28516638f209c --- /dev/null +++ b/frontend/src/lib/components/Alerts/SnoozeButton.tsx @@ -0,0 +1,43 @@ +import { dayjs } from 'lib/dayjs' +import { formatDate } from 'lib/utils' + +import { DateFilter } from '../DateFilter/DateFilter' + +const DATETIME_FORMAT = 'MMM D - HH:mm' + +interface SnoozeButtonProps { + onChange: (snoonzeUntil: string) => void + value?: string +} + +export function SnoozeButton({ onChange, value }: SnoozeButtonProps): JSX.Element { + return ( + { + snoozeUntil && onChange(snoozeUntil) + }} + placeholder="Snooze until" + max={31} + isFixedDateMode + showRollingRangePicker={false} + allowedRollingDateOptions={['days', 'weeks', 'months', 'years']} + showCustom + dateOptions={[ + { + key: 'Tomorrow', + values: ['+1d'], + getFormattedDate: (date: dayjs.Dayjs): string => formatDate(date.add(1, 'd'), DATETIME_FORMAT), + defaultInterval: 'day', + }, + { + key: 'One week from now', + values: ['+1w'], + getFormattedDate: (date: dayjs.Dayjs): string => formatDate(date.add(1, 'w'), DATETIME_FORMAT), + defaultInterval: 'day', + }, + ]} + size="medium" + /> + ) +} diff --git a/frontend/src/lib/components/Alerts/alertFormLogic.ts b/frontend/src/lib/components/Alerts/alertFormLogic.ts index 4230dc9238d01..3c0ab234a8ae1 100644 --- a/frontend/src/lib/components/Alerts/alertFormLogic.ts +++ b/frontend/src/lib/components/Alerts/alertFormLogic.ts @@ -3,7 +3,7 @@ import { forms } from 'kea-forms' import api from 'lib/api' import { lemonToast } from 'lib/lemon-ui/LemonToast/LemonToast' -import { AlertCalculationInterval } from '~/queries/schema' +import { AlertCalculationInterval, AlertConditionType, InsightThresholdType } from '~/queries/schema' import { QueryBasedInsightModel } from '~/types' import type { alertFormLogicType } from './alertFormLogicType' @@ -11,7 +11,7 @@ import { AlertType, AlertTypeWrite } from './types' export type AlertFormType = Pick< AlertType, - 'name' | 'enabled' | 'created_at' | 'threshold' | 'subscribed_users' | 'checks' | 'config' + 'name' | 'enabled' | 'created_at' | 'threshold' | 'condition' | 'subscribed_users' | 'checks' | 'config' > & { id?: AlertType['id'] created_by?: AlertType['created_by'] | null @@ -31,6 +31,8 @@ export const alertFormLogic = kea([ actions({ deleteAlert: true, + snoozeAlert: (snoozeUntil: string) => ({ 
snoozeUntil }), + clearSnooze: true, }), forms(({ props }) => ({ @@ -47,10 +49,9 @@ export const alertFormLogic = kea([ type: 'TrendsAlertConfig', series_index: 0, }, - threshold: { - configuration: { - absoluteThreshold: {}, - }, + threshold: { configuration: { type: InsightThresholdType.ABSOLUTE, bounds: {} } }, + condition: { + type: AlertConditionType.ABSOLUTE_VALUE, }, subscribed_users: [], checks: [], @@ -61,12 +62,17 @@ export const alertFormLogic = kea([ name: !name ? 'You need to give your alert a name' : undefined, }), submit: async (alert) => { - const payload: Partial = { + const payload: AlertTypeWrite = { ...alert, subscribed_users: alert.subscribed_users?.map(({ id }) => id), insight: props.insightId, } + // absolute value alert can only have absolute threshold + if (payload.condition.type === AlertConditionType.ABSOLUTE_VALUE) { + payload.threshold.configuration.type = InsightThresholdType.ABSOLUTE + } + try { if (alert.id === undefined) { const updatedAlert: AlertType = await api.alerts.create(payload) @@ -101,5 +107,21 @@ export const alertFormLogic = kea([ await api.alerts.delete(values.alertForm.id) props.onEditSuccess() }, + snoozeAlert: async ({ snoozeUntil }) => { + // resolution only allowed on created alert (which will have alertId) + if (!values.alertForm.id) { + throw new Error("Cannot resolve alert that doesn't exist") + } + await api.alerts.update(values.alertForm.id, { snoozed_until: snoozeUntil }) + props.onEditSuccess() + }, + clearSnooze: async () => { + // resolution only allowed on created alert (which will have alertId) + if (!values.alertForm.id) { + throw new Error("Cannot resolve alert that doesn't exist") + } + await api.alerts.update(values.alertForm.id, { snoozed_until: null }) + props.onEditSuccess() + }, })), ]) diff --git a/frontend/src/lib/components/Alerts/insightAlertsLogic.ts b/frontend/src/lib/components/Alerts/insightAlertsLogic.ts index dd6a09a29d08c..5ef92b84b8de2 100644 --- a/frontend/src/lib/components/Alerts/insightAlertsLogic.ts +++ b/frontend/src/lib/components/Alerts/insightAlertsLogic.ts @@ -3,7 +3,7 @@ import { loaders } from 'kea-loaders' import api from 'lib/api' import { insightVizDataLogic } from 'scenes/insights/insightVizDataLogic' -import { GoalLine } from '~/queries/schema' +import { AlertConditionType, GoalLine, InsightThresholdType } from '~/queries/schema' import { getBreakdown, isInsightVizNode, isTrendsQuery } from '~/queries/utils' import { InsightLogicProps } from '~/types' @@ -65,21 +65,28 @@ export const insightAlertsLogic = kea([ (s) => [s.alerts], (alerts: AlertType[]): GoalLine[] => alerts.flatMap((alert) => { - const thresholds = [] + if ( + alert.threshold.configuration.type !== InsightThresholdType.ABSOLUTE || + alert.condition.type !== AlertConditionType.ABSOLUTE_VALUE || + !alert.threshold.configuration.bounds + ) { + return [] + } - const absoluteThreshold = alert.threshold.configuration.absoluteThreshold + const bounds = alert.threshold.configuration.bounds - if (absoluteThreshold?.upper !== undefined) { + const thresholds = [] + if (bounds?.upper !== undefined) { thresholds.push({ label: `${alert.name} Upper Threshold`, - value: absoluteThreshold?.upper, + value: bounds?.upper, }) } - if (absoluteThreshold?.lower !== undefined) { + if (bounds?.lower !== undefined) { thresholds.push({ label: `${alert.name} Lower Threshold`, - value: absoluteThreshold?.lower, + value: bounds?.lower, }) } diff --git a/frontend/src/lib/components/Alerts/types.ts b/frontend/src/lib/components/Alerts/types.ts index 
864c2a2321909..4641d7fe0728f 100644 --- a/frontend/src/lib/components/Alerts/types.ts +++ b/frontend/src/lib/components/Alerts/types.ts @@ -12,6 +12,7 @@ export type AlertConfig = TrendsAlertConfig export interface AlertTypeBase { name: string condition: AlertCondition + threshold: { configuration: InsightThreshold } enabled: boolean insight: QueryBasedInsightModel config: AlertConfig @@ -20,6 +21,7 @@ export interface AlertTypeBase { export interface AlertTypeWrite extends Omit { subscribed_users: number[] insight: number + snoozed_until?: string | null } export interface AlertCheck { @@ -33,7 +35,7 @@ export interface AlertCheck { export interface AlertType extends AlertTypeBase { id: string subscribed_users: UserBasicType[] - threshold: { configuration: InsightThreshold } + condition: AlertCondition created_by: UserBasicType created_at: string state: AlertState @@ -41,4 +43,5 @@ export interface AlertType extends AlertTypeBase { last_checked_at: string checks: AlertCheck[] calculation_interval: AlertCalculationInterval + snoozed_until?: string } diff --git a/frontend/src/lib/components/Alerts/views/EditAlertModal.tsx b/frontend/src/lib/components/Alerts/views/EditAlertModal.tsx index 9a0c568bda465..b3c63ea6973e6 100644 --- a/frontend/src/lib/components/Alerts/views/EditAlertModal.tsx +++ b/frontend/src/lib/components/Alerts/views/EditAlertModal.tsx @@ -1,22 +1,24 @@ -import { LemonBanner, LemonCheckbox, LemonInput, LemonSelect, SpinnerOverlay } from '@posthog/lemon-ui' +import { LemonCheckbox, LemonInput, LemonSegmentedButton, LemonSelect, SpinnerOverlay } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { Form, Group } from 'kea-forms' import { AlertStateIndicator } from 'lib/components/Alerts/views/ManageAlertsModal' import { MemberSelectMultiple } from 'lib/components/MemberSelectMultiple' import { TZLabel } from 'lib/components/TZLabel' import { UserActivityIndicator } from 'lib/components/UserActivityIndicator/UserActivityIndicator' +import { dayjs } from 'lib/dayjs' import { IconChevronLeft } from 'lib/lemon-ui/icons' import { LemonButton } from 'lib/lemon-ui/LemonButton' import { LemonField } from 'lib/lemon-ui/LemonField' import { LemonModal } from 'lib/lemon-ui/LemonModal' -import { alphabet } from 'lib/utils' +import { alphabet, formatDate } from 'lib/utils' import { trendsDataLogic } from 'scenes/trends/trendsDataLogic' -import { AlertCalculationInterval } from '~/queries/schema' +import { AlertCalculationInterval, AlertConditionType, AlertState, InsightThresholdType } from '~/queries/schema' import { InsightShortId, QueryBasedInsightModel } from '~/types' import { alertFormLogic } from '../alertFormLogic' import { alertLogic } from '../alertLogic' +import { SnoozeButton } from '../SnoozeButton' import { AlertType } from '../types' export function AlertStateTable({ alert }: { alert: AlertType }): JSX.Element | null { @@ -27,7 +29,8 @@ export function AlertStateTable({ alert }: { alert: AlertType }): JSX.Element | return (

-                    Current status {alert.state}
+                    Current status - {alert.state}
+                    {alert.snoozed_until && ` until ${formatDate(dayjs(alert?.snoozed_until), 'MMM D, HH:mm')}`}{' '}

@@ -78,11 +81,11 @@ export function EditAlertModal({ const formLogicProps = { alert, insightId, onEditSuccess } const formLogic = alertFormLogic(formLogicProps) const { alertForm, isAlertFormSubmitting, alertFormChanged } = useValues(formLogic) - const { deleteAlert } = useActions(formLogic) + const { deleteAlert, snoozeAlert, clearSnooze } = useActions(formLogic) const { setAlertFormValue } = useActions(formLogic) const trendsLogic = trendsDataLogic({ dashboardItemId: insightShortId }) - const { alertSeries, breakdownFilter } = useValues(trendsLogic) + const { alertSeries, isNonTimeSeriesDisplay } = useValues(trendsLogic) const creatingNewAlert = alertForm.id === undefined @@ -107,112 +110,220 @@ export function EditAlertModal({ -
- {alert?.created_by ? ( - - ) : null} - - - - - - - - - - {breakdownFilter && ( - - - Alerts on insights with breakdowns alert when any of the breakdown values - breaches the threshold - - - )} - - - - ({ - label: `${alphabet[index]} - ${event}`, - value: index, - }))} - /> - - - - - ['hourly', 'daily'].includes(interval)) - .map((interval) => ({ - label: interval, - value: interval, - }))} - /> - - - - - - +
+
+
+ + - - + - - +
+ {alert?.created_by ? ( + + ) : null} +
- u.id) ?? []} - idKey="id" - onChange={(value) => setAlertFormValue('subscribed_users', value)} - /> +
+

Definition

+
+
+
When
+ + + ({ + label: `${alphabet[index]} - ${event}`, + value: index, + }))} + /> + + + + + + + +
+
+
less than
+ + + setAlertFormValue('threshold', { + configuration: { + type: alertForm.threshold.configuration.type, + bounds: { + ...alertForm.threshold.configuration.bounds, + lower: + value && + alertForm.threshold.configuration.type === + InsightThresholdType.PERCENTAGE + ? value / 100 + : value, + }, + }, + }) + } + /> + +
or more than
+ + + setAlertFormValue('threshold', { + configuration: { + type: alertForm.threshold.configuration.type, + bounds: { + ...alertForm.threshold.configuration.bounds, + upper: + value && + alertForm.threshold.configuration.type === + InsightThresholdType.PERCENTAGE + ? value / 100 + : value, + }, + }, + }) + } + /> + + {alertForm.condition.type !== AlertConditionType.ABSOLUTE_VALUE && ( + + + + + + )} +
+
+
+ {alertForm.condition.type === AlertConditionType.ABSOLUTE_VALUE + ? 'check' + : 'compare'} +
+ + ({ + label: interval, + value: interval, + }))} + /> + +
and notify
+
+ u.id) ?? []} + idKey="id" + onChange={(value) => setAlertFormValue('subscribed_users', value)} + /> +
+
+
+
{alert && }
- {!creatingNewAlert ? ( - - Delete alert - - ) : null} +
+ {!creatingNewAlert ? ( + + Delete alert + + ) : null} + {!creatingNewAlert && alert?.state === AlertState.FIRING ? ( + + ) : null} + {!creatingNewAlert && alert?.state === AlertState.SNOOZED ? ( + + Clear snooze + + ) : null} +
- - Cancel - - -
- ) : ( + return alert.state === AlertState.FIRING ? ( + ) : ( + + + ) } @@ -32,7 +32,9 @@ interface AlertListItemProps { } export function AlertListItem({ alert, onClick }: AlertListItemProps): JSX.Element { - const absoluteThreshold = alert.threshold?.configuration?.absoluteThreshold + const bounds = alert.threshold?.configuration?.bounds + const isPercentage = alert.threshold?.configuration.type === InsightThresholdType.PERCENTAGE + return (
@@ -42,9 +44,11 @@ export function AlertListItem({ alert, onClick }: AlertListItemProps): JSX.Eleme {alert.enabled ? (
-                            {absoluteThreshold?.lower && `Low ${absoluteThreshold.lower}`}
-                            {absoluteThreshold?.lower && absoluteThreshold?.upper ? ' · ' : ''}
-                            {absoluteThreshold?.upper && `High ${absoluteThreshold.upper}`}
+                            {bounds?.lower &&
+                                `Low ${isPercentage ? bounds.lower * 100 : bounds.lower}${isPercentage ? '%' : ''}`}
+                            {bounds?.lower && bounds?.upper ? ' · ' : ''}
+                            {bounds?.upper &&
+                                `High ${isPercentage ? bounds.upper * 100 : bounds.upper}${isPercentage ? '%' : ''}`}
) : (
Disabled
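Note on the alert threshold migration above: these hunks replace the old `absoluteThreshold` object with a typed `configuration` of `{ type: InsightThresholdType, bounds: { lower?, upper? } }`, and percentage thresholds are stored as fractions (the edit form divides user input by 100, and the list item above multiplies by 100 for display). A minimal TypeScript sketch of that display rule, based only on what the hunks show; `ThresholdConfig` and `formatBound` are illustrative names, not part of the patch:

    import { InsightThresholdType } from '~/queries/schema'

    // Illustrative shape matching the hunks; the real type is InsightThreshold from ~/queries/schema.
    type ThresholdConfig = { type: InsightThresholdType; bounds?: { lower?: number; upper?: number } }

    // Percentage bounds are stored as fractions, so display multiplies back and appends '%'.
    function formatBound(config: ThresholdConfig, bound?: number): string | null {
        if (bound === undefined) {
            return null
        }
        return config.type === InsightThresholdType.PERCENTAGE ? `${bound * 100}%` : String(bound)
    }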
diff --git a/frontend/src/lib/components/AnnotationsOverlay/annotationsOverlayLogic.test.ts b/frontend/src/lib/components/AnnotationsOverlay/annotationsOverlayLogic.test.ts index b6d2f1c5fc8eb..a8c984dc0e928 100644 --- a/frontend/src/lib/components/AnnotationsOverlay/annotationsOverlayLogic.test.ts +++ b/frontend/src/lib/components/AnnotationsOverlay/annotationsOverlayLogic.test.ts @@ -181,7 +181,7 @@ function useInsightMocks(interval: string = 'day', timezone: string = 'UTC'): vo } useMocks({ get: { - '/api/projects/:team_id/insights/': () => { + '/api/environments/:team_id/insights/': () => { return [ 200, { @@ -189,7 +189,7 @@ function useInsightMocks(interval: string = 'day', timezone: string = 'UTC'): vo }, ] }, - [`/api/projects/:team_id/insights/${MOCK_INSIGHT_NUMERIC_ID}`]: () => { + [`/api/environments/:team_id/insights/${MOCK_INSIGHT_NUMERIC_ID}`]: () => { return [200, insight] }, '/api/users/@me/': [200, {}], diff --git a/frontend/src/lib/components/AuthorizedUrlList/authorizedUrlListLogic.test.ts b/frontend/src/lib/components/AuthorizedUrlList/authorizedUrlListLogic.test.ts index b8678715352ea..6e9897b9e2693 100644 --- a/frontend/src/lib/components/AuthorizedUrlList/authorizedUrlListLogic.test.ts +++ b/frontend/src/lib/components/AuthorizedUrlList/authorizedUrlListLogic.test.ts @@ -20,7 +20,7 @@ describe('the authorized urls list logic', () => { beforeEach(() => { useMocks({ get: { - '/api/projects/:team/insights/trend/': (req) => { + '/api/environments/:team_id/insights/trend/': (req) => { if (JSON.parse(req.url.searchParams.get('events') || '[]')?.[0]?.throw) { return [500, { status: 0, detail: 'error from the API' }] } diff --git a/frontend/src/lib/components/Cards/InsightCard/InsightCard.scss b/frontend/src/lib/components/Cards/InsightCard/InsightCard.scss index 9a8afaa08b7af..c61a7a179be63 100644 --- a/frontend/src/lib/components/Cards/InsightCard/InsightCard.scss +++ b/frontend/src/lib/components/Cards/InsightCard/InsightCard.scss @@ -108,10 +108,6 @@ } } -.InsightDetails__breakdown { - margin-bottom: 0.5rem; -} - .InsightDetails__footer { display: flex; flex-wrap: wrap; @@ -144,6 +140,7 @@ font-weight: 600; line-height: 1rem; color: var(--text-3000); + vertical-align: middle; background: var(--primary-highlight); border-radius: var(--radius); @@ -177,10 +174,6 @@ .SeriesDisplay__condition { display: flex; - - .SeriesDisplay__raw-name { - vertical-align: middle; - } } .SeriesDisplay__arrow { diff --git a/frontend/src/lib/components/Cards/InsightCard/InsightCard.tsx b/frontend/src/lib/components/Cards/InsightCard/InsightCard.tsx index 474665182957f..a029e3271651a 100644 --- a/frontend/src/lib/components/Cards/InsightCard/InsightCard.tsx +++ b/frontend/src/lib/components/Cards/InsightCard/InsightCard.tsx @@ -11,6 +11,7 @@ import { insightLogic } from 'scenes/insights/insightLogic' import { ErrorBoundary } from '~/layout/ErrorBoundary' import { themeLogic } from '~/layout/navigation-3000/themeLogic' import { Query } from '~/queries/Query/Query' +import { HogQLVariable } from '~/queries/schema' import { DashboardBasicType, DashboardPlacement, @@ -60,6 +61,8 @@ export interface InsightCardProps extends Resizeable, React.HTMLAttributes } function InsightCardInternal( @@ -90,6 +93,7 @@ function InsightCardInternal( placement, loadPriority, doNotLoad, + variablesOverride, ...divProps }: InsightCardProps, ref: React.Ref @@ -141,6 +145,7 @@ function InsightCardInternal( showEditingControls={showEditingControls} showDetailsControls={showDetailsControls} moreButtons={moreButtons} + 
variablesOverride={variablesOverride} />
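The InsightCard change above adds an optional `variablesOverride` prop (`Record<string, HogQLVariable>` from `~/queries/schema`) and threads it through to the card's meta bar and query renderer. A hedged usage sketch follows; it is a hypothetical wrapper, not code from this patch, and it assumes the component is exported as `InsightCard` while omitting the card's other props:

    import { InsightCard } from 'lib/components/Cards/InsightCard/InsightCard'
    import { HogQLVariable } from '~/queries/schema'
    import { QueryBasedInsightModel } from '~/types'

    // Hypothetical wrapper: forwards dashboard-level HogQL variable overrides to a single card.
    export function CardWithOverrides(props: {
        insight: QueryBasedInsightModel
        variablesOverride?: Record<string, HogQLVariable>
    }): JSX.Element {
        // Placement, dashboard context and the other InsightCard props are omitted for brevity.
        return <InsightCard insight={props.insight} variablesOverride={props.variablesOverride} />
    }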
diff --git a/frontend/src/lib/components/Cards/InsightCard/InsightDetails.tsx b/frontend/src/lib/components/Cards/InsightCard/InsightDetails.tsx index 9a0038556dea6..347fd3227ce40 100644 --- a/frontend/src/lib/components/Cards/InsightCard/InsightDetails.tsx +++ b/frontend/src/lib/components/Cards/InsightCard/InsightDetails.tsx @@ -1,5 +1,6 @@ import { useValues } from 'kea' import { + convertPropertiesToPropertyGroup, formatPropertyLabel, isAnyPropertyfilter, isCohortPropertyFilter, @@ -41,13 +42,7 @@ import { isTrendsQuery, isValidBreakdown, } from '~/queries/utils' -import { - AnyPropertyFilter, - FilterLogicalOperator, - FilterType, - PropertyGroupFilter, - QueryBasedInsightModel, -} from '~/types' +import { AnyPropertyFilter, FilterLogicalOperator, PropertyGroupFilter, QueryBasedInsightModel } from '~/types' import { PropertyKeyInfo } from '../../PropertyKeyInfo' import { TZLabel } from '../../TZLabel' @@ -251,11 +246,11 @@ function PathsSummary({ query }: { query: PathsQuery }): JSX.Element { ) } -export function SeriesSummary({ query }: { query: InsightQueryNode }): JSX.Element { +export function SeriesSummary({ query, heading }: { query: InsightQueryNode; heading?: JSX.Element }): JSX.Element { return ( - <> -
Query summary
-
+
+
{heading || 'Query summary'}
+
{isTrendsQuery(query) && query.trendsFilter?.formula && ( <> } fullWidth> @@ -283,8 +278,8 @@ export function SeriesSummary({ query }: { query: InsightQueryNode }): JSX.Eleme Unavailable for this insight type. )}
-
- +
+ ) } @@ -293,44 +288,13 @@ export function PropertiesSummary({ }: { properties: PropertyGroupFilter | AnyPropertyFilter[] | undefined }): JSX.Element { - const groupFilter: PropertyGroupFilter | null = Array.isArray(properties) - ? { - type: FilterLogicalOperator.And, - values: [ - { - type: FilterLogicalOperator.And, - values: properties, - }, - ], - } - : properties || null - return ( - <> +
Filters
-
- -
- - ) -} - -export function LEGACY_FilterBasedBreakdownSummary({ filters }: { filters: Partial }): JSX.Element | null { - if (filters.breakdown_type == null || filters.breakdown == null) { - return null - } - - const breakdownArray = Array.isArray(filters.breakdown) ? filters.breakdown : [filters.breakdown] - - return ( - <> -
Breakdown by
-
- {breakdownArray.map((breakdown) => ( - - ))} -
- +
+ +
+
) } @@ -342,9 +306,9 @@ export function BreakdownSummary({ query }: { query: InsightQueryNode }): JSX.El const { breakdown_type, breakdown, breakdowns } = query.breakdownFilter return ( - <> +
Breakdown by
-
+
{Array.isArray(breakdowns) ? breakdowns.map((b) => ( @@ -355,8 +319,8 @@ export function BreakdownSummary({ query }: { query: InsightQueryNode }): JSX.El : [breakdown].map((b) => ( )))} -
- +
+ ) } diff --git a/frontend/src/lib/components/Cards/InsightCard/InsightMeta.tsx b/frontend/src/lib/components/Cards/InsightCard/InsightMeta.tsx index 6de4f0c2ff638..5f6a526a19dd4 100644 --- a/frontend/src/lib/components/Cards/InsightCard/InsightMeta.tsx +++ b/frontend/src/lib/components/Cards/InsightCard/InsightMeta.tsx @@ -44,6 +44,7 @@ interface InsightMetaProps | 'showEditingControls' | 'showDetailsControls' | 'moreButtons' + | 'variablesOverride' > { insight: QueryBasedInsightModel areDetailsShown?: boolean @@ -55,6 +56,7 @@ export function InsightMeta({ ribbonColor, dashboardId, updateColor, + variablesOverride, removeFromDashboard, deleteWithUndo, refresh, @@ -95,10 +97,10 @@ export function InsightMeta({ refreshDisabledReason={refreshDisabledReason} setAreDetailsShown={setAreDetailsShown} areDetailsShown={areDetailsShown} - topHeading={} + topHeading={} meta={ <> - +

{name || {summary}} {loading && ( @@ -130,7 +132,7 @@ export function InsightMeta({ moreButtons={ <> <> - + View {refresh && ( diff --git a/frontend/src/lib/components/Cards/InsightCard/TopHeading.tsx b/frontend/src/lib/components/Cards/InsightCard/TopHeading.tsx index c9455d61067cc..e32c0ff48673a 100644 --- a/frontend/src/lib/components/Cards/InsightCard/TopHeading.tsx +++ b/frontend/src/lib/components/Cards/InsightCard/TopHeading.tsx @@ -1,7 +1,7 @@ import { dateFilterToText } from 'lib/utils' import { InsightTypeMetadata, QUERY_TYPES_METADATA } from 'scenes/saved-insights/SavedInsights' -import { NodeKind } from '~/queries/schema' +import { Node, NodeKind } from '~/queries/schema' import { containsHogQLQuery, dateRangeFor, @@ -9,11 +9,8 @@ import { isInsightQueryNode, isInsightVizNode, } from '~/queries/utils' -import { QueryBasedInsightModel } from '~/types' - -export function TopHeading({ insight }: { insight: QueryBasedInsightModel }): JSX.Element { - const { query } = insight +export function TopHeading({ query }: { query: Node | null }): JSX.Element { let insightType: InsightTypeMetadata if (query?.kind) { diff --git a/frontend/src/lib/components/DateFilter/DateFilter.tsx b/frontend/src/lib/components/DateFilter/DateFilter.tsx index e8597357d3e58..d3a35d762b144 100644 --- a/frontend/src/lib/components/DateFilter/DateFilter.tsx +++ b/frontend/src/lib/components/DateFilter/DateFilter.tsx @@ -38,6 +38,7 @@ export interface DateFilterProps { dropdownPlacement?: Placement /* True when we're not dealing with ranges, but a single date / relative date */ isFixedDateMode?: boolean + placeholder?: string } interface RawDateFilterProps extends DateFilterProps { dateFrom?: string | null | dayjs.Dayjs @@ -62,6 +63,7 @@ export function DateFilter({ max, isFixedDateMode = false, allowedRollingDateOptions, + placeholder, }: RawDateFilterProps): JSX.Element { const key = useRef(uuid()).current const logicProps: DateFilterLogicProps = { @@ -72,6 +74,7 @@ export function DateFilter({ dateOptions, isDateFormatted, isFixedDateMode, + placeholder, } const { open, diff --git a/frontend/src/lib/components/DateFilter/dateFilterLogic.ts b/frontend/src/lib/components/DateFilter/dateFilterLogic.ts index 0a1f3680dbc1b..7d8593963d7b7 100644 --- a/frontend/src/lib/components/DateFilter/dateFilterLogic.ts +++ b/frontend/src/lib/components/DateFilter/dateFilterLogic.ts @@ -112,8 +112,9 @@ export const dateFilterLogic = kea([ s.isFixedDate, s.dateOptions, (_, p) => p.isFixedDateMode, + (_, p) => p.placeholder, ], - (dateFrom, dateTo, isFixedRange, isDateToNow, isFixedDate, dateOptions, isFixedDateMode) => + (dateFrom, dateTo, isFixedRange, isDateToNow, isFixedDate, dateOptions, isFixedDateMode, placeholder) => isFixedRange ? formatDateRange(dayjs(dateFrom), dayjs(dateTo)) : isDateToNow @@ -123,7 +124,9 @@ export const dateFilterLogic = kea([ : dateFilterToText( dateFrom, dateTo, - isFixedDateMode ? SELECT_FIXED_VALUE_PLACEHOLDER : NO_OVERRIDE_RANGE_PLACEHOLDER, + isFixedDateMode + ? placeholder ?? 
SELECT_FIXED_VALUE_PLACEHOLDER + : NO_OVERRIDE_RANGE_PLACEHOLDER, dateOptions, false ), diff --git a/frontend/src/lib/components/DateFilter/types.ts b/frontend/src/lib/components/DateFilter/types.ts index 3ebdb781b7c8c..2e95131e9cb34 100644 --- a/frontend/src/lib/components/DateFilter/types.ts +++ b/frontend/src/lib/components/DateFilter/types.ts @@ -17,6 +17,7 @@ export type DateFilterLogicProps = { dateOptions?: DateMappingOption[] isDateFormatted?: boolean isFixedDateMode?: boolean + placeholder?: string } export const CUSTOM_OPTION_KEY = 'Custom' diff --git a/frontend/src/lib/components/Errors/ErrorDisplay.stories.tsx b/frontend/src/lib/components/Errors/ErrorDisplay.stories.tsx index f2b1295f0b1dd..7e327618e48d0 100644 --- a/frontend/src/lib/components/Errors/ErrorDisplay.stories.tsx +++ b/frontend/src/lib/components/Errors/ErrorDisplay.stories.tsx @@ -125,7 +125,7 @@ export function ImportingModule(): JSX.Element { eventProperties={errorProperties({ $exception_type: 'UnhandledRejection', $exception_message: "Importing module '/static/chunk-PIJHGO7Q.js' is not found.", - $exception_stack_trace_raw: '[]', + $exception_list: [], $exception_handled: false, })} /> @@ -136,10 +136,23 @@ export function AnonymousErrorWithStackTrace(): JSX.Element { return ( ","function":"?","in_app":true,"lineno":1,"colno":26}]', + $exception_list: [ + { + type: 'Error', + value: 'wat123', + stacktrace: { + frames: [ + { + filename: '', + function: '?', + in_app: true, + lineno: 1, + colno: 26, + }, + ], + }, + }, + ], })} /> ) diff --git a/frontend/src/lib/components/Errors/ErrorDisplay.tsx b/frontend/src/lib/components/Errors/ErrorDisplay.tsx index 730de622a622a..58770420674d0 100644 --- a/frontend/src/lib/components/Errors/ErrorDisplay.tsx +++ b/frontend/src/lib/components/Errors/ErrorDisplay.tsx @@ -123,9 +123,6 @@ function ActiveFlags({ flags }: { flags: string[] }): JSX.Element { export function getExceptionPropertiesFrom(eventProperties: Record): Record { const { - $exception_type, - $exception_message, - $exception_synthetic, $lib, $lib_version, $browser, @@ -138,27 +135,29 @@ export function getExceptionPropertiesFrom(eventProperties: Record) $level, } = eventProperties - let $exception_stack_trace_raw = eventProperties.$exception_stack_trace_raw + let $exception_type = eventProperties.$exception_type + let $exception_message = eventProperties.$exception_message + let $exception_synthetic = eventProperties.$exception_synthetic let $exception_list = eventProperties.$exception_list - // exception autocapture sets $exception_stack_trace_raw as a string - // if it isn't present then this is probably a sentry exception. - // try and grab the frames from that - if (!$exception_stack_trace_raw?.length && $sentry_exception) { - if (Array.isArray($sentry_exception.values)) { - const firstException = $sentry_exception.values[0] - if (firstException.stacktrace) { - $exception_stack_trace_raw = JSON.stringify(firstException.stacktrace.frames) - } - } - } - // exception autocapture sets $exception_list for chained exceptions. - // If it's not present, get this list from the sentry_exception + + // exception autocapture sets $exception_list for all exceptions. + // If it's not present, then this is probably a sentry exception. 
Get this list from the sentry_exception if (!$exception_list?.length && $sentry_exception) { if (Array.isArray($sentry_exception.values)) { $exception_list = $sentry_exception.values } } + if (!$exception_type) { + $exception_type = $exception_list?.[0]?.type + } + if (!$exception_message) { + $exception_message = $exception_list?.[0]?.value + } + if ($exception_synthetic == undefined) { + $exception_synthetic = $exception_list?.[0]?.mechanism?.synthetic + } + return { $exception_type, $exception_message, @@ -171,7 +170,6 @@ export function getExceptionPropertiesFrom(eventProperties: Record) $os_version, $active_feature_flags, $sentry_url, - $exception_stack_trace_raw, $exception_list, $level, } @@ -190,7 +188,6 @@ export function ErrorDisplay({ eventProperties }: { eventProperties: EventType[' $os_version, $active_feature_flags, $sentry_url, - $exception_stack_trace_raw, $exception_list, $level, } = getExceptionPropertiesFrom(eventProperties) @@ -224,17 +221,7 @@ export function ErrorDisplay({ eventProperties }: { eventProperties: EventType[' - {$exception_list?.length ? ( - - ) : $exception_stack_trace_raw?.length ? ( - <> - -
-

Stack Trace

- -
- - ) : null} + {$exception_list?.length ? : null}

Active Feature Flags
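The ErrorDisplay change above makes `$exception_list` the canonical source: the exception type, message and synthetic flag are now derived from its first entry, with the Sentry payload as a fallback, and `$exception_stack_trace_raw` is dropped. A simplified standalone sketch of that fallback order, with types trimmed for illustration rather than the patch's exact code:

    type ExceptionEntry = { type?: string; value?: string; mechanism?: { synthetic?: boolean } }

    function deriveExceptionSummary(props: {
        $exception_type?: string
        $exception_message?: string
        $exception_list?: ExceptionEntry[]
        $sentry_exception?: { values?: ExceptionEntry[] }
    }): { type?: string; message?: string } {
        // $exception_list wins; fall back to the Sentry values array when it is missing or empty.
        const list = props.$exception_list?.length ? props.$exception_list : props.$sentry_exception?.values ?? []
        return {
            type: props.$exception_type ?? list[0]?.type,
            message: props.$exception_message ?? list[0]?.value,
        }
    }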

diff --git a/frontend/src/lib/components/Errors/error-display.test.ts b/frontend/src/lib/components/Errors/error-display.test.ts index fbb6bd1a62015..2e9024e80b7b2 100644 --- a/frontend/src/lib/components/Errors/error-display.test.ts +++ b/frontend/src/lib/components/Errors/error-display.test.ts @@ -1,7 +1,7 @@ import { getExceptionPropertiesFrom } from 'lib/components/Errors/ErrorDisplay' describe('Error Display', () => { - it('can read sentry stack trace when $exception_stack_trace_raw is not present', () => { + it('can read sentry stack trace when $exception_list is not present', () => { const eventProperties = { 'should not be in the': 'result', $browser: 'Chrome', @@ -53,8 +53,6 @@ describe('Error Display', () => { $browser: 'Chrome', $browser_version: '92.0.4515', $exception_message: 'There was an error creating the support ticket with zendesk.', - $exception_stack_trace_raw: - '[{"colno":220,"filename":"https://app-static-prod.posthog.com/static/chunk-UFQKIDIH.js","function":"submitZendeskTicket","in_app":true,"lineno":25}]', $exception_list: [ { mechanism: { @@ -118,7 +116,6 @@ describe('Error Display', () => { $browser: 'Chrome', $browser_version: '92.0.4515', $exception_message: 'the message sent into sentry captureMessage', - $exception_stack_trace_raw: undefined, $exception_synthetic: undefined, $exception_type: undefined, $lib: 'posthog-js', @@ -130,4 +127,77 @@ describe('Error Display', () => { 'https://sentry.io/organizations/posthog/issues/?project=1899813&query=40e442d79c22473391aeeeba54c82163', }) }) + + it('can read exception_list stack trace when $exception_type and message are not present', () => { + const eventProperties = { + 'should not be in the': 'result', + $browser: 'Chrome', + $browser_version: '92.0.4515', + $active_feature_flags: ['feature1,feature2'], + $lib: 'posthog-js', + $lib_version: '1.0.0', + $os: 'Windows', + $os_version: '10', + $exception_list: [ + { + mechanism: { + handled: true, + type: 'generic', + synthetic: false, + }, + stacktrace: { + frames: [ + { + colno: 220, + filename: 'https://app-static-prod.posthog.com/static/chunk-UFQKIDIH.js', + function: 'submitZendeskTicket', + in_app: true, + lineno: 25, + }, + ], + }, + type: 'Error', + value: 'There was an error creating the support ticket with zendesk2.', + }, + ], + $exception_personURL: 'https://app.posthog.com/person/f6kW3HXaha6dAvHZiOmgrcAXK09682P6nNPxvfjqM9c', + } + const result = getExceptionPropertiesFrom(eventProperties) + expect(result).toEqual({ + $active_feature_flags: ['feature1,feature2'], + $browser: 'Chrome', + $browser_version: '92.0.4515', + $exception_message: 'There was an error creating the support ticket with zendesk2.', + $exception_synthetic: false, + $exception_type: 'Error', + $lib: 'posthog-js', + $lib_version: '1.0.0', + $level: undefined, + $os: 'Windows', + $os_version: '10', + $sentry_url: undefined, + $exception_list: [ + { + mechanism: { + handled: true, + type: 'generic', + synthetic: false, + }, + stacktrace: { + frames: [ + { + colno: 220, + filename: 'https://app-static-prod.posthog.com/static/chunk-UFQKIDIH.js', + function: 'submitZendeskTicket', + in_app: true, + lineno: 25, + }, + ], + }, + type: 'Error', + value: 'There was an error creating the support ticket with zendesk2.', + }, + ], + }) + }) }) diff --git a/frontend/src/lib/components/JSSnippet.tsx b/frontend/src/lib/components/JSSnippet.tsx index 05cbf0cfb6139..0d93f5d71f845 100644 --- a/frontend/src/lib/components/JSSnippet.tsx +++ b/frontend/src/lib/components/JSSnippet.tsx @@ -20,7 +20,7 @@ 
export function snippetFunctions(): string { } const snippetMethods = methods.join(' ') - return `!function(t,e){var o,n,p,r;e.__SV||(window.posthog=e,e._i=[],e.init=function(i,s,a){function g(t,e){var o=e.split(".");2==o.length&&(t=t[o[0]],e=o[1]),t[e]=function(){t.push([e].concat(Array.prototype.slice.call(arguments,0)))}}(p=t.createElement("script")).type="text/javascript",p.async=!0,p.src=s.api_host.replace(".i.posthog.com","-assets.i.posthog.com")+"/static/array.js",(r=t.getElementsByTagName("script")[0]).parentNode.insertBefore(p,r);var u=e;for(void 0!==a?u=e[a]=[]:a="posthog",u.people=u.people||[],u.toString=function(t){var e="posthog";return"posthog"!==a&&(e+="."+a),t||(e+=" (stub)"),e},u.people.toString=function(){return u.toString(1)+".people (stub)"},o="${snippetMethods}".split(" "),n=0;n { if (!properties || Array.isArray(properties)) { return [] } - let entries = Object.entries(properties) + let entries = Object.entries(properties).sort((a, b) => { + // if this is a posthog property we want to sort by its label + const left = getCoreFilterDefinition(a[0], TaxonomicFilterGroupType.EventProperties)?.label || a[0] + const right = getCoreFilterDefinition(b[0], TaxonomicFilterGroupType.EventProperties)?.label || b[0] + + if (left < right) { + return -1 + } + if (left > right) { + return 1 + } + return 0 + }) + if (searchTerm) { const normalizedSearchTerm = searchTerm.toLowerCase() entries = entries.filter(([key, value]) => { @@ -228,7 +248,11 @@ export function PropertiesTable({ } if (filterable && hidePostHogPropertiesInTable) { - entries = entries.filter(([key]) => !key.startsWith('$') && !PROPERTY_KEYS.includes(key)) + entries = entries.filter(([key]) => { + const isPostHogProperty = key.startsWith('$') && PROPERTY_KEYS.includes(key) + const isNonDollarPostHogProperty = isCloudOrDev && NON_DOLLAR_POSTHOG_PROPERTY_KEYS.includes(key) + return !isPostHogProperty && !isNonDollarPostHogProperty + }) } if (sortProperties) { diff --git a/frontend/src/lib/components/PropertiesTimeline/PropertiesTimeline.stories.tsx b/frontend/src/lib/components/PropertiesTimeline/PropertiesTimeline.stories.tsx index 3d38ab578827d..cbfcbf10df59f 100644 --- a/frontend/src/lib/components/PropertiesTimeline/PropertiesTimeline.stories.tsx +++ b/frontend/src/lib/components/PropertiesTimeline/PropertiesTimeline.stories.tsx @@ -27,7 +27,7 @@ export function MultiplePointsForOnePersonProperty(): JSX.Element { const examplePerson: PersonActorType = { ...EXAMPLE_PERSON, id: 1, uuid: '012e89b5-4239-4319-8ae4-d3cae2f5deb1' } useStorybookMocks({ get: { - [`/api/projects/${MOCK_TEAM_ID}/persons/${examplePerson.uuid}/properties_timeline/`]: { + [`/api/environments/${MOCK_TEAM_ID}/persons/${examplePerson.uuid}/properties_timeline/`]: { points: [ { timestamp: '2021-01-01T00:00:00.000Z', @@ -88,7 +88,7 @@ export function OnePointForOnePersonProperty(): JSX.Element { const examplePerson: PersonActorType = { ...EXAMPLE_PERSON, id: 2, uuid: '012e89b5-4239-4319-8ae4-d3cae2f5deb2' } useStorybookMocks({ get: { - [`/api/projects/${MOCK_TEAM_ID}/persons/${examplePerson.uuid}/properties_timeline/`]: { + [`/api/environments/${MOCK_TEAM_ID}/persons/${examplePerson.uuid}/properties_timeline/`]: { points: [ { timestamp: '2021-05-01T00:00:00.000Z', @@ -125,7 +125,7 @@ export function NoPointsForNoPersonProperties(): JSX.Element { const examplePerson: PersonActorType = { ...EXAMPLE_PERSON, id: 3, uuid: '012e89b5-4239-4319-8ae4-d3cae2f5deb3' } useStorybookMocks({ get: { - 
[`/api/projects/${MOCK_TEAM_ID}/persons/${examplePerson.uuid}/properties_timeline/`]: { + [`/api/environments/${MOCK_TEAM_ID}/persons/${examplePerson.uuid}/properties_timeline/`]: { points: [ { timestamp: '2021-01-01T00:00:00.000Z', diff --git a/frontend/src/lib/components/PropertiesTimeline/propertiesTimelineLogic.ts b/frontend/src/lib/components/PropertiesTimeline/propertiesTimelineLogic.ts index adff5170c3cdf..b1f3c848cfd68 100644 --- a/frontend/src/lib/components/PropertiesTimeline/propertiesTimelineLogic.ts +++ b/frontend/src/lib/components/PropertiesTimeline/propertiesTimelineLogic.ts @@ -66,7 +66,7 @@ export const propertiesTimelineLogic = kea([ if (props.actor.type === 'person') { const queryId = uuid() const response = await apiGetWithTimeToSeeDataTracking( - `api/projects/${values.currentTeamId}/persons/${ + `api/environments/${values.currentTeamId}/persons/${ props.actor.uuid }/properties_timeline/?${toParams(props.filter)}`, values.currentTeamId, diff --git a/frontend/src/lib/components/PropertyFilters/PropertyFilters.tsx b/frontend/src/lib/components/PropertyFilters/PropertyFilters.tsx index 65aa23a5412e7..348a04f5b7c95 100644 --- a/frontend/src/lib/components/PropertyFilters/PropertyFilters.tsx +++ b/frontend/src/lib/components/PropertyFilters/PropertyFilters.tsx @@ -39,6 +39,7 @@ interface PropertyFiltersProps { allowRelativeDateOptions?: boolean disabledReason?: string exactMatchFeatureFlagCohortOperators?: boolean + hideBehavioralCohorts?: boolean } export function PropertyFilters({ @@ -67,6 +68,7 @@ export function PropertyFilters({ allowRelativeDateOptions, disabledReason = undefined, exactMatchFeatureFlagCohortOperators = false, + hideBehavioralCohorts, }: PropertyFiltersProps): JSX.Element { const logicProps = { propertyFilters, onChange, pageKey, sendAllKeyUpdates } const { filters, filtersWithNew } = useValues(propertyFilterLogic(logicProps)) @@ -76,7 +78,7 @@ export function PropertyFilters({ // Update the logic's internal filters when the props change useEffect(() => { setFilters(propertyFilters ?? 
[]) - }, [propertyFilters]) + }, [propertyFilters, setFilters]) // do not open on initial render, only open if newly inserted useEffect(() => { @@ -131,6 +133,7 @@ export function PropertyFilters({ taxonomicFilterOptionsFromProp={taxonomicFilterOptionsFromProp} allowRelativeDateOptions={allowRelativeDateOptions} exactMatchFeatureFlagCohortOperators={exactMatchFeatureFlagCohortOperators} + hideBehavioralCohorts={hideBehavioralCohorts} /> )} errorMessage={errorMessages && errorMessages[index]} diff --git a/frontend/src/lib/components/PropertyFilters/components/TaxonomicPropertyFilter.tsx b/frontend/src/lib/components/PropertyFilters/components/TaxonomicPropertyFilter.tsx index 7898003971095..e2c10b43233a2 100644 --- a/frontend/src/lib/components/PropertyFilters/components/TaxonomicPropertyFilter.tsx +++ b/frontend/src/lib/components/PropertyFilters/components/TaxonomicPropertyFilter.tsx @@ -49,6 +49,7 @@ export function TaxonomicPropertyFilter({ taxonomicFilterOptionsFromProp, allowRelativeDateOptions, exactMatchFeatureFlagCohortOperators, + hideBehavioralCohorts, }: PropertyFilterInternalProps): JSX.Element { const pageKey = useMemo(() => pageKeyInput || `filter-${uniqueMemoizedIndex++}`, [pageKeyInput]) const groupTypes = taxonomicGroupTypes || [ @@ -114,6 +115,7 @@ export function TaxonomicPropertyFilter({ schemaColumns={schemaColumns} propertyAllowList={propertyAllowList} optionsFromProp={taxonomicFilterOptionsFromProp} + hideBehavioralCohorts={hideBehavioralCohorts} /> ) diff --git a/frontend/src/lib/components/PropertyFilters/types.ts b/frontend/src/lib/components/PropertyFilters/types.ts index 666860d7c2179..44b6ec9490c24 100644 --- a/frontend/src/lib/components/PropertyFilters/types.ts +++ b/frontend/src/lib/components/PropertyFilters/types.ts @@ -51,4 +51,5 @@ export interface PropertyFilterInternalProps { propertyAllowList?: { [key in TaxonomicFilterGroupType]?: string[] } allowRelativeDateOptions?: boolean exactMatchFeatureFlagCohortOperators?: boolean + hideBehavioralCohorts?: boolean } diff --git a/frontend/src/lib/components/PropertySelect/PropertySelect.stories.tsx b/frontend/src/lib/components/PropertySelect/PropertySelect.stories.tsx index 35021b789889b..b42bf7366157d 100644 --- a/frontend/src/lib/components/PropertySelect/PropertySelect.stories.tsx +++ b/frontend/src/lib/components/PropertySelect/PropertySelect.stories.tsx @@ -13,7 +13,7 @@ const meta: Meta = { decorators: [ mswDecorator({ get: { - '/api/projects/:team_id/persons/properties': [ + '/api/environments/:team_id/persons/properties': [ { name: 'Property A', count: 10 }, { name: 'Property B', count: 20 }, { name: 'Property C', count: 30 }, diff --git a/frontend/src/lib/components/ReverseProxyChecker/reverseProxyCheckerLogic.test.ts b/frontend/src/lib/components/ReverseProxyChecker/reverseProxyCheckerLogic.test.ts index 5ea635b7e4f90..8842402310efe 100644 --- a/frontend/src/lib/components/ReverseProxyChecker/reverseProxyCheckerLogic.test.ts +++ b/frontend/src/lib/components/ReverseProxyChecker/reverseProxyCheckerLogic.test.ts @@ -11,7 +11,7 @@ const doesNotHaveReverseProxyValues = [[null], [null]] const useMockedValues = (results: (string | null)[][]): void => { useMocks({ post: { - '/api/projects/:team/query': () => [ + '/api/environments/:team_id/query': () => [ 200, { results, diff --git a/frontend/src/lib/components/Sharing/SharingModal.stories.tsx b/frontend/src/lib/components/Sharing/SharingModal.stories.tsx index 8c088002c4cb7..75f527f56da73 100644 --- 
a/frontend/src/lib/components/Sharing/SharingModal.stories.tsx +++ b/frontend/src/lib/components/Sharing/SharingModal.stories.tsx @@ -36,9 +36,9 @@ const Template = (args: Partial & { licensed?: boolean }): JS useStorybookMocks({ get: { ...[ - '/api/projects/:id/insights/:insight_id/sharing/', - '/api/projects/:id/dashboards/:dashboard_id/sharing/', - '/api/projects/:id/session_recordings/:recording_id/sharing/', + '/api/environments/:id/insights/:insight_id/sharing/', + '/api/environments/:id/dashboards/:dashboard_id/sharing/', + '/api/environments/:id/session_recordings/:recording_id/sharing/', ].reduce( (acc, url) => ({ ...acc, @@ -50,13 +50,13 @@ const Template = (args: Partial & { licensed?: boolean }): JS }), {} ), - '/api/projects/:id/insights/': { results: [fakeInsight] }, + '/api/environments/:id/insights/': { results: [fakeInsight] }, }, patch: { ...[ - '/api/projects/:id/insights/:insight_id/sharing/', - '/api/projects/:id/dashboards/:dashboard_id/sharing/', - '/api/projects/:id/session_recordings/:recording_id/sharing/', + '/api/environments/:id/insights/:insight_id/sharing/', + '/api/environments/:id/dashboards/:dashboard_id/sharing/', + '/api/environments/:id/session_recordings/:recording_id/sharing/', ].reduce( (acc, url) => ({ ...acc, diff --git a/frontend/src/lib/components/Subscriptions/SubscriptionsModal.stories.tsx b/frontend/src/lib/components/Subscriptions/SubscriptionsModal.stories.tsx index ce6bca5ee6903..0dcb793df4383 100644 --- a/frontend/src/lib/components/Subscriptions/SubscriptionsModal.stories.tsx +++ b/frontend/src/lib/components/Subscriptions/SubscriptionsModal.stories.tsx @@ -40,7 +40,7 @@ const Template = ( slack_service: noIntegrations ? { available: false } : { available: true, client_id: 'test-client-id' }, site_url: noIntegrations ? 'bad-value' : window.location.origin, }, - '/api/projects/:id/subscriptions': { + '/api/environments/:id/subscriptions': { results: insightShortIdRef.current === 'empty' ? [] @@ -61,7 +61,7 @@ const Template = ( }), ], }, - '/api/projects/:id/subscriptions/:subId': createMockSubscription(), + '/api/environments/:id/subscriptions/:subId': createMockSubscription(), '/api/projects/:id/integrations': { results: !noIntegrations ? [mockIntegration] : [] }, '/api/projects/:id/integrations/:intId/channels': { channels: mockSlackChannels }, }, diff --git a/frontend/src/lib/components/Subscriptions/subscriptionsLogic.test.ts b/frontend/src/lib/components/Subscriptions/subscriptionsLogic.test.ts index 2cfbe7207bf52..092614abd0d67 100644 --- a/frontend/src/lib/components/Subscriptions/subscriptionsLogic.test.ts +++ b/frontend/src/lib/components/Subscriptions/subscriptionsLogic.test.ts @@ -54,15 +54,15 @@ describe('subscriptionsLogic', () => { subscriptions = [fixtureSubscriptionResponse(1), fixtureSubscriptionResponse(2)] useMocks({ get: { - '/api/projects/:team/insights/1': fixtureInsightResponse(1), - '/api/projects/:team/insights/2': fixtureInsightResponse(2), - '/api/projects/:team/insights': (req) => { + '/api/environments/:team_id/insights/1': fixtureInsightResponse(1), + '/api/environments/:team_id/insights/2': fixtureInsightResponse(2), + '/api/environments/:team_id/insights': (req) => { const insightShortId = req.url.searchParams.get('short_id') const res = insightShortId ? 
[fixtureInsightResponse(parseInt(insightShortId, 10))] : [] return [200, { results: res }] }, - '/api/projects/:team/subscriptions': (req) => { + '/api/environments/:team_id/subscriptions': (req) => { const insightId = req.url.searchParams.get('insight') let results: SubscriptionType[] = [] diff --git a/frontend/src/lib/components/TaxonomicFilter/TaxonomicFilter.tsx b/frontend/src/lib/components/TaxonomicFilter/TaxonomicFilter.tsx index cdeb294fa55d1..c65ffd600898f 100644 --- a/frontend/src/lib/components/TaxonomicFilter/TaxonomicFilter.tsx +++ b/frontend/src/lib/components/TaxonomicFilter/TaxonomicFilter.tsx @@ -35,6 +35,7 @@ export function TaxonomicFilter({ popoverEnabled = true, selectFirstItem = true, propertyAllowList, + hideBehavioralCohorts, }: TaxonomicFilterProps): JSX.Element { // Generate a unique key for each unique TaxonomicFilter that's rendered const taxonomicFilterLogicKey = useMemo( @@ -60,6 +61,7 @@ export function TaxonomicFilter({ excludedProperties, metadataSource, propertyAllowList, + hideBehavioralCohorts, } const logic = taxonomicFilterLogic(taxonomicFilterLogicProps) @@ -70,7 +72,7 @@ export function TaxonomicFilter({ if (groupType !== TaxonomicFilterGroupType.HogQLExpression) { window.setTimeout(() => focusInput(), 1) } - }, []) + }, [groupType]) const style = { ...(width ? { width } : {}), diff --git a/frontend/src/lib/components/TaxonomicFilter/__mocks__/taxonomicFilterMocksDecorator.ts b/frontend/src/lib/components/TaxonomicFilter/__mocks__/taxonomicFilterMocksDecorator.ts index 9c29ae50e755c..f5140db2985e0 100644 --- a/frontend/src/lib/components/TaxonomicFilter/__mocks__/taxonomicFilterMocksDecorator.ts +++ b/frontend/src/lib/components/TaxonomicFilter/__mocks__/taxonomicFilterMocksDecorator.ts @@ -4,7 +4,7 @@ import { mockActionDefinition } from '~/test/mocks' export const taxonomicFilterMocksDecorator = mswDecorator({ get: { '/api/projects/:team_id/actions': { results: [mockActionDefinition] }, - '/api/projects/:team_id/persons/properties': [ + '/api/environments/:team_id/persons/properties': [ { id: 1, name: 'location', count: 1 }, { id: 2, name: 'role', count: 2 }, { id: 3, name: 'height', count: 3 }, diff --git a/frontend/src/lib/components/TaxonomicFilter/cohortFilterUtils.ts b/frontend/src/lib/components/TaxonomicFilter/cohortFilterUtils.ts new file mode 100644 index 0000000000000..89ba438fd2fb3 --- /dev/null +++ b/frontend/src/lib/components/TaxonomicFilter/cohortFilterUtils.ts @@ -0,0 +1,43 @@ +import { BehavioralFilterKey } from 'scenes/cohorts/CohortFilters/types' + +import { AnyCohortCriteriaType, CohortCriteriaGroupFilter, CohortType } from '~/types' + +function isCohortCriteriaGroupFilter( + value: AnyCohortCriteriaType | CohortCriteriaGroupFilter +): value is CohortCriteriaGroupFilter { + return (value as CohortCriteriaGroupFilter).type === 'AND' || (value as CohortCriteriaGroupFilter).type === 'OR' +} + +const hasBehavioralFilter = (cohort: CohortType, allCohorts: CohortType[]): boolean => { + const checkCriteriaGroup = (group: CohortCriteriaGroupFilter): boolean => { + return group.values.some((value) => { + if (isCohortCriteriaGroupFilter(value)) { + return checkCriteriaGroup(value) + } + if (value.type === BehavioralFilterKey.Behavioral) { + return true + } + if (value.type === BehavioralFilterKey.Cohort) { + // the first time we load the page we haven't transformed the cohort data, + // so there's no value_property, and we need to use `value.value` instead. 
+ const cohortId = value.value_property || value.value + const nestedCohort = allCohorts.find((item) => item.id === cohortId) + if (nestedCohort) { + return hasBehavioralFilter(nestedCohort, allCohorts) + } + return false + } + return false + }) + } + + return cohort.filters?.properties ? checkCriteriaGroup(cohort.filters.properties) : false +} + +export const filterOutBehavioralCohorts = (items: CohortType[], hideBehavioralCohorts?: boolean): CohortType[] => { + if (!hideBehavioralCohorts) { + return items + } + + return items.filter((item) => !hasBehavioralFilter(item, items)) +} diff --git a/frontend/src/lib/components/TaxonomicFilter/infiniteListLogic.test.ts b/frontend/src/lib/components/TaxonomicFilter/infiniteListLogic.test.ts index cf04e58e218ae..25e2866afc2c3 100644 --- a/frontend/src/lib/components/TaxonomicFilter/infiniteListLogic.test.ts +++ b/frontend/src/lib/components/TaxonomicFilter/infiniteListLogic.test.ts @@ -9,7 +9,10 @@ import { AppContext, PropertyDefinition } from '~/types' import { infiniteListLogic } from './infiniteListLogic' -window.POSTHOG_APP_CONTEXT = { current_team: { id: MOCK_TEAM_ID } } as unknown as AppContext +window.POSTHOG_APP_CONTEXT = { + current_team: { id: MOCK_TEAM_ID }, + current_project: { id: MOCK_TEAM_ID }, +} as unknown as AppContext describe('infiniteListLogic', () => { let logic: ReturnType diff --git a/frontend/src/lib/components/TaxonomicFilter/infiniteListLogic.ts b/frontend/src/lib/components/TaxonomicFilter/infiniteListLogic.ts index 8746ae84b9868..eb8ca78c45ec4 100644 --- a/frontend/src/lib/components/TaxonomicFilter/infiniteListLogic.ts +++ b/frontend/src/lib/components/TaxonomicFilter/infiniteListLogic.ts @@ -20,6 +20,7 @@ import { CohortType, EventDefinition } from '~/types' import { teamLogic } from '../../../scenes/teamLogic' import { captureTimeToSeeData } from '../../internalMetrics' +import { filterOutBehavioralCohorts } from './cohortFilterUtils' import type { infiniteListLogicType } from './infiniteListLogicType' /* @@ -240,11 +241,21 @@ export const infiniteListLogic = kea([ hasRemoteDataSource: [(s) => [s.remoteEndpoint], (remoteEndpoint) => !!remoteEndpoint], rawLocalItems: [ (selectors) => [ - (state, props) => { + (state, props: InfiniteListLogicProps) => { const taxonomicGroups = selectors.taxonomicGroups(state) const group = taxonomicGroups.find((g) => g.type === props.listGroupType) + if (group?.logic && group?.value) { - return group.logic.selectors[group.value]?.(state) || null + const items = group.logic.selectors[group.value]?.(state) + // TRICKY: Feature flags don't support dynamic behavioral cohorts, + // so we don't want to show them as selectable options in the taxonomic filter + // in the feature flag UI. + // TODO: Once we support dynamic behavioral cohorts, we should show them in the taxonomic filter, + // and remove this kludge. 
+ if (Array.isArray(items) && items.every((item) => 'filters' in item)) { + return filterOutBehavioralCohorts(items, props.hideBehavioralCohorts) + } + return items } if (group?.options) { return group.options diff --git a/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.test.ts b/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.test.ts index 74278c510754e..90103bc8686ad 100644 --- a/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.test.ts +++ b/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.test.ts @@ -12,7 +12,10 @@ import { AppContext } from '~/types' import { infiniteListLogic } from './infiniteListLogic' -window.POSTHOG_APP_CONTEXT = { current_team: { id: MOCK_TEAM_ID } } as unknown as AppContext +window.POSTHOG_APP_CONTEXT = { + current_team: { id: MOCK_TEAM_ID }, + current_project: { id: MOCK_TEAM_ID }, +} as unknown as AppContext describe('taxonomicFilterLogic', () => { let logic: ReturnType @@ -33,7 +36,7 @@ describe('taxonomicFilterLogic', () => { }, ] }, - '/api/projects/:team/sessions/property_definitions': (res) => { + '/api/environments/:team/sessions/property_definitions': (res) => { const search = res.url.searchParams.get('search') const results = search ? mockSessionPropertyDefinitions.filter((e) => e.name.includes(search)) diff --git a/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.tsx b/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.tsx index a93ba76878fe0..4f78288aa2c75 100644 --- a/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.tsx +++ b/frontend/src/lib/components/TaxonomicFilter/taxonomicFilterLogic.tsx @@ -20,6 +20,7 @@ import { dataWarehouseSceneLogic } from 'scenes/data-warehouse/settings/dataWare import { experimentsLogic } from 'scenes/experiments/experimentsLogic' import { featureFlagsLogic } from 'scenes/feature-flags/featureFlagsLogic' import { groupDisplayId } from 'scenes/persons/GroupActorDisplay' +import { projectLogic } from 'scenes/projectLogic' import { ReplayTaxonomicFilters } from 'scenes/session-recordings/filters/ReplayTaxonomicFilters' import { teamLogic } from 'scenes/teamLogic' @@ -79,6 +80,8 @@ export const taxonomicFilterLogic = kea([ values: [ teamLogic, ['currentTeamId'], + projectLogic, + ['currentProjectId'], groupsModel, ['groupTypes', 'aggregationLabel'], groupPropertiesModel, @@ -159,6 +162,7 @@ export const taxonomicFilterLogic = kea([ taxonomicGroups: [ (s) => [ s.currentTeamId, + s.currentProjectId, s.groupAnalyticsTaxonomicGroups, s.groupAnalyticsTaxonomicGroupNames, s.eventNames, @@ -169,6 +173,7 @@ export const taxonomicFilterLogic = kea([ ], ( teamId, + projectId, groupAnalyticsTaxonomicGroups, groupAnalyticsTaxonomicGroupNames, eventNames, @@ -185,7 +190,7 @@ export const taxonomicFilterLogic = kea([ options: [{ name: 'All events', value: null }].filter( (o) => !excludedProperties[TaxonomicFilterGroupType.Events]?.includes(o.value) ), - endpoint: combineUrl(`api/projects/${teamId}/event_definitions`, { + endpoint: combineUrl(`api/projects/${projectId}/event_definitions`, { event_type: EventDefinitionType.Event, }).url, getName: (eventDefinition: Record) => eventDefinition.name, @@ -261,7 +266,7 @@ export const taxonomicFilterLogic = kea([ name: 'Event properties', searchPlaceholder: 'event properties', type: TaxonomicFilterGroupType.EventProperties, - endpoint: combineUrl(`api/projects/${teamId}/property_definitions`, { + endpoint: combineUrl(`api/projects/${projectId}/property_definitions`, { is_feature_flag: 
false, ...(eventNames.length > 0 ? { event_names: eventNames } : {}), properties: propertyAllowList?.[TaxonomicFilterGroupType.EventProperties] @@ -270,7 +275,7 @@ export const taxonomicFilterLogic = kea([ }).url, scopedEndpoint: eventNames.length > 0 - ? combineUrl(`api/projects/${teamId}/property_definitions`, { + ? combineUrl(`api/projects/${projectId}/property_definitions`, { event_names: eventNames, is_feature_flag: false, filter_by_event_names: true, @@ -296,13 +301,13 @@ export const taxonomicFilterLogic = kea([ name: 'Feature flags', searchPlaceholder: 'feature flags', type: TaxonomicFilterGroupType.EventFeatureFlags, - endpoint: combineUrl(`api/projects/${teamId}/property_definitions`, { + endpoint: combineUrl(`api/projects/${projectId}/property_definitions`, { is_feature_flag: true, ...(eventNames.length > 0 ? { event_names: eventNames } : {}), }).url, scopedEndpoint: eventNames.length > 0 - ? combineUrl(`api/projects/${teamId}/property_definitions`, { + ? combineUrl(`api/projects/${projectId}/property_definitions`, { event_names: eventNames, is_feature_flag: true, filter_by_event_names: true, @@ -324,7 +329,7 @@ export const taxonomicFilterLogic = kea([ name: 'Numerical event properties', searchPlaceholder: 'numerical event properties', type: TaxonomicFilterGroupType.NumericalEventProperties, - endpoint: combineUrl(`api/projects/${teamId}/property_definitions`, { + endpoint: combineUrl(`api/projects/${projectId}/property_definitions`, { is_numerical: true, event_names: eventNames, }).url, @@ -336,7 +341,7 @@ export const taxonomicFilterLogic = kea([ name: 'Person properties', searchPlaceholder: 'person properties', type: TaxonomicFilterGroupType.PersonProperties, - endpoint: combineUrl(`api/projects/${teamId}/property_definitions`, { + endpoint: combineUrl(`api/projects/${projectId}/property_definitions`, { type: 'person', properties: propertyAllowList?.[TaxonomicFilterGroupType.PersonProperties] ? 
propertyAllowList[TaxonomicFilterGroupType.PersonProperties].join(',') @@ -377,7 +382,7 @@ export const taxonomicFilterLogic = kea([ name: 'Pageview URLs', searchPlaceholder: 'pageview URLs', type: TaxonomicFilterGroupType.PageviewUrls, - endpoint: `api/projects/${teamId}/events/values/?key=$current_url`, + endpoint: `api/environments/${teamId}/events/values/?key=$current_url`, searchAlias: 'value', getName: (option: SimpleOption) => option.name, getValue: (option: SimpleOption) => option.name, @@ -387,7 +392,7 @@ export const taxonomicFilterLogic = kea([ name: 'Screens', searchPlaceholder: 'screens', type: TaxonomicFilterGroupType.Screens, - endpoint: `api/projects/${teamId}/events/values/?key=$screen_name`, + endpoint: `api/environments/${teamId}/events/values/?key=$screen_name`, searchAlias: 'value', getName: (option: SimpleOption) => option.name, getValue: (option: SimpleOption) => option.name, @@ -397,7 +402,7 @@ export const taxonomicFilterLogic = kea([ name: 'Custom Events', searchPlaceholder: 'custom events', type: TaxonomicFilterGroupType.CustomEvents, - endpoint: combineUrl(`api/projects/${teamId}/event_definitions`, { + endpoint: combineUrl(`api/projects/${projectId}/event_definitions`, { event_type: EventDefinitionType.EventCustom, }).url, getName: (eventDefinition: EventDefinition) => eventDefinition.name, @@ -417,7 +422,7 @@ export const taxonomicFilterLogic = kea([ name: 'Persons', searchPlaceholder: 'persons', type: TaxonomicFilterGroupType.Persons, - endpoint: `api/projects/${teamId}/persons/`, + endpoint: `api/environments/${teamId}/persons/`, getName: (person: PersonType) => person.name || 'Anon user?', getValue: (person: PersonType) => person.distinct_ids[0], getPopoverHeader: () => `Person`, @@ -426,7 +431,7 @@ export const taxonomicFilterLogic = kea([ name: 'Insights', searchPlaceholder: 'insights', type: TaxonomicFilterGroupType.Insights, - endpoint: combineUrl(`api/projects/${teamId}/insights/`, { + endpoint: combineUrl(`api/environments/${teamId}/insights/`, { saved: true, }).url, getName: (insight: QueryBasedInsightModel) => insight.name, @@ -481,7 +486,7 @@ export const taxonomicFilterLogic = kea([ getName: (option: any) => option.name, getValue: (option) => option.name, getPopoverHeader: () => 'Session', - endpoint: `api/projects/${teamId}/sessions/property_definitions`, + endpoint: `api/environments/${teamId}/sessions/property_definitions`, getIcon: getPropertyDefinitionIcon, }, { @@ -500,6 +505,7 @@ export const taxonomicFilterLogic = kea([ valuesEndpoint: (key) => { if (key === 'visited_page') { return ( + `api/environments/${teamId}/events/values/?key=` + 'api/event/values/?key=' + encodeURIComponent('$current_url') + '&event_name=' + @@ -532,7 +538,7 @@ export const taxonomicFilterLogic = kea([ name: `${capitalizeFirstLetter(aggregationLabel(type.group_type_index).plural)}`, searchPlaceholder: `${aggregationLabel(type.group_type_index).plural}`, type: `${TaxonomicFilterGroupType.GroupNamesPrefix}_${type.group_type_index}` as unknown as TaxonomicFilterGroupType, - endpoint: combineUrl(`api/projects/${teamId}/groups/`, { + endpoint: combineUrl(`api/environments/${teamId}/groups/`, { group_type_index: type.group_type_index, }).url, searchAlias: 'group_key', @@ -543,13 +549,13 @@ export const taxonomicFilterLogic = kea([ })), ], groupAnalyticsTaxonomicGroups: [ - (s) => [s.groupTypes, s.currentTeamId, s.aggregationLabel], - (groupTypes, teamId, aggregationLabel): TaxonomicFilterGroup[] => + (s) => [s.groupTypes, s.currentProjectId, s.currentTeamId, 
s.aggregationLabel], + (groupTypes, projectId, teamId, aggregationLabel): TaxonomicFilterGroup[] => Array.from(groupTypes.values()).map((type) => ({ name: `${capitalizeFirstLetter(aggregationLabel(type.group_type_index).singular)} properties`, searchPlaceholder: `${aggregationLabel(type.group_type_index).singular} properties`, type: `${TaxonomicFilterGroupType.GroupsPrefix}_${type.group_type_index}` as unknown as TaxonomicFilterGroupType, - endpoint: combineUrl(`api/projects/${teamId}/property_definitions`, { + endpoint: combineUrl(`api/projects/${projectId}/property_definitions`, { type: 'group', group_type_index: type.group_type_index, }).url, diff --git a/frontend/src/lib/components/TaxonomicFilter/types.ts b/frontend/src/lib/components/TaxonomicFilter/types.ts index a496095b8471b..40931c4ef93e3 100644 --- a/frontend/src/lib/components/TaxonomicFilter/types.ts +++ b/frontend/src/lib/components/TaxonomicFilter/types.ts @@ -37,6 +37,7 @@ export interface TaxonomicFilterProps { excludedProperties?: { [key in TaxonomicFilterGroupType]?: TaxonomicFilterValue[] } propertyAllowList?: { [key in TaxonomicFilterGroupType]?: string[] } // only return properties in this list, currently only working for EventProperties and PersonProperties metadataSource?: AnyDataNode + hideBehavioralCohorts?: boolean } export interface TaxonomicFilterLogicProps extends TaxonomicFilterProps { diff --git a/frontend/src/lib/components/VersionChecker/versionCheckerLogic.test.ts b/frontend/src/lib/components/VersionChecker/versionCheckerLogic.test.ts index 2107640885e26..103e89b2bfed9 100644 --- a/frontend/src/lib/components/VersionChecker/versionCheckerLogic.test.ts +++ b/frontend/src/lib/components/VersionChecker/versionCheckerLogic.test.ts @@ -24,7 +24,7 @@ const useMockedVersions = ( ], }, post: { - '/api/projects/:team/query': () => [ + '/api/environments/:team_id/query': () => [ 200, { results: usedVersions.map((x) => [x.version, x.timestamp]), diff --git a/frontend/src/lib/constants.tsx b/frontend/src/lib/constants.tsx index 5d224fce08e5d..36db4c50cdc65 100644 --- a/frontend/src/lib/constants.tsx +++ b/frontend/src/lib/constants.tsx @@ -219,6 +219,7 @@ export const FEATURE_FLAGS = { ENVIRONMENTS: 'environments', // owner: @Twixes #team-product-analytics BILLING_PAYMENT_ENTRY_IN_APP: 'billing-payment-entry-in-app', // owner: @zach LEGACY_ACTION_WEBHOOKS: 'legacy-action-webhooks', // owner: @mariusandra #team-cdp + SESSION_REPLAY_URL_TRIGGER: 'session-replay-url-trigger', // owner: @richard-better #team-replay } as const export type FeatureFlagKey = (typeof FEATURE_FLAGS)[keyof typeof FEATURE_FLAGS] diff --git a/frontend/src/lib/integrations/integrationsLogic.ts b/frontend/src/lib/integrations/integrationsLogic.ts index fe5cd94ecf5e7..dd5f28959706f 100644 --- a/frontend/src/lib/integrations/integrationsLogic.ts +++ b/frontend/src/lib/integrations/integrationsLogic.ts @@ -4,6 +4,7 @@ import { loaders } from 'kea-loaders' import { router, urlToAction } from 'kea-router' import api from 'lib/api' import { fromParamsGivenUrl } from 'lib/utils' +import IconGoogleAds from 'public/services/google-ads.png' import IconGoogleCloud from 'public/services/google-cloud.png' import IconGoogleCloudStorage from 'public/services/google-cloud-storage.png' import IconHubspot from 'public/services/hubspot.png' @@ -22,6 +23,7 @@ const ICONS: Record = { hubspot: IconHubspot, 'google-pubsub': IconGoogleCloud, 'google-cloud-storage': IconGoogleCloudStorage, + 'google-ads': IconGoogleAds, } export const integrationsLogic = kea([ diff --git 
a/frontend/src/lib/lemon-ui/LemonInputSelect/LemonInputSelect.tsx b/frontend/src/lib/lemon-ui/LemonInputSelect/LemonInputSelect.tsx index 007e1adf46189..3129a068d64c9 100644 --- a/frontend/src/lib/lemon-ui/LemonInputSelect/LemonInputSelect.tsx +++ b/frontend/src/lib/lemon-ui/LemonInputSelect/LemonInputSelect.tsx @@ -115,7 +115,7 @@ export function LemonInputSelect({ // Show the input value if custom values are allowed and it's not in the list if (inputValue && !values.includes(inputValue)) { if (allowCustomValues) { - const unescapedInputValue = inputValue.replace('\\,', ',') // Transform escaped commas to plain commas + const unescapedInputValue = inputValue.replaceAll('\\,', ',') // Transform escaped commas to plain commas ret.push({ key: unescapedInputValue, label: unescapedInputValue, __isInput: true }) } } else if (mode === 'single' && values.length > 0) { @@ -164,7 +164,7 @@ export function LemonInputSelect({ // We split on commas EXCEPT if they're escaped (to allow for commas in values) newValue.split(NON_ESCAPED_COMMA_REGEX).forEach((value) => { - const trimmedValue = value.replace('\\,', ',').trim() // Transform escaped commas to plain commas + const trimmedValue = value.replaceAll('\\,', ',').trim() // Transform escaped commas to plain commas if (trimmedValue && !values.includes(trimmedValue)) { newValues.push(trimmedValue) } diff --git a/frontend/src/lib/lemon-ui/LemonMenu/LemonMenu.tsx b/frontend/src/lib/lemon-ui/LemonMenu/LemonMenu.tsx index 63a351c4a1efe..f7798f0065378 100644 --- a/frontend/src/lib/lemon-ui/LemonMenu/LemonMenu.tsx +++ b/frontend/src/lib/lemon-ui/LemonMenu/LemonMenu.tsx @@ -17,6 +17,8 @@ export interface LemonMenuItemBase > { label: string | JSX.Element key?: React.Key + /** @deprecated You're probably doing something wrong if you're setting per-item classes. */ + className?: string /** True if the item is a custom element. 
*/ custom?: boolean } diff --git a/frontend/src/lib/lemon-ui/icons/categories.ts b/frontend/src/lib/lemon-ui/icons/categories.ts index 1b7b97b39abe5..879673b1c94c1 100644 --- a/frontend/src/lib/lemon-ui/icons/categories.ts +++ b/frontend/src/lib/lemon-ui/icons/categories.ts @@ -198,7 +198,7 @@ export const TEAMS_AND_COMPANIES = { 'IconRewindPlay', 'IconVideoCamera', ], - 'Feature Success': ['IconFlask', 'IconTestTube', 'IconMultivariateTesting', 'IconSplitTesting'], + 'Feature Success': ['IconFlask', 'IconTestTube', 'IconMultivariateTesting', 'IconSplitTesting', 'IconBalance'], Pipeline: ['IconWebhooks', 'IconDecisionTree'], 'Product OS': ['IconNotebook', 'IconHogQL', 'IconDashboard', 'IconSupport'], Logos: ['IconLogomark', 'IconGithub'], diff --git a/frontend/src/lib/monaco/CodeEditor.tsx b/frontend/src/lib/monaco/CodeEditor.tsx index e9ba319069831..769c7bb2ad01d 100644 --- a/frontend/src/lib/monaco/CodeEditor.tsx +++ b/frontend/src/lib/monaco/CodeEditor.tsx @@ -6,12 +6,10 @@ import { Spinner } from 'lib/lemon-ui/Spinner' import { codeEditorLogic } from 'lib/monaco/codeEditorLogic' import { codeEditorLogicType } from 'lib/monaco/codeEditorLogicType' import { findNextFocusableElement, findPreviousFocusableElement } from 'lib/monaco/domUtils' -import { hogQLAutocompleteProvider } from 'lib/monaco/hogQLAutocompleteProvider' -import { hogQLMetadataProvider } from 'lib/monaco/hogQLMetadataProvider' -import * as hog from 'lib/monaco/languages/hog' -import * as hogJson from 'lib/monaco/languages/hogJson' -import * as hogQL from 'lib/monaco/languages/hogQL' -import * as hogTemplate from 'lib/monaco/languages/hogTemplate' +import { initHogLanguage } from 'lib/monaco/languages/hog' +import { initHogJsonLanguage } from 'lib/monaco/languages/hogJson' +import { initHogQLLanguage } from 'lib/monaco/languages/hogQL' +import { initHogTemplateLanguage } from 'lib/monaco/languages/hogTemplate' import { inStorybookTestRunner } from 'lib/utils' import { editor, editor as importedEditor, IDisposable } from 'monaco-editor' import * as monaco from 'monaco-editor' @@ -47,61 +45,16 @@ function initEditor( ;(model as any).codeEditorLogic = builtCodeEditorLogic if (editorProps?.language === 'hog') { - if (!monaco.languages.getLanguages().some(({ id }) => id === 'hog')) { - monaco.languages.register({ id: 'hog', extensions: ['.hog'], mimetypes: ['application/hog'] }) - monaco.languages.setLanguageConfiguration('hog', hog.conf()) - monaco.languages.setMonarchTokensProvider('hog', hog.language()) - monaco.languages.registerCompletionItemProvider('hog', hogQLAutocompleteProvider(HogLanguage.hog)) - monaco.languages.registerCodeActionProvider('hog', hogQLMetadataProvider()) - } + initHogLanguage(monaco) } if (editorProps?.language === 'hogQL' || editorProps?.language === 'hogQLExpr') { - const language: HogLanguage = editorProps.language as HogLanguage - if (!monaco.languages.getLanguages().some(({ id }) => id === language)) { - monaco.languages.register( - language === 'hogQL' - ? 
{ - id: language, - extensions: ['.sql', '.hogql'], - mimetypes: ['application/hogql'], - } - : { - id: language, - mimetypes: ['application/hogql+expr'], - } - ) - monaco.languages.setLanguageConfiguration(language, hogQL.conf()) - monaco.languages.setMonarchTokensProvider(language, hogQL.language()) - monaco.languages.registerCompletionItemProvider(language, hogQLAutocompleteProvider(language)) - monaco.languages.registerCodeActionProvider(language, hogQLMetadataProvider()) - } + initHogQLLanguage(monaco, editorProps.language as HogLanguage) } if (editorProps?.language === 'hogTemplate') { - if (!monaco.languages.getLanguages().some(({ id }) => id === 'hogTemplate')) { - monaco.languages.register({ - id: 'hogTemplate', - mimetypes: ['application/hog+template'], - }) - monaco.languages.setLanguageConfiguration('hogTemplate', hogTemplate.conf()) - monaco.languages.setMonarchTokensProvider('hogTemplate', hogTemplate.language()) - monaco.languages.registerCompletionItemProvider( - 'hogTemplate', - hogQLAutocompleteProvider(HogLanguage.hogTemplate) - ) - monaco.languages.registerCodeActionProvider('hogTemplate', hogQLMetadataProvider()) - } + initHogTemplateLanguage(monaco) } if (editorProps?.language === 'hogJson') { - if (!monaco.languages.getLanguages().some(({ id }) => id === 'hogJson')) { - monaco.languages.register({ - id: 'hogJson', - mimetypes: ['application/hog+json'], - }) - monaco.languages.setLanguageConfiguration('hogJson', hogJson.conf()) - monaco.languages.setMonarchTokensProvider('hogJson', hogJson.language()) - monaco.languages.registerCompletionItemProvider('hogJson', hogQLAutocompleteProvider(HogLanguage.hogJson)) - monaco.languages.registerCodeActionProvider('hogJson', hogQLMetadataProvider()) - } + initHogJsonLanguage(monaco) } if (options.tabFocusMode) { editor.onKeyDown((evt) => { diff --git a/frontend/src/lib/monaco/languages/hog.ts b/frontend/src/lib/monaco/languages/hog.ts index 6bbeea6451b5e..cb32ea4359c5b 100644 --- a/frontend/src/lib/monaco/languages/hog.ts +++ b/frontend/src/lib/monaco/languages/hog.ts @@ -3,8 +3,13 @@ // Adapted from: https://raw.githubusercontent.com/microsoft/monaco-editor/main/src/basic-languages/typescript/typescript.ts +import { Monaco } from '@monaco-editor/react' +import { hogQLAutocompleteProvider } from 'lib/monaco/hogQLAutocompleteProvider' +import { hogQLMetadataProvider } from 'lib/monaco/hogQLMetadataProvider' import { languages } from 'monaco-editor' +import { HogLanguage } from '~/queries/schema' + export const conf: () => languages.LanguageConfiguration = () => ({ wordPattern: /(-?\d*\.\d\w*)|([^\`\~\!\@\#\%\^\&\*\(\)\-\=\+\[\{\]\}\\\|\;\:\'\"\,\.\<\>\/\?\s]+)/g, @@ -244,3 +249,13 @@ export const language: () => languages.IMonarchLanguage = () => ({ ], }, }) + +export function initHogLanguage(monaco: Monaco): void { + if (!monaco.languages.getLanguages().some(({ id }) => id === 'hog')) { + monaco.languages.register({ id: 'hog', extensions: ['.hog'], mimetypes: ['application/hog'] }) + monaco.languages.setLanguageConfiguration('hog', conf()) + monaco.languages.setMonarchTokensProvider('hog', language()) + monaco.languages.registerCompletionItemProvider('hog', hogQLAutocompleteProvider(HogLanguage.hog)) + monaco.languages.registerCodeActionProvider('hog', hogQLMetadataProvider()) + } +} diff --git a/frontend/src/lib/monaco/languages/hogJson.ts b/frontend/src/lib/monaco/languages/hogJson.ts index c0898e6074893..74de239a1071a 100644 --- a/frontend/src/lib/monaco/languages/hogJson.ts +++ b/frontend/src/lib/monaco/languages/hogJson.ts 
@@ -1,7 +1,12 @@ // eslint-disable-next-line eslint-comments/disable-enable-pair /* eslint-disable no-useless-escape */ +import { Monaco } from '@monaco-editor/react' +import { hogQLAutocompleteProvider } from 'lib/monaco/hogQLAutocompleteProvider' +import { hogQLMetadataProvider } from 'lib/monaco/hogQLMetadataProvider' import { languages } from 'monaco-editor' +import { HogLanguage } from '~/queries/schema' + import { conf as _conf, language as _language } from './hog' export const conf: () => languages.LanguageConfiguration = () => ({ @@ -151,3 +156,16 @@ export const language: () => languages.IMonarchLanguage = () => ({ ], }, }) + +export function initHogJsonLanguage(monaco: Monaco): void { + if (!monaco.languages.getLanguages().some(({ id }) => id === 'hogJson')) { + monaco.languages.register({ + id: 'hogJson', + mimetypes: ['application/hog+json'], + }) + monaco.languages.setLanguageConfiguration('hogJson', conf()) + monaco.languages.setMonarchTokensProvider('hogJson', language()) + monaco.languages.registerCompletionItemProvider('hogJson', hogQLAutocompleteProvider(HogLanguage.hogJson)) + monaco.languages.registerCodeActionProvider('hogJson', hogQLMetadataProvider()) + } +} diff --git a/frontend/src/lib/monaco/languages/hogQL.ts b/frontend/src/lib/monaco/languages/hogQL.ts index b0325e548084f..0babd8d3b6bdf 100644 --- a/frontend/src/lib/monaco/languages/hogQL.ts +++ b/frontend/src/lib/monaco/languages/hogQL.ts @@ -1,7 +1,12 @@ // Adapted from https://raw.githubusercontent.com/microsoft/monaco-editor/main/src/basic-languages/mysql/mysql.ts +import { Monaco } from '@monaco-editor/react' +import { hogQLAutocompleteProvider } from 'lib/monaco/hogQLAutocompleteProvider' +import { hogQLMetadataProvider } from 'lib/monaco/hogQLMetadataProvider' import { languages } from 'monaco-editor' +import { HogLanguage } from '~/queries/schema' + export const conf: () => languages.LanguageConfiguration = () => ({ comments: { lineComment: '--', @@ -845,3 +850,24 @@ export const language: () => languages.IMonarchLanguage = () => ({ ], }, }) + +export function initHogQLLanguage(monaco: Monaco, lang: HogLanguage = HogLanguage.hogQL): void { + if (!monaco.languages.getLanguages().some(({ id }) => id === lang)) { + monaco.languages.register( + lang === 'hogQL' + ? 
{ + id: lang, + extensions: ['.sql', '.hogql'], + mimetypes: ['application/hogql'], + } + : { + id: lang, + mimetypes: ['application/hogql+expr'], + } + ) + monaco.languages.setLanguageConfiguration(lang, conf()) + monaco.languages.setMonarchTokensProvider(lang, language()) + monaco.languages.registerCompletionItemProvider(lang, hogQLAutocompleteProvider(lang)) + monaco.languages.registerCodeActionProvider(lang, hogQLMetadataProvider()) + } +} diff --git a/frontend/src/lib/monaco/languages/hogTemplate.ts b/frontend/src/lib/monaco/languages/hogTemplate.ts index 17351906faef2..c0424086c330f 100644 --- a/frontend/src/lib/monaco/languages/hogTemplate.ts +++ b/frontend/src/lib/monaco/languages/hogTemplate.ts @@ -1,7 +1,12 @@ // eslint-disable-next-line eslint-comments/disable-enable-pair /* eslint-disable no-useless-escape */ +import { Monaco } from '@monaco-editor/react' +import { hogQLAutocompleteProvider } from 'lib/monaco/hogQLAutocompleteProvider' +import { hogQLMetadataProvider } from 'lib/monaco/hogQLMetadataProvider' import { languages } from 'monaco-editor' +import { HogLanguage } from '~/queries/schema' + import { conf as _conf, language as _language } from './hog' export const conf: () => languages.LanguageConfiguration = () => ({ @@ -124,3 +129,19 @@ export const language: () => languages.IMonarchLanguage = () => ({ ], }, }) + +export function initHogTemplateLanguage(monaco: Monaco): void { + if (!monaco.languages.getLanguages().some(({ id }) => id === 'hogTemplate')) { + monaco.languages.register({ + id: 'hogTemplate', + mimetypes: ['application/hog+template'], + }) + monaco.languages.setLanguageConfiguration('hogTemplate', conf()) + monaco.languages.setMonarchTokensProvider('hogTemplate', language()) + monaco.languages.registerCompletionItemProvider( + 'hogTemplate', + hogQLAutocompleteProvider(HogLanguage.hogTemplate) + ) + monaco.languages.registerCodeActionProvider('hogTemplate', hogQLMetadataProvider()) + } +} diff --git a/frontend/src/lib/taxonomy.tsx b/frontend/src/lib/taxonomy.tsx index 54c6759159a01..76c60960eec8e 100644 --- a/frontend/src/lib/taxonomy.tsx +++ b/frontend/src/lib/taxonomy.tsx @@ -249,6 +249,80 @@ export const CORE_FILTER_DEFINITIONS_BY_GROUP = { description: 'If autocapture has been disabled server-side.', system: true, }, + $feature_flag_payloads: { + label: 'Feature Flag Payloads', + description: 'Feature flag payloads active in the environment.', + }, + $capture_failed_request: { + label: 'Capture Failed Request', + description: '', + }, + $lib_rate_limit_remaining_tokens: { + label: 'Clientside rate limit remaining tokens', + description: ( + + Remaining rate limit tokens for the posthog-js library client-side rate limiting implementation. 
+ + ), + examples: ['100'], + }, + token: { + label: 'Token', + description: Token used for authentication., + examples: ['ph_abcdefg'], + }, + $ce_version: { + label: '$ce_version', + description: '', + system: true, + }, + $anon_distinct_id: { + label: 'Anon Distinct ID', + description: 'If the user was previously anonymous, their anonymous ID will be set here.', + examples: ['16ff262c4301e5-0aa346c03894bc-39667c0e-1aeaa0-16ff262c431767'], + system: true, + }, + $event_type: { + label: 'Event Type', + description: + 'When the event is an $autocapture event, this specifies what the action was against the element.', + examples: ['click', 'submit', 'change'], + }, + $insert_id: { + label: 'Insert ID', + description: 'Unique insert ID for the event.', + system: true, + }, + $time: { + label: '$time (deprecated)', + description: + 'Use the HogQL field `timestamp` instead. This field was previously set on some client side events.', + system: true, + examples: ['1681211521.345'], + }, + $device_id: { + label: 'Device ID', + description: 'Unique ID for that device, consistent even if users are logging in/out.', + examples: ['16ff262c4301e5-0aa346c03894bc-39667c0e-1aeaa0-16ff262c431767'], + system: true, + }, + $browser_type: { + label: 'Browser Type', + description: 'This is only added when posthog-js config.opt_out_useragent_filter is true.', + examples: ['browser', 'bot'], + }, + + // session recording + $replay_minimum_duration: { + label: 'Replay config - minimum duration', + description: Config for minimum duration before emitting a session recording., + examples: ['1000'], + }, + $replay_sample_rate: { + label: 'Replay config - sample rate', + description: Config for sampling rate of session recordings., + examples: ['0.1'], + }, $console_log_recording_enabled_server_side: { label: 'Console Log Recording Enabled Server-Side', description: 'If console log recording has been enabled server-side.', @@ -260,14 +334,44 @@ export const CORE_FILTER_DEFINITIONS_BY_GROUP = { examples: ['v2'], system: true, }, - $feature_flag_payloads: { - label: 'Feature Flag Payloads', - description: 'Feature flag payloads active in the environment.', + $session_recording_start_reason: { + label: 'Session recording start reason', + description: ( + + Reason for starting the session recording. Useful for e.g. if you have sampling enabled and want to + see on batch exported events which sessions have recordings available. + + ), + examples: ['sampling_override', 'recording_initialized', 'linked_flag_match'], }, - $capture_failed_request: { - label: 'Capture Failed Request', - description: '', + $session_recording_canvas_recording: { + label: 'Session recording canvas recording', + description: Session recording canvas capture config., + examples: ['{"enabled": false}'], + }, + $session_recording_network_payload_capture: { + label: 'Session recording network payload capture', + description: Session recording network payload capture config., + examples: ['{"recordHeaders": false}'], + }, + $session_recording_url_trigger_activated_session: { + label: 'Session recording URL trigger activated session', + description: ( + + Session recording URL trigger activated session config. Used by posthog-js to track URL activation + of session replay. + + ), }, + $session_recording_url_trigger_status: { + label: 'Session recording URL trigger status', + description: ( + + Session recording URL trigger status. Used by posthog-js to track URL activation of session replay. 
+ + ), + }, + // exception tracking $sentry_exception: { label: 'Sentry exception', description: 'Raw Sentry exception data', @@ -324,41 +428,21 @@ export const CORE_FILTER_DEFINITIONS_BY_GROUP = { label: 'Exception person URL', description: 'The PostHog person that experienced the exception', }, - $ce_version: { - label: '$ce_version', - description: '', - system: true, + $exception_capture_endpoint: { + label: 'Exception capture endpoint', + description: Endpoint used by posthog-js exception autocapture., + examples: ['/e/'], }, - $anon_distinct_id: { - label: 'Anon Distinct ID', - description: 'If the user was previously anonymous, their anonymous ID will be set here.', - examples: ['16ff262c4301e5-0aa346c03894bc-39667c0e-1aeaa0-16ff262c431767'], - system: true, + $exception_capture_endpoint_suffix: { + label: 'Exception capture endpoint', + description: Endpoint used by posthog-js exception autocapture., + examples: ['/e/'], }, - $event_type: { - label: 'Event Type', - description: - 'When the event is an $autocapture event, this specifies what the action was against the element.', - examples: ['click', 'submit', 'change'], - }, - $insert_id: { - label: 'Insert ID', - description: 'Unique insert ID for the event.', - system: true, - }, - $time: { - label: '$time (deprecated)', - description: - 'Use the HogQL field `timestamp` instead. This field was previously set on some client side events.', - system: true, - examples: ['1681211521.345'], - }, - $device_id: { - label: 'Device ID', - description: 'Unique ID for that device, consistent even if users are logging in/out.', - examples: ['16ff262c4301e5-0aa346c03894bc-39667c0e-1aeaa0-16ff262c431767'], - system: true, + $exception_capture_enabled_server_side: { + label: 'Exception capture enabled server side', + description: Whether exception autocapture was enabled in remote config., }, + // GeoIP $geoip_city_name: { label: 'City Name', @@ -435,6 +519,27 @@ export const CORE_FILTER_DEFINITIONS_BY_GROUP = { label: 'GeoIP Disabled', description: `Whether to skip GeoIP processing for the event.`, }, + $geoip_city_confidence: { + label: 'GeoIP detection city confidence', + description: "Confidence level of the city matched to this event's IP address.", + examples: ['0.5'], + }, + $geoip_country_confidence: { + label: 'GeoIP detection country confidence', + description: "Confidence level of the country matched to this event's IP address.", + examples: ['0.5'], + }, + $geoip_accuracy_radius: { + label: 'GeoIP detection accuracy radius', + description: "Accuracy radius of the location matched to this event's IP address.", + examples: ['50'], + }, + $geoip_subdivision_1_confidence: { + label: 'GeoIP detection subdivision 1 confidence', + description: "Confidence level of the first subdivision matched to this event's IP address.", + examples: ['0.5'], + }, + $el_text: { label: 'Element Text', description: `The text of the element that was clicked. Only sent with Autocapture events.`, @@ -1017,7 +1122,10 @@ export const CORE_FILTER_DEFINITIONS_BY_GROUP = { label: 'Is Identified', description: 'When the person was identified', }, - + $initial_person_info: { + label: 'Initial Person Info', + description: 'posthog-js initial person information. 
used in the $set_once flow', + }, // web vitals properties $web_vitals_enabled_server_side: { label: 'Web vitals enabled server side', @@ -1047,6 +1155,67 @@ export const CORE_FILTER_DEFINITIONS_BY_GROUP = { $web_vitals_CLS_value: { label: 'Web vitals CLS value', }, + $web_vitals_allowed_metrics: { + label: 'Web vitals allowed metrics', + description: Allowed web vitals metrics config., + examples: ['["LCP", "CLS"]'], + }, + + // page leave properties + $prev_pageview_last_scroll: { + label: 'Previous pageview last scroll', + description: 'posthog-js adds these to the page leave event, they are used in web analytics calculations', + examples: [0], + }, + $prev_pageview_last_scroll_percentage: { + label: 'Previous pageview last scroll percentage', + description: 'posthog-js adds these to the page leave event, they are used in web analytics calculations', + examples: [0], + }, + $prev_pageview_max_scroll: { + examples: [0], + label: 'Previous pageview max scroll', + description: 'posthog-js adds these to the page leave event, they are used in web analytics calculations', + }, + $prev_pageview_max_scroll_percentage: { + examples: [0], + label: 'Previous pageview max scroll percentage', + description: 'posthog-js adds these to the page leave event, they are used in web analytics calculations', + }, + $prev_pageview_last_content: { + examples: [0], + label: 'Previous pageview last content', + description: 'posthog-js adds these to the page leave event, they are used in web analytics calculations', + }, + $prev_pageview_last_content_percentage: { + examples: [0], + description: 'posthog-js adds these to the page leave event, they are used in web analytics calculations', + label: 'Previous pageview last content percentage', + }, + $prev_pageview_max_content: { + examples: [0], + description: 'posthog-js adds these to the page leave event, they are used in web analytics calculations', + label: 'Previous pageview max content', + }, + $prev_pageview_max_content_percentage: { + examples: [0], + description: 'posthog-js adds these to the page leave event, they are used in web analytics calculations', + label: 'Previous pageview max content percentage', + }, + $prev_pageview_pathname: { + examples: ['/pricing', '/about-us/team'], + description: 'posthog-js adds these to the page leave event, they are used in web analytics calculations', + label: 'Previous pageview pathname', + }, + $prev_pageview_duration: { + examples: [0], + description: 'posthog-js adds these to the page leave event, they are used in web analytics calculations', + label: 'Previous pageview duration', + }, + $surveys_activated: { + label: 'Surveys Activated', + description: 'The surveys that were activated for this event.', + }, }, numerical_event_properties: {}, // Same as event properties, see assignment below person_properties: {}, // Currently person properties are the same as event properties, see assignment below @@ -1242,6 +1411,81 @@ CORE_FILTER_DEFINITIONS_BY_GROUP.event_properties.$session_duration = export const PROPERTY_KEYS = Object.keys(CORE_FILTER_DEFINITIONS_BY_GROUP.event_properties) +/** + * these are properties that PostHog add to events they track for their own purposes + * not part of the general taxonomy + * but often more numerous than actual properties set on events and useful to hide + * to make those properties discoverable + */ +export const NON_DOLLAR_POSTHOG_PROPERTY_KEYS = [ + 'billing_period_end', + 'billing_period_start', + 'current_amount_usd.data_warehouse', + 'current_amount_usd.feature_flags', + 
'current_amount_usd.integrations',
+    'current_amount_usd.platform_and_support',
+    'current_amount_usd.product_analytics',
+    'current_amount_usd.session_replay',
+    'current_amount_usd.surveys',
+    'current_total_amount_usd',
+    'current_usage.data_warehouse',
+    'current_usage.feature_flags',
+    'current_usage.integrations',
+    'current_usage.platform_and_support',
+    'current_usage.product_analytics',
+    'current_usage.session_replay',
+    'current_usage.surveys',
+    'customer_deactivated',
+    'custom_limits.data_warehouse',
+    'custom_limits.feature_flags',
+    'custom_limits.integrations',
+    'custom_limits.platform_and_support',
+    'custom_limits.product_analytics',
+    'custom_limits.session_replay',
+    'custom_limits.surveys',
+    'free_allocation.data_warehouse',
+    'free_allocation.feature_flags',
+    'free_allocation.integrations',
+    'free_allocation.platform_and_support',
+    'free_allocation.product_analytics',
+    'free_allocation.session_replay',
+    'free_allocation.surveys',
+    'has_billing_plan',
+    'percentage_usage.data_warehouse',
+    'percentage_usage.feature_flags',
+    'percentage_usage.integrations',
+    'percentage_usage.platform_and_support',
+    'percentage_usage.product_analytics',
+    'percentage_usage.session_replay',
+    'percentage_usage.surveys',
+    'projected_usage.data_warehouse',
+    'projected_usage.feature_flags',
+    'projected_usage.integrations',
+    'projected_usage.platform_and_support',
+    'projected_usage.product_analytics',
+    'projected_usage.session_replay',
+    'projected_usage.surveys',
+    'unit_amount_usd.data_warehouse',
+    'unit_amount_usd.feature_flags',
+    'unit_amount_usd.integrations',
+    'unit_amount_usd.platform_and_support',
+    'unit_amount_usd.product_analytics',
+    'unit_amount_usd.session_replay',
+    'unit_amount_usd.surveys',
+    'usage_limit.data_warehouse',
+    'usage_limit.feature_flags',
+    'usage_limit.integrations',
+    'usage_limit.platform_and_support',
+    'usage_limit.product_analytics',
+    'usage_limit.session_replay',
+    'usage_limit.surveys',
+    'is_demo_project',
+    'realm',
+    'email_service_available',
+    'slack_service_available',
+    'commit_sha',
+]
+
 /** Return whether a given filter key is part of PostHog's core (marked by the PostHog logo).
*/ export function isCoreFilter(key: string): boolean { return Object.values(CORE_FILTER_DEFINITIONS_BY_GROUP).some((mapping) => Object.keys(mapping).includes(key)) diff --git a/frontend/src/lib/utils/eventUsageLogic.ts b/frontend/src/lib/utils/eventUsageLogic.ts index 80f26932101e1..4b1ddcd3e2054 100644 --- a/frontend/src/lib/utils/eventUsageLogic.ts +++ b/frontend/src/lib/utils/eventUsageLogic.ts @@ -480,6 +480,11 @@ export const eventUsageLogic = kea([ }), reportExperimentInsightLoadFailed: true, reportExperimentVariantShipped: (experiment: Experiment) => ({ experiment }), + reportExperimentVariantScreenshotUploaded: (experimentId: number | 'new') => ({ experimentId }), + reportExperimentResultsLoadingTimeout: (experimentId: number | 'new') => ({ experimentId }), + reportExperimentReleaseConditionsViewed: (experimentId: number | 'new') => ({ experimentId }), + reportExperimentReleaseConditionsUpdated: (experimentId: number | 'new') => ({ experimentId }), + // Definition Popover reportDataManagementDefinitionHovered: (type: TaxonomicFilterGroupType) => ({ type }), reportDataManagementDefinitionClickView: (type: TaxonomicFilterGroupType) => ({ type }), @@ -1051,6 +1056,26 @@ export const eventUsageLogic = kea([ secondary_metrics_count: experiment.secondary_metrics.length, }) }, + reportExperimentVariantScreenshotUploaded: ({ experimentId }) => { + posthog.capture('experiment variant screenshot uploaded', { + experiment_id: experimentId, + }) + }, + reportExperimentResultsLoadingTimeout: ({ experimentId }) => { + posthog.capture('experiment results loading timeout', { + experiment_id: experimentId, + }) + }, + reportExperimentReleaseConditionsViewed: ({ experimentId }) => { + posthog.capture('experiment release conditions viewed', { + experiment_id: experimentId, + }) + }, + reportExperimentReleaseConditionsUpdated: ({ experimentId }) => { + posthog.capture('experiment release conditions updated', { + experiment_id: experimentId, + }) + }, reportPropertyGroupFilterAdded: () => { posthog.capture('property group filter added') }, diff --git a/frontend/src/mocks/handlers.ts b/frontend/src/mocks/handlers.ts index 3b5898aa52e64..9d14c1b3c0acf 100644 --- a/frontend/src/mocks/handlers.ts +++ b/frontend/src/mocks/handlers.ts @@ -57,7 +57,8 @@ export const defaultMocks: Mocks = { '/api/projects/:team_id/annotations/': EMPTY_PAGINATED_RESPONSE, '/api/projects/:team_id/event_definitions/': EMPTY_PAGINATED_RESPONSE, '/api/projects/:team_id/cohorts/': toPaginatedResponse([MOCK_DEFAULT_COHORT]), - '/api/projects/:team_id/dashboards/': EMPTY_PAGINATED_RESPONSE, + '/api/environments/:team_id/dashboards/': EMPTY_PAGINATED_RESPONSE, + '/api/environments/:team_id/alerts/': EMPTY_PAGINATED_RESPONSE, '/api/projects/:team_id/dashboard_templates': EMPTY_PAGINATED_RESPONSE, '/api/projects/:team_id/dashboard_templates/repository/': [], '/api/projects/:team_id/external_data_sources/': EMPTY_PAGINATED_RESPONSE, @@ -76,8 +77,8 @@ export const defaultMocks: Mocks = { }, '/api/projects/:team_id/groups/': EMPTY_PAGINATED_RESPONSE, '/api/projects/:team_id/groups_types/': [], - '/api/projects/:team_id/insights/': EMPTY_PAGINATED_RESPONSE, - '/api/projects/:team_id/insights/:insight_id/sharing/': { + '/api/environments/:team_id/insights/': EMPTY_PAGINATED_RESPONSE, + '/api/environments/:team_id/insights/:insight_id/sharing/': { enabled: false, access_token: 'foo', created_at: '2020-11-11T00:00:00Z', @@ -86,7 +87,7 @@ export const defaultMocks: Mocks = { '/api/projects/:team_id/feature_flags/': EMPTY_PAGINATED_RESPONSE, 
'/api/projects/:team_id/feature_flags/:feature_flag_id/role_access': EMPTY_PAGINATED_RESPONSE, '/api/projects/:team_id/experiments/': EMPTY_PAGINATED_RESPONSE, - '/api/projects/:team_id/explicit_members/': [], + '/api/environments/:team_id/explicit_members/': [], '/api/projects/:team_id/warehouse_view_link/': EMPTY_PAGINATED_RESPONSE, '/api/projects/:team_id/warehouse_saved_queries/': EMPTY_PAGINATED_RESPONSE, '/api/projects/:team_id/warehouse_tables/': EMPTY_PAGINATED_RESPONSE, @@ -104,9 +105,9 @@ export const defaultMocks: Mocks = { '/api/organizations/@current/plugins/repository/': [], '/api/organizations/@current/plugins/unused/': [], '/api/plugin_config/': toPaginatedResponse([MOCK_DEFAULT_PLUGIN_CONFIG]), - [`/api/projects/:team_id/plugin_configs/${MOCK_DEFAULT_PLUGIN_CONFIG.id}/`]: MOCK_DEFAULT_PLUGIN_CONFIG, - '/api/projects/:team_id/persons': EMPTY_PAGINATED_RESPONSE, - '/api/projects/:team_id/persons/properties/': toPaginatedResponse(MOCK_PERSON_PROPERTIES), + [`/api/environments/:team_id/plugin_configs/${MOCK_DEFAULT_PLUGIN_CONFIG.id}/`]: MOCK_DEFAULT_PLUGIN_CONFIG, + '/api/environments/:team_id/persons': EMPTY_PAGINATED_RESPONSE, + '/api/environments/:team_id/persons/properties/': toPaginatedResponse(MOCK_PERSON_PROPERTIES), '/api/personal_api_keys/': [], '/api/users/@me/': (): MockSignature => [ 200, @@ -118,6 +119,7 @@ export const defaultMocks: Mocks = { }, }, ], + '/api/environments/@current/': MOCK_DEFAULT_TEAM, '/api/projects/@current/': MOCK_DEFAULT_TEAM, '/api/projects/:team_id/comments/count': { count: 0 }, '/api/projects/:team_id/comments': { results: [] }, @@ -157,8 +159,8 @@ export const defaultMocks: Mocks = { 'https://us.i.posthog.com/decide/': (req, res, ctx): MockSignature => posthogCORSResponse(req, res, ctx), '/decide/': (req, res, ctx): MockSignature => posthogCORSResponse(req, res, ctx), 'https://us.i.posthog.com/engage/': (req, res, ctx): MockSignature => posthogCORSResponse(req, res, ctx), - '/api/projects/:team_id/insights/:insight_id/viewed/': (): MockSignature => [201, null], - 'api/projects/:team_id/query': [200, { results: [] }], + '/api/environments/:team_id/insights/:insight_id/viewed/': (): MockSignature => [201, null], + 'api/environments/:team_id/query': [200, { results: [] }], }, patch: { '/api/projects/:team_id/session_recording_playlists/:playlist_id/': {}, diff --git a/frontend/src/models/dashboardsModel.test.ts b/frontend/src/models/dashboardsModel.test.ts index 8c021609e5ded..c3d42a78b83b7 100644 --- a/frontend/src/models/dashboardsModel.test.ts +++ b/frontend/src/models/dashboardsModel.test.ts @@ -62,7 +62,7 @@ describe('the dashboards model', () => { beforeEach(async () => { useMocks({ get: { - '/api/projects/:team_id/dashboards/': () => { + '/api/environments/:team_id/dashboards/': () => { return [ 200, { diff --git a/frontend/src/models/dashboardsModel.tsx b/frontend/src/models/dashboardsModel.tsx index f90650d8bb831..99c4cdf01ea96 100644 --- a/frontend/src/models/dashboardsModel.tsx +++ b/frontend/src/models/dashboardsModel.tsx @@ -92,7 +92,7 @@ export const dashboardsModel = kea([ return { count: 0, next: null, previous: null, results: [] } } const dashboards: PaginatedResponse = await api.get( - url || `api/projects/${teamLogic.values.currentTeamId}/dashboards/?limit=2000` + url || `api/environments/${teamLogic.values.currentTeamId}/dashboards/?limit=2000` ) return { @@ -115,7 +115,7 @@ export const dashboardsModel = kea([ const beforeChange = { ...values.rawDashboards[id] } const response = (await api.update( - 
`api/projects/${teamLogic.values.currentTeamId}/dashboards/${id}`, + `api/environments/${teamLogic.values.currentTeamId}/dashboards/${id}`, payload )) as DashboardType const updatedAttribute = Object.keys(payload)[0] @@ -135,7 +135,7 @@ export const dashboardsModel = kea([ label: 'Undo', action: async () => { const reverted = (await api.update( - `api/projects/${teamLogic.values.currentTeamId}/dashboards/${id}`, + `api/environments/${teamLogic.values.currentTeamId}/dashboards/${id}`, beforeChange )) as DashboardType actions.updateDashboardSuccess(getQueryBasedDashboard(reverted)) @@ -148,33 +148,39 @@ export const dashboardsModel = kea([ }, deleteDashboard: async ({ id, deleteInsights }) => getQueryBasedDashboard( - await api.update(`api/projects/${teamLogic.values.currentTeamId}/dashboards/${id}`, { + await api.update(`api/environments/${teamLogic.values.currentTeamId}/dashboards/${id}`, { deleted: true, delete_insights: deleteInsights, }) ) as DashboardType, restoreDashboard: async ({ id }) => getQueryBasedDashboard( - await api.update(`api/projects/${teamLogic.values.currentTeamId}/dashboards/${id}`, { + await api.update(`api/environments/${teamLogic.values.currentTeamId}/dashboards/${id}`, { deleted: false, }) ) as DashboardType, pinDashboard: async ({ id, source }) => { - const response = (await api.update(`api/projects/${teamLogic.values.currentTeamId}/dashboards/${id}`, { - pinned: true, - })) as DashboardType + const response = (await api.update( + `api/environments/${teamLogic.values.currentTeamId}/dashboards/${id}`, + { + pinned: true, + } + )) as DashboardType eventUsageLogic.actions.reportDashboardPinToggled(true, source) return getQueryBasedDashboard(response)! }, unpinDashboard: async ({ id, source }) => { - const response = (await api.update(`api/projects/${teamLogic.values.currentTeamId}/dashboards/${id}`, { - pinned: false, - })) as DashboardType + const response = (await api.update( + `api/environments/${teamLogic.values.currentTeamId}/dashboards/${id}`, + { + pinned: false, + } + )) as DashboardType eventUsageLogic.actions.reportDashboardPinToggled(false, source) return getQueryBasedDashboard(response)! 
}, duplicateDashboard: async ({ id, name, show, duplicateTiles }) => { - const result = (await api.create(`api/projects/${teamLogic.values.currentTeamId}/dashboards/`, { + const result = (await api.create(`api/environments/${teamLogic.values.currentTeamId}/dashboards/`, { use_dashboard: id, name: `${name} (Copy)`, duplicate_tiles: duplicateTiles, diff --git a/frontend/src/queries/Query/Query.tsx b/frontend/src/queries/Query/Query.tsx index 9a004a846569c..2b5d3284bb2ed 100644 --- a/frontend/src/queries/Query/Query.tsx +++ b/frontend/src/queries/Query/Query.tsx @@ -13,6 +13,7 @@ import { DashboardFilter, DataTableNode, DataVisualizationNode, + HogQLVariable, InsightVizNode, Node, } from '~/queries/schema' @@ -50,10 +51,20 @@ export interface QueryProps { inSharedMode?: boolean /** Dashboard filters to override the ones in the query */ filtersOverride?: DashboardFilter | null + /** Dashboard variables to override the ones in the query */ + variablesOverride?: Record | null } export function Query(props: QueryProps): JSX.Element | null { - const { query: propsQuery, setQuery: propsSetQuery, readOnly, embedded, filtersOverride, inSharedMode } = props + const { + query: propsQuery, + setQuery: propsSetQuery, + readOnly, + embedded, + filtersOverride, + variablesOverride, + inSharedMode, + } = props const [localQuery, localSetQuery] = useState(propsQuery) useEffect(() => { @@ -63,7 +74,7 @@ export function Query(props: QueryProps): JSX.Element | null }, [propsQuery]) const query = readOnly ? propsQuery : localQuery - const setQuery = readOnly ? undefined : propsSetQuery ?? localSetQuery + const setQuery = propsSetQuery ?? localSetQuery const queryContext = props.context || {} @@ -87,21 +98,23 @@ export function Query(props: QueryProps): JSX.Element | null component = ( void) | undefined} + setQuery={setQuery as unknown as (query: DataTableNode) => void} context={queryContext} cachedResults={props.cachedResults} uniqueKey={uniqueKey} + readOnly={readOnly} /> ) } else if (isDataVisualizationNode(query)) { component = ( void) | undefined} + setQuery={setQuery as unknown as (query: DataVisualizationNode) => void} cachedResults={props.cachedResults} uniqueKey={uniqueKey} context={queryContext} readOnly={readOnly} + variablesOverride={props.variablesOverride} /> ) } else if (isSavedInsightNode(query)) { @@ -110,25 +123,20 @@ export function Query(props: QueryProps): JSX.Element | null component = ( void) | undefined} + setQuery={setQuery as unknown as (query: InsightVizNode) => void} context={queryContext} readOnly={readOnly} uniqueKey={uniqueKey} embedded={embedded} inSharedMode={inSharedMode} filtersOverride={filtersOverride} + variablesOverride={variablesOverride} /> ) } else if (isWebOverviewQuery(query)) { component = } else if (isHogQuery(query)) { - component = ( - void)} - queryKey={String(uniqueKey)} - /> - ) + component = void} queryKey={String(uniqueKey)} /> } else { component = } diff --git a/frontend/src/queries/nodes/DataNode/DataNode.stories.tsx b/frontend/src/queries/nodes/DataNode/DataNode.stories.tsx index 7b20caa2b8037..4868de5e4fe3e 100644 --- a/frontend/src/queries/nodes/DataNode/DataNode.stories.tsx +++ b/frontend/src/queries/nodes/DataNode/DataNode.stories.tsx @@ -19,8 +19,8 @@ const meta: Meta = { decorators: [ mswDecorator({ get: { - '/api/projects/:team_id/events': events, - '/api/projects/:team_id/persons': persons, + '/api/environments/:team_id/events': events, + '/api/environments/:team_id/persons': persons, }, }), ], diff --git 
a/frontend/src/queries/nodes/DataNode/dataNodeLogic.queryCancellation.test.ts b/frontend/src/queries/nodes/DataNode/dataNodeLogic.queryCancellation.test.ts index be84e5ad61e14..9401a11605d7c 100644 --- a/frontend/src/queries/nodes/DataNode/dataNodeLogic.queryCancellation.test.ts +++ b/frontend/src/queries/nodes/DataNode/dataNodeLogic.queryCancellation.test.ts @@ -17,7 +17,7 @@ describe('dataNodeLogic - query cancellation', () => { featureFlagLogic.mount() useMocks({ get: { - '/api/projects/:team/insights/trend/': async (req) => { + '/api/environments/:team_id/insights/trend/': async (req) => { if (req.url.searchParams.get('date_from') === '-180d') { // delay for a second before response without pausing return new Promise((resolve) => @@ -30,8 +30,8 @@ describe('dataNodeLogic - query cancellation', () => { }, }, post: { - '/api/projects/997/insights/cancel/': [201], - '/api/projects/997/query/': async () => { + '/api/environments/997/insights/cancel/': [201], + '/api/environments/997/query/': async () => { return new Promise((resolve) => setTimeout(() => { resolve([200, { result: ['slow result from api'] }]) @@ -40,7 +40,7 @@ describe('dataNodeLogic - query cancellation', () => { }, }, delete: { - '/api/projects/:team_id/query/uuid-first': [200, {}], + '/api/environments/:team_id/query/uuid-first': [200, {}], }, }) }) diff --git a/frontend/src/queries/nodes/DataNode/dataNodeLogic.test.ts b/frontend/src/queries/nodes/DataNode/dataNodeLogic.test.ts index 4329a52946342..e63cfba6f9309 100644 --- a/frontend/src/queries/nodes/DataNode/dataNodeLogic.test.ts +++ b/frontend/src/queries/nodes/DataNode/dataNodeLogic.test.ts @@ -2,7 +2,7 @@ import { expectLogic, partial } from 'kea-test-utils' import { dataNodeLogic } from '~/queries/nodes/DataNode/dataNodeLogic' import { performQuery } from '~/queries/query' -import { DashboardFilter, NodeKind } from '~/queries/schema' +import { DashboardFilter, HogQLVariable, NodeKind } from '~/queries/schema' import { initKeaTests } from '~/test/init' jest.mock('~/queries/query', () => { @@ -473,6 +473,40 @@ describe('dataNodeLogic', () => { expect.any(String), expect.any(Function), filtersOverride, + undefined, + false + ) + }) + + it('passes variablesOverride to api', async () => { + const variablesOverride: Record = { + test_1: { + variableId: 'some_id', + code_name: 'some_name', + value: 'hello world', + }, + } + + const query = { + kind: NodeKind.EventsQuery, + select: ['*', 'event', 'timestamp'], + } + + logic = dataNodeLogic({ + key: 'key', + query, + variablesOverride, + }) + logic.mount() + + expect(performQuery).toHaveBeenCalledWith( + query, + expect.anything(), + false, + expect.any(String), + expect.any(Function), + undefined, + variablesOverride, false ) }) @@ -497,6 +531,32 @@ describe('dataNodeLogic', () => { expect.any(String), expect.any(Function), undefined, + undefined, + false + ) + }) + + it("doesn't pass undefined variablesOverride to api", async () => { + const query = { + kind: NodeKind.EventsQuery, + select: ['*', 'event', 'timestamp'], + } + + logic = dataNodeLogic({ + key: 'key', + query, + variablesOverride: undefined, + }) + logic.mount() + + expect(performQuery).toHaveBeenCalledWith( + query, + expect.anything(), + false, + expect.any(String), + expect.any(Function), + undefined, + undefined, false ) }) diff --git a/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts b/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts index 997857b202f9c..25d0f75848491 100644 --- a/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts +++ 
b/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts @@ -28,7 +28,7 @@ import { userLogic } from 'scenes/userLogic' import { dataNodeCollectionLogic, DataNodeCollectionProps } from '~/queries/nodes/DataNode/dataNodeCollectionLogic' import { removeExpressionComment } from '~/queries/nodes/DataTable/utils' import { performQuery } from '~/queries/query' -import { DashboardFilter, QueryStatus } from '~/queries/schema' +import { DashboardFilter, HogQLVariable, QueryStatus } from '~/queries/schema' import { ActorsQuery, ActorsQueryResponse, @@ -66,6 +66,8 @@ export interface DataNodeLogicProps { /** Dashboard filters to override the ones in the query */ filtersOverride?: DashboardFilter | null + /** Dashboard variables to override the ones in the query */ + variablesOverride?: Record | null } export const AUTOLOAD_INTERVAL = 30000 @@ -99,7 +101,7 @@ export const dataNodeLogic = kea([ ], ], })), - props({ query: {} } as DataNodeLogicProps), + props({ query: {}, variablesOverride: undefined } as DataNodeLogicProps), propsChanged(({ actions, props }, oldProps) => { if (!props.query) { return // Can't do anything without a query @@ -214,6 +216,7 @@ export const dataNodeLogic = kea([ queryId, actions.setPollResponse, props.filtersOverride, + props.variablesOverride, pollOnly )) ?? null const duration = performance.now() - now @@ -451,6 +454,10 @@ export const dataNodeLogic = kea([ ], })), selectors(({ cache }) => ({ + variableOverridesAreSet: [ + (_, p) => [p.variablesOverride ?? (() => ({}))], + (variablesOverride) => !!variablesOverride, + ], isShowingCachedResults: [ () => [(_, props) => props.cachedResults ?? null, (_, props) => props.query], (cachedResults: AnyResponseType | null, query: DataNode): boolean => { @@ -652,7 +659,7 @@ export const dataNodeLogic = kea([ abortQuery: async ({ queryId }) => { try { const { currentTeamId } = values - await api.delete(`api/projects/${currentTeamId}/query/${queryId}/`) + await api.delete(`api/environments/${currentTeamId}/query/${queryId}/`) } catch (e) { console.warn('Failed cancelling query', e) } diff --git a/frontend/src/queries/nodes/DataTable/DataTable.stories.tsx b/frontend/src/queries/nodes/DataTable/DataTable.stories.tsx index 64a978e45f95d..e909710177813 100644 --- a/frontend/src/queries/nodes/DataTable/DataTable.stories.tsx +++ b/frontend/src/queries/nodes/DataTable/DataTable.stories.tsx @@ -19,8 +19,8 @@ const meta: Meta = { decorators: [ mswDecorator({ get: { - '/api/projects/:team_id/events': events, - '/api/projects/:team_id/persons': persons, + '/api/environments/:team_id/events': events, + '/api/environments/:team_id/persons': persons, }, }), ], diff --git a/frontend/src/queries/nodes/DataTable/DataTable.tsx b/frontend/src/queries/nodes/DataTable/DataTable.tsx index 335cd7d6730f7..12e4193fe35dc 100644 --- a/frontend/src/queries/nodes/DataTable/DataTable.tsx +++ b/frontend/src/queries/nodes/DataTable/DataTable.tsx @@ -68,7 +68,7 @@ import { DataTableOpenEditor } from './DataTableOpenEditor' interface DataTableProps { uniqueKey?: string | number query: DataTableNode - setQuery?: (query: DataTableNode) => void + setQuery: (query: DataTableNode) => void /** Custom table columns */ context?: QueryContext /* Cached Results are provided when shared or exported, @@ -76,6 +76,7 @@ interface DataTableProps { cachedResults?: AnyResponseType // Override the data logic node key if needed dataNodeLogicKey?: string + readOnly?: boolean } const eventGroupTypes = [ @@ -88,7 +89,14 @@ const personGroupTypes = [TaxonomicFilterGroupType.HogQLExpression, 
TaxonomicFil let uniqueNode = 0 -export function DataTable({ uniqueKey, query, setQuery, context, cachedResults }: DataTableProps): JSX.Element { +export function DataTable({ + uniqueKey, + query, + setQuery, + context, + cachedResults, + readOnly, +}: DataTableProps): JSX.Element { const [uniqueNodeKey] = useState(() => uniqueNode++) const [dataKey] = useState(() => `DataNode.${uniqueKey || uniqueNodeKey}`) const insightProps: InsightLogicProps = context?.insightProps || { @@ -148,7 +156,7 @@ export function DataTable({ uniqueKey, query, setQuery, context, cachedResults } showTimings, } = queryWithDefaults - const isReadOnly = setQuery === undefined + const isReadOnly = !!readOnly const eventActionsColumnShown = showActions && sourceFeatures.has(QueryFeature.eventActionsColumn) && columnsInResponse?.includes('*') diff --git a/frontend/src/queries/nodes/DataTable/dataTableLogic.test.ts b/frontend/src/queries/nodes/DataTable/dataTableLogic.test.ts index e2fc4bde77224..24d058bd7e8d6 100644 --- a/frontend/src/queries/nodes/DataTable/dataTableLogic.test.ts +++ b/frontend/src/queries/nodes/DataTable/dataTableLogic.test.ts @@ -67,6 +67,7 @@ describe('dataTableLogic', () => { expect.any(String), expect.any(Function), undefined, + undefined, false ) expect(performQuery).toHaveBeenCalledTimes(1) diff --git a/frontend/src/queries/nodes/DataTable/renderColumnMeta.tsx b/frontend/src/queries/nodes/DataTable/renderColumnMeta.tsx index cf29b0888d347..1905862986584 100644 --- a/frontend/src/queries/nodes/DataTable/renderColumnMeta.tsx +++ b/frontend/src/queries/nodes/DataTable/renderColumnMeta.tsx @@ -4,9 +4,9 @@ import { SortingIndicator } from 'lib/lemon-ui/LemonTable/sorting' import { getQueryFeatures, QueryFeature } from '~/queries/nodes/DataTable/queryFeatures' import { extractExpressionComment } from '~/queries/nodes/DataTable/utils' -import { DataTableNode, EventsQuery } from '~/queries/schema' +import { DataTableNode, DataVisualizationNode, EventsQuery } from '~/queries/schema' import { QueryContext } from '~/queries/types' -import { isHogQLQuery, trimQuotes } from '~/queries/utils' +import { isDataTableNode, isHogQLQuery, trimQuotes } from '~/queries/utils' export interface ColumnMeta { title?: JSX.Element | string @@ -14,7 +14,11 @@ export interface ColumnMeta { align?: 'left' | 'right' | 'center' } -export function renderColumnMeta(key: string, query: DataTableNode, context?: QueryContext): ColumnMeta { +export function renderColumnMeta( + key: string, + query: T, + context?: QueryContext +): ColumnMeta { let width: string | number | undefined let title: JSX.Element | string | undefined const queryFeatures = getQueryFeatures(query.source) @@ -34,7 +38,8 @@ export function renderColumnMeta(key: string, query: DataTableNode, context?: Qu } if (title.startsWith("tuple('__hx_tag', '")) { const tagName = title.substring(19, title.indexOf("'", 19)) - title = tagName === '__hx_obj' ? 'Object' : '<' + tagName + ' />' + title = + tagName === '__hx_obj' ? 'Object' : tagName === 'RecordingButton' ? 'Recording' : '<' + tagName + ' />' } } else if (key === 'timestamp') { title = 'Time' @@ -87,7 +92,8 @@ export function renderColumnMeta(key: string, query: DataTableNode, context?: Qu title = Component ? 
: context?.columns?.[key]?.title } - if (queryFeatures.has(QueryFeature.selectAndOrderByColumns) && !query.allowSorting) { + if (queryFeatures.has(QueryFeature.selectAndOrderByColumns) && isDataTableNode(query) && !query.allowSorting) { + query const sortKey = queryFeatures.has(QueryFeature.selectAndOrderByColumns) ? (query.source as EventsQuery)?.orderBy?.[0] : null diff --git a/frontend/src/queries/nodes/DataVisualization/Components/Table.tsx b/frontend/src/queries/nodes/DataVisualization/Components/Table.tsx index 967a9666b5940..d0a4af4d22ff8 100644 --- a/frontend/src/queries/nodes/DataVisualization/Components/Table.tsx +++ b/frontend/src/queries/nodes/DataVisualization/Components/Table.tsx @@ -10,6 +10,7 @@ import { QueryContext } from '~/queries/types' import { LoadNext } from '../../DataNode/LoadNext' import { renderColumn } from '../../DataTable/renderColumn' +import { renderColumnMeta } from '../../DataTable/renderColumnMeta' import { convertTableValue, dataVisualizationLogic, TableDataCell } from '../dataVisualizationLogic' interface TableProps { @@ -33,62 +34,67 @@ export const Table = (props: TableProps): JSX.Element => { } = useValues(dataVisualizationLogic) const tableColumns: LemonTableColumn[], any>[] = tabularColumns.map( - ({ column, settings }, index) => ({ - title: settings?.display?.label || column.name, - render: (_, data, recordIndex: number) => { - return renderColumn(column.name, data[index].formattedValue, data, recordIndex, { - kind: NodeKind.DataTableNode, - source: props.query.source, - }) - }, - style: (_, data) => { - const cf = conditionalFormattingRules - .filter((n) => n.columnName === column.name) - .map((n) => { - const res = execHog(n.bytecode, { - globals: { - value: data[index].value, - input: convertTableValue(n.input, column.type.name), - }, - functions: {}, - maxAsyncSteps: 0, - }) + ({ column, settings }, index) => { + const { title, ...columnMeta } = renderColumnMeta(column.name, props.query, props.context) - return { - rule: n, - result: res.result, - } + return { + ...columnMeta, + title: settings?.display?.label || title || column.name, + render: (_, data, recordIndex: number) => { + return renderColumn(column.name, data[index].formattedValue, data, recordIndex, { + kind: NodeKind.DataTableNode, + source: props.query.source, }) + }, + style: (_, data) => { + const cf = conditionalFormattingRules + .filter((n) => n.columnName === column.name) + .map((n) => { + const res = execHog(n.bytecode, { + globals: { + value: data[index].value, + input: convertTableValue(n.input, column.type.name), + }, + functions: {}, + maxAsyncSteps: 0, + }) + + return { + rule: n, + result: res.result, + } + }) - const conditionalFormattingMatches = cf.find((n) => Boolean(n.result)) + const conditionalFormattingMatches = cf.find((n) => Boolean(n.result)) - if (conditionalFormattingMatches) { - const ruleColor = conditionalFormattingMatches.rule.color - const colorMode = conditionalFormattingMatches.rule.colorMode ?? 'light' + if (conditionalFormattingMatches) { + const ruleColor = conditionalFormattingMatches.rule.color + const colorMode = conditionalFormattingMatches.rule.colorMode ?? 
'light' - // If the color mode matches the current theme, return as it was saved - if ((colorMode === 'dark' && isDarkModeOn) || (colorMode === 'light' && !isDarkModeOn)) { - return { - backgroundColor: ruleColor, + // If the color mode matches the current theme, return as it was saved + if ((colorMode === 'dark' && isDarkModeOn) || (colorMode === 'light' && !isDarkModeOn)) { + return { + backgroundColor: ruleColor, + } } - } - // If the color mode is dark, but we're in light mode - then lighten the color - if (colorMode === 'dark' && !isDarkModeOn) { - return { - backgroundColor: lightenDarkenColor(ruleColor, 30), + // If the color mode is dark, but we're in light mode - then lighten the color + if (colorMode === 'dark' && !isDarkModeOn) { + return { + backgroundColor: lightenDarkenColor(ruleColor, 30), + } } - } - // If the color mode is light, but we're in dark mode - then darken the color - return { - backgroundColor: lightenDarkenColor(ruleColor, -30), + // If the color mode is light, but we're in dark mode - then darken the color + return { + backgroundColor: lightenDarkenColor(ruleColor, -30), + } } - } - return undefined - }, - }) + return undefined + }, + } + } ) return ( diff --git a/frontend/src/queries/nodes/DataVisualization/Components/Variables/AddVariableButton.tsx b/frontend/src/queries/nodes/DataVisualization/Components/Variables/AddVariableButton.tsx index b80618f2e823f..a4bbed9d1d3e7 100644 --- a/frontend/src/queries/nodes/DataVisualization/Components/Variables/AddVariableButton.tsx +++ b/frontend/src/queries/nodes/DataVisualization/Components/Variables/AddVariableButton.tsx @@ -5,14 +5,14 @@ import { FEATURE_FLAGS } from 'lib/constants' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { dataVisualizationLogic } from '../../dataVisualizationLogic' -import { addVariableLogic } from './addVariableLogic' import { NewVariableModal } from './NewVariableModal' +import { variableModalLogic } from './variableModalLogic' import { variablesLogic } from './variablesLogic' export const AddVariableButton = (): JSX.Element => { const { showEditingUI } = useValues(dataVisualizationLogic) const { featureFlags } = useValues(featureFlagLogic) - const { openModal } = useActions(addVariableLogic) + const { openNewVariableModal } = useActions(variableModalLogic) const { variables, variablesLoading } = useValues(variablesLogic) const { addVariable } = useActions(variablesLogic) @@ -30,19 +30,19 @@ export const AddVariableButton = (): JSX.Element => { items: [ { label: 'String', - onClick: () => openModal('String'), + onClick: () => openNewVariableModal('String'), }, { label: 'Number', - onClick: () => openModal('Number'), + onClick: () => openNewVariableModal('Number'), }, { label: 'Boolean', - onClick: () => openModal('Boolean'), + onClick: () => openNewVariableModal('Boolean'), }, { label: 'List', - onClick: () => openModal('List'), + onClick: () => openNewVariableModal('List'), }, ], }, @@ -57,7 +57,7 @@ export const AddVariableButton = (): JSX.Element => { ] : variables.map((n) => ({ label: n.name, - onClick: () => addVariable({ variableId: n.id, code_name: '' }), + onClick: () => addVariable({ variableId: n.id, code_name: n.code_name }), })), }, ]} diff --git a/frontend/src/queries/nodes/DataVisualization/Components/Variables/NewVariableModal.tsx b/frontend/src/queries/nodes/DataVisualization/Components/Variables/NewVariableModal.tsx index b7386fd745d5a..95c0d66a1c3d9 100644 --- a/frontend/src/queries/nodes/DataVisualization/Components/Variables/NewVariableModal.tsx 
+++ b/frontend/src/queries/nodes/DataVisualization/Components/Variables/NewVariableModal.tsx @@ -10,7 +10,7 @@ import { useActions, useValues } from 'kea' import { LemonField } from 'lib/lemon-ui/LemonField' import { Variable } from '../../types' -import { addVariableLogic } from './addVariableLogic' +import { variableModalLogic } from './variableModalLogic' const renderVariableSpecificFields = ( variable: Variable, @@ -95,12 +95,14 @@ const renderVariableSpecificFields = ( } export const NewVariableModal = (): JSX.Element => { - const { closeModal, updateVariable, save } = useActions(addVariableLogic) - const { isModalOpen, variable } = useValues(addVariableLogic) + const { closeModal, updateVariable, save } = useActions(variableModalLogic) + const { isModalOpen, variable, modalType } = useValues(variableModalLogic) + + const title = modalType === 'new' ? `New ${variable.type} variable` : `Editing ${variable.name}` return ( { +export const VariablesForDashboard = (): JSX.Element => { + const { featureFlags } = useValues(featureFlagLogic) + const { dashboardVariables } = useValues(dashboardLogic) + const { overrideVariableValue } = useActions(dashboardLogic) + + if (!featureFlags[FEATURE_FLAGS.INSIGHT_VARIABLES] || !dashboardVariables.length) { + return <> + } + + return ( + <> +
+ {dashboardVariables.map((n) => ( + + ))} +
+ + ) +} + +export const VariablesForInsight = (): JSX.Element => { const { featureFlags } = useValues(featureFlagLogic) - const { variablesForInsight } = useValues(variablesLogic) + const { variablesForInsight, showVariablesBar } = useValues(variablesLogic) + const { updateVariableValue, removeVariable } = useActions(variablesLogic) + const { showEditingUI } = useValues(dataVisualizationLogic) + const { variableOverridesAreSet } = useValues(dataNodeLogic) + const { openExistingVariableModal } = useActions(variableModalLogic) - if (!featureFlags[FEATURE_FLAGS.INSIGHT_VARIABLES] || !variablesForInsight.length) { + if (!featureFlags[FEATURE_FLAGS.INSIGHT_VARIABLES] || !variablesForInsight.length || !showVariablesBar) { return <> } @@ -26,7 +60,15 @@ export const Variables = (): JSX.Element => { <>
{variablesForInsight.map((n) => ( - + openExistingVariableModal(n)} + /> ))}
@@ -34,11 +76,32 @@ export const Variables = (): JSX.Element => { ) } -const VariableInput = ({ variable, closePopover }: { variable: Variable; closePopover: () => void }): JSX.Element => { - const { showEditingUI } = useValues(dataVisualizationLogic) - const { updateVariableValue } = useActions(variablesLogic) +interface VariableInputProps { + variable: Variable + showEditingUI: boolean + closePopover: () => void + onChange: (variableId: string, value: any) => void + onRemove?: (variableId: string) => void + variableSettingsOnClick?: () => void +} - const [localInputValue, setLocalInputValue] = useState(variable.value ?? variable.default_value ?? '') +const VariableInput = ({ + variable, + showEditingUI, + closePopover, + onChange, + onRemove, + variableSettingsOnClick, +}: VariableInputProps): JSX.Element => { + const [localInputValue, setLocalInputValue] = useState(() => { + const val = variable.value ?? variable.default_value + + if (variable.type === 'Number' && !val) { + return 0 + } + + return val ?? '' + }) const inputRef = useRef(null) const codeRef = useRef(null) @@ -52,21 +115,62 @@ const VariableInput = ({ variable, closePopover }: { variable: Variable; closePo return (
- setLocalInputValue(value)} - onPressEnter={() => { - updateVariableValue(variable.id, localInputValue) - closePopover() - }} - /> + {variable.type === 'String' && ( + setLocalInputValue(value)} + onPressEnter={() => { + onChange(variable.id, localInputValue) + closePopover() + }} + /> + )} + {variable.type === 'Number' && ( + setLocalInputValue(value ?? 0)} + onPressEnter={() => { + onChange(variable.id, localInputValue) + closePopover() + }} + /> + )} + {variable.type === 'Boolean' && ( + setLocalInputValue(value === 'true')} + options={[ + { + value: 'true', + label: 'true', + }, + { + value: 'false', + label: 'false', + }, + ]} + /> + )} + {variable.type === 'List' && ( + setLocalInputValue(value)} + options={variable.values.map((n) => ({ label: n, value: n }))} + /> + )} { - updateVariableValue(variable.id, localInputValue) + onChange(variable.id, localInputValue) closePopover() }} > @@ -92,7 +196,7 @@ const VariableInput = ({ variable, closePopover }: { variable: Variable; closePo } } }} - className="text-xs flex flex-1 items-center" + className="text-xs flex flex-1 items-center mr-2" > {variableAsHogQL} @@ -102,7 +206,22 @@ const VariableInput = ({ variable, closePopover }: { variable: Variable; closePo onClick={() => void copyToClipboard(variableAsHogQL, 'variable HogQL')} tooltip="Copy HogQL" /> - } size="xsmall" tooltip="Open variable settings" /> + {onRemove && ( + onRemove(variable.id)} + icon={} + size="xsmall" + tooltip="Remove variable from insight" + /> + )} + {variableSettingsOnClick && ( + } + size="xsmall" + tooltip="Open variable settings" + /> + )}
)} @@ -110,13 +229,43 @@ const VariableInput = ({ variable, closePopover }: { variable: Variable; closePo ) } -const VariableComponent = ({ variable }: { variable: Variable }): JSX.Element => { +interface VariableComponentProps { + variable: Variable + showEditingUI: boolean + onChange: (variableId: string, value: any) => void + variableOverridesAreSet: boolean + onRemove?: (variableId: string) => void + variableSettingsOnClick?: () => void +} + +const VariableComponent = ({ + variable, + showEditingUI, + onChange, + variableOverridesAreSet, + onRemove, + variableSettingsOnClick, +}: VariableComponentProps): JSX.Element => { const [isPopoverOpen, setPopoverOpen] = useState(false) return ( setPopoverOpen(false)} />} + overlay={ + setPopoverOpen(false)} + onRemove={onRemove} + variableSettingsOnClick={() => { + if (variableSettingsOnClick) { + setPopoverOpen(false) + variableSettingsOnClick() + } + }} + /> + } visible={isPopoverOpen} onClickOutside={() => setPopoverOpen(false)} className="DataVizVariable_Popover" @@ -131,8 +280,9 @@ const VariableComponent = ({ variable }: { variable: Variable }): JSX.Element => type="secondary" className="min-w-32 DataVizVariable_Button" onClick={() => setPopoverOpen(!isPopoverOpen)} + disabledReason={variableOverridesAreSet && 'Discard dashboard variables to change'} > - {variable.value ?? variable.default_value} + {variable.value?.toString() ?? variable.default_value?.toString()}
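
Taken together, the variable UI above leans on three kea logics: variableDataLogic fetches the saved insight variables, variableModalLogic (the renamed addVariableLogic, diffed below) drives the new/edit variable modal, and variablesLogic keeps each insight's selected variables and their values in sync with the query source. A minimal sketch of a component consuming them — assuming kea's useValues/useActions hooks and the keyed props shown elsewhere in this diff; the component itself is illustrative and not part of the PR:

import { useActions, useValues } from 'kea'

import { variableDataLogic } from './variableDataLogic'
import { variableModalLogic } from './variableModalLogic'
import { variablesLogic } from './variablesLogic'

// Illustrative only: wires the three logics the way DataVisualization.tsx does,
// using the same keyed props ({ key } and { key, readOnly }) shown in this diff.
export function VariablePickerSketch({ logicKey }: { logicKey: string }): JSX.Element {
    // Saved variables come from the unkeyed variableDataLogic loader.
    // (In the PR, variablesLogic triggers getVariables() in afterMount; a standalone
    // consumer would need to dispatch that action itself.)
    const { variables, variablesLoading } = useValues(variableDataLogic)
    // The create/edit modal logic is keyed per visualization
    const { openNewVariableModal, openExistingVariableModal } = useActions(variableModalLogic({ key: logicKey }))
    // Per-insight selection and values live in variablesLogic
    const { addVariable } = useActions(variablesLogic({ key: logicKey, readOnly: false }))

    return (
        <div>
            <button onClick={() => openNewVariableModal('String')}>New string variable</button>
            {!variablesLoading &&
                variables.map((v) => (
                    <button key={v.id} onClick={() => addVariable({ variableId: v.id, code_name: v.code_name })}>
                        Add {v.name}
                    </button>
                ))}
            {variables.length > 0 && (
                <button onClick={() => openExistingVariableModal(variables[0])}>Edit {variables[0].name}</button>
            )}
        </div>
    )
}

Dashboards reuse the same pieces: DashboardItems passes dashboardLogic's temporaryVariables down as variablesOverride, and VariableComponent shows a disabledReason when dataNodeLogic reports variableOverridesAreSet.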
diff --git a/frontend/src/queries/nodes/DataVisualization/Components/Variables/variableDataLogic.ts b/frontend/src/queries/nodes/DataVisualization/Components/Variables/variableDataLogic.ts new file mode 100644 index 0000000000000..8b7fbc8b98962 --- /dev/null +++ b/frontend/src/queries/nodes/DataVisualization/Components/Variables/variableDataLogic.ts @@ -0,0 +1,22 @@ +import { kea, path } from 'kea' +import { loaders } from 'kea-loaders' +import api from 'lib/api' + +import { Variable } from '../../types' +import type { variableDataLogicType } from './variableDataLogicType' + +export const variableDataLogic = kea([ + path(['queries', 'nodes', 'DataVisualization', 'Components', 'Variables', 'variableDataLogic']), + loaders({ + variables: [ + [] as Variable[], + { + getVariables: async () => { + const insights = await api.insightVariables.list() + + return insights.results + }, + }, + ], + }), +]) diff --git a/frontend/src/queries/nodes/DataVisualization/Components/Variables/addVariableLogic.ts b/frontend/src/queries/nodes/DataVisualization/Components/Variables/variableModalLogic.ts similarity index 55% rename from frontend/src/queries/nodes/DataVisualization/Components/Variables/addVariableLogic.ts rename to frontend/src/queries/nodes/DataVisualization/Components/Variables/variableModalLogic.ts index 396fd3dbc6b87..641e991250537 100644 --- a/frontend/src/queries/nodes/DataVisualization/Components/Variables/addVariableLogic.ts +++ b/frontend/src/queries/nodes/DataVisualization/Components/Variables/variableModalLogic.ts @@ -1,9 +1,11 @@ -import { actions, kea, path, reducers } from 'kea' -import { loaders } from 'kea-loaders' -import api from 'lib/api' +import { lemonToast } from '@posthog/lemon-ui' +import { actions, connect, kea, key, listeners, path, props, reducers } from 'kea' +import api, { ApiError } from 'lib/api' import { BooleanVariable, ListVariable, NumberVariable, StringVariable, Variable, VariableType } from '../../types' -import type { addVariableLogicType } from './addVariableLogicType' +import { variableDataLogic } from './variableDataLogic' +import type { variableModalLogicType } from './variableModalLogicType' +import { variablesLogic } from './variablesLogic' const DEFAULT_VARIABLE: StringVariable = { id: '', @@ -13,31 +15,52 @@ const DEFAULT_VARIABLE: StringVariable = { code_name: '', } -export const addVariableLogic = kea([ +export interface AddVariableLogicProps { + key: string +} + +export const variableModalLogic = kea([ path(['queries', 'nodes', 'DataVisualization', 'Components', 'Variables', 'variableLogic']), + props({ key: '' } as AddVariableLogicProps), + key((props) => props.key), + connect({ + actions: [variableDataLogic, ['getVariables'], variablesLogic, ['addVariable']], + }), actions({ - openModal: (variableType: VariableType) => ({ variableType }), + openNewVariableModal: (variableType: VariableType) => ({ variableType }), + openExistingVariableModal: (variable: Variable) => ({ variable }), closeModal: true, updateVariable: (variable: Variable) => ({ variable }), + save: true, }), reducers({ + modalType: [ + 'new' as 'new' | 'existing', + { + openNewVariableModal: () => 'new', + openExistingVariableModal: () => 'existing', + }, + ], variableType: [ 'string' as VariableType, { - openModal: (_, { variableType }) => variableType, + openNewVariableModal: (_, { variableType }) => variableType, + openExistingVariableModal: (_, { variable }) => variable.type, }, ], isModalOpen: [ false as boolean, { - openModal: () => true, + openNewVariableModal: () => true, + 
openExistingVariableModal: () => true, closeModal: () => false, }, ], variable: [ DEFAULT_VARIABLE as Variable, { - openModal: (_, { variableType }) => { + openExistingVariableModal: (_, { variable }) => ({ ...variable }), + openNewVariableModal: (_, { variableType }) => { if (variableType === 'String') { return { id: '', @@ -86,14 +109,22 @@ export const addVariableLogic = kea([ }, ], }), - loaders(({ values }) => ({ - savedVariable: [ - null as null | Variable, - { - save: async () => { - return await api.insightVariables.create(values.variable) - }, - }, - ], + listeners(({ values, actions }) => ({ + save: async () => { + try { + if (values.modalType === 'new') { + const variable = await api.insightVariables.create(values.variable) + actions.addVariable({ variableId: variable.id, code_name: variable.code_name }) + } else { + await api.insightVariables.update(values.variable.id, values.variable) + } + + actions.getVariables() + actions.closeModal() + } catch (e: any) { + const error = e as ApiError + lemonToast.error(error.detail ?? error.message) + } + }, })), ]) diff --git a/frontend/src/queries/nodes/DataVisualization/Components/Variables/variablesLogic.ts b/frontend/src/queries/nodes/DataVisualization/Components/Variables/variablesLogic.ts index 4c92665b7f9e6..937c027a0a104 100644 --- a/frontend/src/queries/nodes/DataVisualization/Components/Variables/variablesLogic.ts +++ b/frontend/src/queries/nodes/DataVisualization/Components/Variables/variablesLogic.ts @@ -1,15 +1,14 @@ -import { actions, afterMount, connect, kea, key, path, props, reducers, selectors } from 'kea' -import { loaders } from 'kea-loaders' +import { actions, afterMount, connect, kea, key, listeners, path, props, reducers, selectors } from 'kea' import { subscriptions } from 'kea-subscriptions' -import api from 'lib/api' import { FEATURE_FLAGS } from 'lib/constants' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' -import { getVariablesFromQuery } from 'scenes/insights/utils/queryUtils' +import { getVariablesFromQuery, haveVariablesOrFiltersChanged } from 'scenes/insights/utils/queryUtils' import { DataVisualizationNode, HogQLVariable } from '~/queries/schema' import { dataVisualizationLogic } from '../../dataVisualizationLogic' -import { Variable } from '../../types' +import { Variable, VariableType } from '../../types' +import { variableDataLogic } from './variableDataLogic' import type { variablesLogicType } from './variablesLogicType' export interface VariablesLogicProps { @@ -18,37 +17,82 @@ export interface VariablesLogicProps { readOnly: boolean } +const convertValueToCorrectType = (value: string, type: VariableType): number | string | boolean => { + if (type === 'Number') { + return Number(value) + } + + if (type === 'Boolean' && typeof value === 'string') { + return value.toLowerCase() === 'true' + } + + return value +} + export const variablesLogic = kea([ path(['queries', 'nodes', 'DataVisualization', 'Components', 'Variables', 'variablesLogic']), props({ key: '' } as VariablesLogicProps), key((props) => props.key), connect({ - actions: [dataVisualizationLogic, ['setQuery', 'loadData']], - values: [dataVisualizationLogic, ['query'], featureFlagLogic, ['featureFlags']], + actions: [dataVisualizationLogic, ['setQuery', 'loadData'], variableDataLogic, ['getVariables']], + values: [ + dataVisualizationLogic, + ['query', 'insightLogicProps'], + variableDataLogic, + ['variables', 'variablesLoading'], + featureFlagLogic, + ['featureFlags'], + ], }), - actions({ + actions(({ values }) => ({ 
addVariable: (variable: HogQLVariable) => ({ variable }), - updateVariableValue: (variableId: string, value: any) => ({ variableId, value }), + addVariables: (variables: HogQLVariable[]) => ({ variables }), + removeVariable: (variableId: string) => ({ variableId }), + updateVariableValue: (variableId: string, value: any) => ({ + variableId, + value, + allVariables: values.variables, + }), setEditorQuery: (query: string) => ({ query }), - }), + updateSourceQuery: true, + })), reducers({ internalSelectedVariables: [ [] as HogQLVariable[], { addVariable: (state, { variable }) => { + if (state.find((n) => variable.variableId === n.variableId)) { + return state + } + return [...state, { ...variable }] }, - updateVariableValue: (state, { variableId, value }) => { + addVariables: (_state, { variables }) => { + return [...variables.map((n) => ({ ...n }))] + }, + updateVariableValue: (state, { variableId, value, allVariables }) => { const variableIndex = state.findIndex((n) => n.variableId === variableId) if (variableIndex < 0) { return state } + const variableType = allVariables.find((n) => n.id === variableId)?.type + const valueWithType = convertValueToCorrectType(value, variableType ?? 'String') + const variablesInState = [...state] - variablesInState[variableIndex] = { ...variablesInState[variableIndex], value } + variablesInState[variableIndex] = { ...variablesInState[variableIndex], value: valueWithType } return variablesInState }, + removeVariable: (state, { variableId }) => { + const stateCopy = [...state] + const index = stateCopy.findIndex((n) => n.variableId === variableId) + if (index >= 0) { + stateCopy.splice(index, 1) + } + + return stateCopy + }, }, ], editorQuery: [ @@ -59,18 +103,6 @@ export const variablesLogic = kea([ }, ], }), - loaders({ - variables: [ - [] as Variable[], - { - getVariables: async () => { - const insights = await api.insightVariables.list() - - return insights.results - }, - }, - ], - }), selectors({ variablesForInsight: [ (s) => [s.variables, s.internalSelectedVariables], @@ -91,17 +123,38 @@ export const variablesLogic = kea([ .filter((n): n is Variable => Boolean(n)) }, ], + showVariablesBar: [ + (state) => [state.insightLogicProps], + (insightLogicProps) => { + return !insightLogicProps.dashboardId + }, + ], }), - subscriptions(({ props, actions, values }) => ({ - variablesForInsight: (variables: Variable[]) => { + listeners(({ props, values, actions }) => ({ + addVariable: () => { + actions.updateSourceQuery() + }, + removeVariable: () => { + actions.updateSourceQuery() + }, + updateVariableValue: () => { + actions.updateSourceQuery() + }, + updateSourceQuery: () => { + if (!values.featureFlags[FEATURE_FLAGS.INSIGHT_VARIABLES]) { + return + } + + const variables = values.internalSelectedVariables + const query: DataVisualizationNode = { ...values.query, source: { ...values.query.source, variables: variables.reduce((acc, cur) => { - if (cur.id) { - acc[cur.id] = { - variableId: cur.id, + if (cur.variableId) { + acc[cur.variableId] = { + variableId: cur.variableId, value: cur.value, code_name: cur.code_name, } @@ -112,18 +165,20 @@ export const variablesLogic = kea([ }, } - if (!values.featureFlags[FEATURE_FLAGS.INSIGHT_VARIABLES]) { + const queryVarsHaveChanged = haveVariablesOrFiltersChanged(query.source, values.query.source) + if (!queryVarsHaveChanged) { return } + actions.setQuery(query) + if (props.readOnly) { // Refresh the data manaully via dataNodeLogic when in insight view mode actions.loadData(true, undefined, query.source) - } else { - // 
Update the query source when in edit mode - actions.setQuery(query) } }, + })), + subscriptions(({ actions, values }) => ({ editorQuery: (query: string) => { const queryVariableMatches = getVariablesFromQuery(query) @@ -137,22 +192,29 @@ export const variablesLogic = kea([ return } - const variableAlreadySelected = values.variablesForInsight.find((n) => n.code_name === match) + const variableAlreadySelected = values.internalSelectedVariables.find((n) => n.code_name === match) if (!variableAlreadySelected) { actions.addVariable({ variableId: variableExists.id, code_name: variableExists.code_name }) } }) }, + query: (query: DataVisualizationNode) => { + if (!values.featureFlags[FEATURE_FLAGS.INSIGHT_VARIABLES]) { + return + } + + const variables = Object.values(query.source.variables ?? {}) + + if (variables.length) { + actions.addVariables(variables) + } + }, })), afterMount(({ actions, values }) => { if (!values.featureFlags[FEATURE_FLAGS.INSIGHT_VARIABLES]) { return } - Object.values(values.query.source.variables ?? {}).forEach((variable) => { - actions.addVariable(variable) - }) - actions.getVariables() }), ]) diff --git a/frontend/src/queries/nodes/DataVisualization/DataVisualization.tsx b/frontend/src/queries/nodes/DataVisualization/DataVisualization.tsx index c15e93076809f..9a021d962b0f9 100644 --- a/frontend/src/queries/nodes/DataVisualization/DataVisualization.tsx +++ b/frontend/src/queries/nodes/DataVisualization/DataVisualization.tsx @@ -17,7 +17,14 @@ import { HogQLBoldNumber } from 'scenes/insights/views/BoldNumber/BoldNumber' import { urls } from 'scenes/urls' import { insightVizDataCollectionId, insightVizDataNodeKey } from '~/queries/nodes/InsightViz/InsightViz' -import { AnyResponseType, DataVisualizationNode, HogQLQuery, HogQLQueryResponse, NodeKind } from '~/queries/schema' +import { + AnyResponseType, + DataVisualizationNode, + HogQLQuery, + HogQLQueryResponse, + HogQLVariable, + NodeKind, +} from '~/queries/schema' import { QueryContext } from '~/queries/types' import { ChartDisplayType, ExporterFormat, InsightLogicProps } from '~/types' @@ -32,7 +39,8 @@ import { SideBar } from './Components/SideBar' import { Table } from './Components/Table' import { TableDisplay } from './Components/TableDisplay' import { AddVariableButton } from './Components/Variables/AddVariableButton' -import { Variables } from './Components/Variables/Variables' +import { variableModalLogic } from './Components/Variables/variableModalLogic' +import { VariablesForInsight } from './Components/Variables/Variables' import { variablesLogic } from './Components/Variables/variablesLogic' import { dataVisualizationLogic, DataVisualizationLogicProps } from './dataVisualizationLogic' import { displayLogic } from './displayLogic' @@ -40,12 +48,14 @@ import { displayLogic } from './displayLogic' interface DataTableVisualizationProps { uniqueKey?: string | number query: DataVisualizationNode - setQuery?: (query: DataVisualizationNode) => void + setQuery: (query: DataVisualizationNode) => void context?: QueryContext /* Cached Results are provided when shared or exported, the data node logic becomes read only implicitly */ cachedResults?: AnyResponseType readOnly?: boolean + /** Dashboard variables to override the ones in the query */ + variablesOverride?: Record | null } let uniqueNode = 0 @@ -57,6 +67,7 @@ export function DataTableVisualization({ context, cachedResults, readOnly, + variablesOverride, }: DataTableVisualizationProps): JSX.Element { const [key] = useState(`DataVisualizationNode.${uniqueKey ?? 
uniqueNode++}`) const insightProps: InsightLogicProps = context?.insightProps || { @@ -73,6 +84,7 @@ export function DataTableVisualization({ insightLogicProps: insightProps, setQuery, cachedResults, + variablesOverride, } const dataNodeLogicProps: DataNodeLogicProps = { @@ -81,6 +93,7 @@ export function DataTableVisualization({ cachedResults, loadPriority: insightProps.loadPriority, dataNodeCollectionId: insightVizDataCollectionId(insightProps, key), + variablesOverride, } return ( @@ -91,14 +104,16 @@ export function DataTableVisualization({ logic={variablesLogic} props={{ key: dataVisualizationLogicProps.key, readOnly: readOnly ?? false }} > - + + + @@ -238,7 +253,7 @@ function InternalDataTableVisualization(props: DataTableVisualizationProps): JSX )} - +
{showEditingUI && isChartSettingsPanelOpen && ( diff --git a/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts b/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts index b0ffb32015d57..a1a23ac6f0dfa 100644 --- a/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts +++ b/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts @@ -16,6 +16,7 @@ import { ChartSettingsFormatting, ConditionalFormattingRule, DataVisualizationNode, + HogQLVariable, } from '~/queries/schema' import { QueryContext } from '~/queries/types' import { ChartDisplayType, InsightLogicProps, ItemMode } from '~/types' @@ -68,6 +69,8 @@ export interface DataVisualizationLogicProps { context?: QueryContext cachedResults?: AnyResponseType insightLoading?: boolean + /** Dashboard variables to override the ones in the query */ + variablesOverride?: Record | null } export interface SelectedYAxis { @@ -222,6 +225,7 @@ export const dataVisualizationLogic = kea([ query: props.query.source, dataNodeCollectionId: insightVizDataCollectionId(props.insightLogicProps, props.key), loadPriority: props.insightLogicProps.loadPriority, + variablesOverride: props.variablesOverride, }), ['response', 'responseLoading', 'responseError', 'queryCancelled'], themeLogic, @@ -234,11 +238,12 @@ export const dataVisualizationLogic = kea([ query: props.query.source, dataNodeCollectionId: insightVizDataCollectionId(props.insightLogicProps, props.key), loadPriority: props.insightLogicProps.loadPriority, + variablesOverride: props.variablesOverride, }), ['loadData'], ], })), - props({ query: {} } as DataVisualizationLogicProps), + props({ query: { source: {} } } as DataVisualizationLogicProps), actions(({ values }) => ({ setVisualizationType: (visualizationType: ChartDisplayType) => ({ visualizationType }), updateXSeries: (columnName: string) => ({ @@ -559,6 +564,7 @@ export const dataVisualizationLogic = kea([ return insightMode == ItemMode.Edit }, ], + insightLogicProps: [(_state, props) => [props.insightLogicProps], (insightLogicProps) => insightLogicProps], showResultControls: [ (state, props) => [state.insightMode, props.insightLogicProps], (insightMode, insightLogicProps) => { diff --git a/frontend/src/queries/nodes/InsightQuery/utils/cleanProperties.test.ts b/frontend/src/queries/nodes/InsightQuery/utils/cleanProperties.test.ts index 97c388dd7d9dc..12bc28757ea8b 100644 --- a/frontend/src/queries/nodes/InsightQuery/utils/cleanProperties.test.ts +++ b/frontend/src/queries/nodes/InsightQuery/utils/cleanProperties.test.ts @@ -132,4 +132,26 @@ describe('cleanEntityProperties', () => { }, ]) }) + + it('handles negation for cohorts', () => { + let properties: any = [ + { + key: 'id', + type: 'cohort', + value: 1, + operator: 'exact', + negation: false, + }, + ] + let result = cleanEntityProperties(properties) + expect(result).toEqual([{ key: 'id', type: 'cohort', value: 1, operator: 'exact' }]) + + properties = [{ key: 'id', type: 'cohort', value: 1, operator: 'exact', negation: true }] + result = cleanEntityProperties(properties) + expect(result).toEqual([{ key: 'id', type: 'cohort', value: 1, operator: 'not_in' }]) + + properties = [{ key: 'id', type: 'cohort', value: 1, negation: true }] + result = cleanEntityProperties(properties) + expect(result).toEqual([{ key: 'id', type: 'cohort', value: 1, operator: 'not_in' }]) + }) }) diff --git a/frontend/src/queries/nodes/InsightQuery/utils/cleanProperties.ts b/frontend/src/queries/nodes/InsightQuery/utils/cleanProperties.ts index 
4474b6423dbf6..82edbfb56cfb9 100644 --- a/frontend/src/queries/nodes/InsightQuery/utils/cleanProperties.ts +++ b/frontend/src/queries/nodes/InsightQuery/utils/cleanProperties.ts @@ -142,6 +142,14 @@ const cleanProperty = (property: Record): AnyPropertyFilter => { delete property['operator'] } + // convert `negation` for cohorts + if (property['type'] === 'cohort' && property['negation'] !== undefined) { + if (property['operator'] === PropertyOperator.Exact && property['negation']) { + property['operator'] = PropertyOperator.NotIn + } + delete property['negation'] + } + // remove none from values if (Array.isArray(property['value'])) { property['value'] = property['value'].filter((x) => x !== null) diff --git a/frontend/src/queries/nodes/InsightViz/InsightViz.tsx b/frontend/src/queries/nodes/InsightViz/InsightViz.tsx index aa47a108cd4c4..5e0e7be45a936 100644 --- a/frontend/src/queries/nodes/InsightViz/InsightViz.tsx +++ b/frontend/src/queries/nodes/InsightViz/InsightViz.tsx @@ -10,7 +10,7 @@ import { insightVizDataLogic } from 'scenes/insights/insightVizDataLogic' import { keyForInsightLogicProps } from 'scenes/insights/sharedUtils' import { ErrorBoundary } from '~/layout/ErrorBoundary' -import { DashboardFilter, InsightVizNode } from '~/queries/schema' +import { DashboardFilter, HogQLVariable, InsightVizNode } from '~/queries/schema' import { QueryContext } from '~/queries/types' import { isFunnelsQuery } from '~/queries/utils' import { InsightLogicProps, ItemMode } from '~/types' @@ -32,12 +32,13 @@ export const insightVizDataCollectionId = (props: InsightLogicProps | undef type InsightVizProps = { uniqueKey?: string | number query: InsightVizNode - setQuery?: (node: InsightVizNode) => void + setQuery: (node: InsightVizNode) => void context?: QueryContext readOnly?: boolean embedded?: boolean inSharedMode?: boolean filtersOverride?: DashboardFilter | null + variablesOverride?: Record | null } let uniqueNode = 0 @@ -51,6 +52,7 @@ export function InsightViz({ embedded, inSharedMode, filtersOverride, + variablesOverride, }: InsightVizProps): JSX.Element { const [key] = useState(() => `InsightViz.${uniqueKey || uniqueNode++}`) const insightProps: InsightLogicProps = context?.insightProps || { @@ -59,6 +61,7 @@ export function InsightViz({ setQuery, dataNodeCollectionId: key, filtersOverride, + variablesOverride, } if (!insightProps.setQuery && setQuery) { @@ -75,6 +78,7 @@ export function InsightViz({ loadPriority: insightProps.loadPriority, dataNodeCollectionId: insightVizDataCollectionId(insightProps, vizKey), filtersOverride, + variablesOverride, } const { insightMode } = useValues(insightSceneLogic) diff --git a/frontend/src/queries/query.test.ts b/frontend/src/queries/query.test.ts index f5783bebd2113..060333b75be19 100644 --- a/frontend/src/queries/query.test.ts +++ b/frontend/src/queries/query.test.ts @@ -11,7 +11,7 @@ describe('query', () => { beforeEach(() => { useMocks({ post: { - '/api/projects/:team/query': (req) => { + '/api/environments/:team_id/query': (req) => { const data = req.body as any if (data.query?.kind === 'HogQLQuery') { return [200, { results: [], clickhouse: 'clickhouse string', hogql: 'hogql string' }] diff --git a/frontend/src/queries/query.ts b/frontend/src/queries/query.ts index 1ea01c13868f1..1952432e3607f 100644 --- a/frontend/src/queries/query.ts +++ b/frontend/src/queries/query.ts @@ -6,7 +6,16 @@ import posthog from 'posthog-js' import { OnlineExportContext, QueryExportContext } from '~/types' -import { DashboardFilter, DataNode, HogQLQuery, 
HogQLQueryResponse, NodeKind, PersonsNode, QueryStatus } from './schema' +import { + DashboardFilter, + DataNode, + HogQLQuery, + HogQLQueryResponse, + HogQLVariable, + NodeKind, + PersonsNode, + QueryStatus, +} from './schema' import { isAsyncResponse, isDataTableNode, @@ -79,6 +88,7 @@ async function executeQuery( queryId?: string, setPollResponse?: (response: QueryStatus) => void, filtersOverride?: DashboardFilter | null, + variablesOverride?: Record | null, /** * Whether to limit the function to just polling the provided query ID. * This is important in shared contexts, where we cannot create arbitrary queries via POST – we can only GET. @@ -91,7 +101,15 @@ async function executeQuery( !!featureFlagLogic.findMounted()?.values.featureFlags?.[FEATURE_FLAGS.QUERY_ASYNC] if (!pollOnly) { - const response = await api.query(queryNode, methodOptions, queryId, refresh, isAsyncQuery, filtersOverride) + const response = await api.query( + queryNode, + methodOptions, + queryId, + refresh, + isAsyncQuery, + filtersOverride, + variablesOverride + ) if (!isAsyncResponse(response)) { // Executed query synchronously or from cache @@ -124,6 +142,7 @@ export async function performQuery( queryId?: string, setPollResponse?: (status: QueryStatus) => void, filtersOverride?: DashboardFilter | null, + variablesOverride?: Record | null, pollOnly = false ): Promise> { let response: NonNullable @@ -141,6 +160,7 @@ export async function performQuery( queryId, setPollResponse, filtersOverride, + variablesOverride, pollOnly ) if (isHogQLQuery(queryNode) && response && typeof response === 'object') { diff --git a/frontend/src/queries/schema.json b/frontend/src/queries/schema.json index 1f55d0478eae7..a9dc7be1bf476 100644 --- a/frontend/src/queries/schema.json +++ b/frontend/src/queries/schema.json @@ -401,10 +401,20 @@ }, "AlertCondition": { "additionalProperties": false, + "properties": { + "type": { + "$ref": "#/definitions/AlertConditionType" + } + }, + "required": ["type"], "type": "object" }, + "AlertConditionType": { + "enum": ["absolute_value", "relative_increase", "relative_decrease"], + "type": "string" + }, "AlertState": { - "enum": ["Firing", "Not firing", "Errored"], + "enum": ["Firing", "Not firing", "Errored", "Snoozed"], "type": "string" }, "AnyDataNode": { @@ -467,10 +477,10 @@ "$ref": "#/definitions/ErrorTrackingQuery" }, { - "$ref": "#/definitions/ExperimentFunnelQuery" + "$ref": "#/definitions/ExperimentFunnelsQuery" }, { - "$ref": "#/definitions/ExperimentTrendQuery" + "$ref": "#/definitions/ExperimentTrendsQuery" } ] }, @@ -1240,7 +1250,7 @@ ], "type": "object" }, - "CachedExperimentFunnelQueryResponse": { + "CachedExperimentFunnelsQueryResponse": { "additionalProperties": false, "properties": { "cache_key": { @@ -1254,9 +1264,22 @@ "description": "What triggered the calculation of the query, leave empty if user/immediate", "type": "string" }, + "credible_intervals": { + "additionalProperties": { + "items": { + "type": "number" + }, + "maxItems": 2, + "minItems": 2, + "type": "array" + }, + "type": "object" + }, + "expected_loss": { + "type": "number" + }, "insight": { - "const": "FUNNELS", - "type": "string" + "$ref": "#/definitions/FunnelsQueryResponse" }, "is_cached": { "type": "boolean" @@ -1269,32 +1292,49 @@ "format": "date-time", "type": "string" }, + "probability": { + "additionalProperties": { + "type": "number" + }, + "type": "object" + }, "query_status": { "$ref": "#/definitions/QueryStatus", "description": "Query status indicates whether next to the provided data, a query is still 
running." }, - "results": { - "additionalProperties": { - "$ref": "#/definitions/ExperimentVariantFunnelResult" - }, - "type": "object" + "significance_code": { + "$ref": "#/definitions/ExperimentSignificanceCode" + }, + "significant": { + "type": "boolean" }, "timezone": { "type": "string" + }, + "variants": { + "items": { + "$ref": "#/definitions/ExperimentVariantFunnelsBaseStats" + }, + "type": "array" } }, "required": [ "cache_key", + "credible_intervals", + "expected_loss", "insight", "is_cached", "last_refresh", "next_allowed_client_refresh", - "results", - "timezone" + "probability", + "significance_code", + "significant", + "timezone", + "variants" ], "type": "object" }, - "CachedExperimentTrendQueryResponse": { + "CachedExperimentTrendsQueryResponse": { "additionalProperties": false, "properties": { "cache_key": { @@ -1308,9 +1348,19 @@ "description": "What triggered the calculation of the query, leave empty if user/immediate", "type": "string" }, + "credible_intervals": { + "additionalProperties": { + "items": { + "type": "number" + }, + "maxItems": 2, + "minItems": 2, + "type": "array" + }, + "type": "object" + }, "insight": { - "const": "TRENDS", - "type": "string" + "$ref": "#/definitions/TrendsQueryResponse" }, "is_cached": { "type": "boolean" @@ -1323,28 +1373,48 @@ "format": "date-time", "type": "string" }, - "query_status": { - "$ref": "#/definitions/QueryStatus", - "description": "Query status indicates whether next to the provided data, a query is still running." + "p_value": { + "type": "number" }, - "results": { + "probability": { "additionalProperties": { - "$ref": "#/definitions/ExperimentVariantTrendResult" + "type": "number" }, "type": "object" }, + "query_status": { + "$ref": "#/definitions/QueryStatus", + "description": "Query status indicates whether next to the provided data, a query is still running." 
+ }, + "significance_code": { + "$ref": "#/definitions/ExperimentSignificanceCode" + }, + "significant": { + "type": "boolean" + }, "timezone": { "type": "string" + }, + "variants": { + "items": { + "$ref": "#/definitions/ExperimentVariantTrendsBaseStats" + }, + "type": "array" } }, "required": [ "cache_key", + "credible_intervals", "insight", "is_cached", "last_refresh", "next_allowed_client_refresh", - "results", - "timezone" + "p_value", + "probability", + "significance_code", + "significant", + "timezone", + "variants" ], "type": "object" }, @@ -3611,35 +3681,101 @@ { "additionalProperties": false, "properties": { + "credible_intervals": { + "additionalProperties": { + "items": { + "type": "number" + }, + "maxItems": 2, + "minItems": 2, + "type": "array" + }, + "type": "object" + }, + "expected_loss": { + "type": "number" + }, "insight": { - "const": "FUNNELS", - "type": "string" + "$ref": "#/definitions/FunnelsQueryResponse" }, - "results": { + "probability": { "additionalProperties": { - "$ref": "#/definitions/ExperimentVariantFunnelResult" + "type": "number" }, "type": "object" + }, + "significance_code": { + "$ref": "#/definitions/ExperimentSignificanceCode" + }, + "significant": { + "type": "boolean" + }, + "variants": { + "items": { + "$ref": "#/definitions/ExperimentVariantFunnelsBaseStats" + }, + "type": "array" } }, - "required": ["insight", "results"], + "required": [ + "credible_intervals", + "expected_loss", + "insight", + "probability", + "significance_code", + "significant", + "variants" + ], "type": "object" }, { "additionalProperties": false, "properties": { + "credible_intervals": { + "additionalProperties": { + "items": { + "type": "number" + }, + "maxItems": 2, + "minItems": 2, + "type": "array" + }, + "type": "object" + }, "insight": { - "const": "TRENDS", - "type": "string" + "$ref": "#/definitions/TrendsQueryResponse" }, - "results": { + "p_value": { + "type": "number" + }, + "probability": { "additionalProperties": { - "$ref": "#/definitions/ExperimentVariantTrendResult" + "type": "number" }, "type": "object" + }, + "significance_code": { + "$ref": "#/definitions/ExperimentSignificanceCode" + }, + "significant": { + "type": "boolean" + }, + "variants": { + "items": { + "$ref": "#/definitions/ExperimentVariantTrendsBaseStats" + }, + "type": "array" } }, - "required": ["insight", "results"], + "required": [ + "credible_intervals", + "insight", + "p_value", + "probability", + "significance_code", + "significant", + "variants" + ], "type": "object" } ] @@ -3757,10 +3893,10 @@ "$ref": "#/definitions/ErrorTrackingQuery" }, { - "$ref": "#/definitions/ExperimentFunnelQuery" + "$ref": "#/definitions/ExperimentFunnelsQuery" }, { - "$ref": "#/definitions/ExperimentTrendQuery" + "$ref": "#/definitions/ExperimentTrendsQuery" } ], "description": "Source of the events" @@ -4992,14 +5128,14 @@ "required": ["columns", "hogql", "results", "types"], "type": "object" }, - "ExperimentFunnelQuery": { + "ExperimentFunnelsQuery": { "additionalProperties": false, "properties": { "experiment_id": { "type": "integer" }, "kind": { - "const": "ExperimentFunnelQuery", + "const": "ExperimentFunnelsQuery", "type": "string" }, "modifiers": { @@ -5007,7 +5143,7 @@ "description": "Modifiers used when performing the query" }, "response": { - "$ref": "#/definitions/ExperimentFunnelQueryResponse" + "$ref": "#/definitions/ExperimentFunnelsQueryResponse" }, "source": { "$ref": "#/definitions/FunnelsQuery" @@ -5016,24 +5152,61 @@ "required": ["experiment_id", "kind", "source"], "type": "object" }, - 
"ExperimentFunnelQueryResponse": { + "ExperimentFunnelsQueryResponse": { "additionalProperties": false, "properties": { + "credible_intervals": { + "additionalProperties": { + "items": { + "type": "number" + }, + "maxItems": 2, + "minItems": 2, + "type": "array" + }, + "type": "object" + }, + "expected_loss": { + "type": "number" + }, "insight": { - "const": "FUNNELS", - "type": "string" + "$ref": "#/definitions/FunnelsQueryResponse" }, - "results": { + "probability": { "additionalProperties": { - "$ref": "#/definitions/ExperimentVariantFunnelResult" + "type": "number" }, "type": "object" + }, + "significance_code": { + "$ref": "#/definitions/ExperimentSignificanceCode" + }, + "significant": { + "type": "boolean" + }, + "variants": { + "items": { + "$ref": "#/definitions/ExperimentVariantFunnelsBaseStats" + }, + "type": "array" } }, - "required": ["insight", "results"], + "required": [ + "insight", + "variants", + "probability", + "significant", + "significance_code", + "expected_loss", + "credible_intervals" + ], "type": "object" }, - "ExperimentTrendQuery": { + "ExperimentSignificanceCode": { + "enum": ["significant", "not_enough_exposure", "low_win_probability", "high_loss", "high_p_value"], + "type": "string" + }, + "ExperimentTrendsQuery": { "additionalProperties": false, "properties": { "count_query": { @@ -5046,7 +5219,7 @@ "$ref": "#/definitions/TrendsQuery" }, "kind": { - "const": "ExperimentTrendQuery", + "const": "ExperimentTrendsQuery", "type": "string" }, "modifiers": { @@ -5054,30 +5227,63 @@ "description": "Modifiers used when performing the query" }, "response": { - "$ref": "#/definitions/ExperimentTrendQueryResponse" + "$ref": "#/definitions/ExperimentTrendsQueryResponse" } }, "required": ["count_query", "experiment_id", "kind"], "type": "object" }, - "ExperimentTrendQueryResponse": { + "ExperimentTrendsQueryResponse": { "additionalProperties": false, "properties": { + "credible_intervals": { + "additionalProperties": { + "items": { + "type": "number" + }, + "maxItems": 2, + "minItems": 2, + "type": "array" + }, + "type": "object" + }, "insight": { - "const": "TRENDS", - "type": "string" + "$ref": "#/definitions/TrendsQueryResponse" }, - "results": { + "p_value": { + "type": "number" + }, + "probability": { "additionalProperties": { - "$ref": "#/definitions/ExperimentVariantTrendResult" + "type": "number" }, "type": "object" + }, + "significance_code": { + "$ref": "#/definitions/ExperimentSignificanceCode" + }, + "significant": { + "type": "boolean" + }, + "variants": { + "items": { + "$ref": "#/definitions/ExperimentVariantTrendsBaseStats" + }, + "type": "array" } }, - "required": ["insight", "results"], + "required": [ + "insight", + "variants", + "probability", + "significant", + "significance_code", + "p_value", + "credible_intervals" + ], "type": "object" }, - "ExperimentVariantFunnelResult": { + "ExperimentVariantFunnelsBaseStats": { "additionalProperties": false, "properties": { "failure_count": { @@ -5093,7 +5299,7 @@ "required": ["key", "success_count", "failure_count"], "type": "object" }, - "ExperimentVariantTrendResult": { + "ExperimentVariantTrendsBaseStats": { "additionalProperties": false, "properties": { "absolute_exposure": { @@ -6808,12 +7014,20 @@ "InsightThreshold": { "additionalProperties": false, "properties": { - "absoluteThreshold": { - "$ref": "#/definitions/InsightsThresholdAbsolute" + "bounds": { + "$ref": "#/definitions/InsightsThresholdBounds" + }, + "type": { + "$ref": "#/definitions/InsightThresholdType" } }, + "required": ["type"], "type": 
"object" }, + "InsightThresholdType": { + "enum": ["absolute", "percentage"], + "type": "string" + }, "InsightVizNode": { "additionalProperties": false, "properties": { @@ -7116,7 +7330,7 @@ "required": ["kind"], "type": "object" }, - "InsightsThresholdAbsolute": { + "InsightsThresholdBounds": { "additionalProperties": false, "properties": { "lower": { @@ -7401,8 +7615,8 @@ "WebStatsTableQuery", "WebExternalClicksTableQuery", "WebGoalsQuery", - "ExperimentFunnelQuery", - "ExperimentTrendQuery", + "ExperimentFunnelsQuery", + "ExperimentTrendsQuery", "DatabaseSchemaQuery", "SuggestedQuestionsQuery", "TeamTaxonomyQuery", @@ -7886,6 +8100,12 @@ "$ref": "#/definitions/RefreshType", "default": "blocking", "description": "Whether results should be calculated sync or async, and how much to rely on the cache:\n- `'blocking'` - calculate synchronously (returning only when the query is done), UNLESS there are very fresh results in the cache\n- `'async'` - kick off background calculation (returning immediately with a query status), UNLESS there are very fresh results in the cache\n- `'lazy_async'` - kick off background calculation, UNLESS there are somewhat fresh results in the cache\n- `'force_blocking'` - calculate synchronously, even if fresh results are already cached\n- `'force_async'` - kick off background calculation, even if fresh results are already cached\n- `'force_cache'` - return cached data or a cache miss; always completes immediately as it never calculates Background calculation can be tracked using the `query_status` response field." + }, + "variables_override": { + "additionalProperties": { + "type": "object" + }, + "type": "object" } }, "required": ["query"], @@ -8662,35 +8882,101 @@ { "additionalProperties": false, "properties": { + "credible_intervals": { + "additionalProperties": { + "items": { + "type": "number" + }, + "maxItems": 2, + "minItems": 2, + "type": "array" + }, + "type": "object" + }, + "expected_loss": { + "type": "number" + }, "insight": { - "const": "FUNNELS", - "type": "string" + "$ref": "#/definitions/FunnelsQueryResponse" }, - "results": { + "probability": { "additionalProperties": { - "$ref": "#/definitions/ExperimentVariantFunnelResult" + "type": "number" }, "type": "object" + }, + "significance_code": { + "$ref": "#/definitions/ExperimentSignificanceCode" + }, + "significant": { + "type": "boolean" + }, + "variants": { + "items": { + "$ref": "#/definitions/ExperimentVariantFunnelsBaseStats" + }, + "type": "array" } }, - "required": ["insight", "results"], + "required": [ + "insight", + "variants", + "probability", + "significant", + "significance_code", + "expected_loss", + "credible_intervals" + ], "type": "object" }, { "additionalProperties": false, "properties": { + "credible_intervals": { + "additionalProperties": { + "items": { + "type": "number" + }, + "maxItems": 2, + "minItems": 2, + "type": "array" + }, + "type": "object" + }, "insight": { - "const": "TRENDS", - "type": "string" + "$ref": "#/definitions/TrendsQueryResponse" }, - "results": { + "p_value": { + "type": "number" + }, + "probability": { "additionalProperties": { - "$ref": "#/definitions/ExperimentVariantTrendResult" + "type": "number" }, "type": "object" + }, + "significance_code": { + "$ref": "#/definitions/ExperimentSignificanceCode" + }, + "significant": { + "type": "boolean" + }, + "variants": { + "items": { + "$ref": "#/definitions/ExperimentVariantTrendsBaseStats" + }, + "type": "array" } }, - "required": ["insight", "results"], + "required": [ + "insight", + "variants", + 
"probability", + "significant", + "significance_code", + "p_value", + "credible_intervals" + ], "type": "object" }, { @@ -9241,35 +9527,101 @@ { "additionalProperties": false, "properties": { + "credible_intervals": { + "additionalProperties": { + "items": { + "type": "number" + }, + "maxItems": 2, + "minItems": 2, + "type": "array" + }, + "type": "object" + }, + "expected_loss": { + "type": "number" + }, "insight": { - "const": "FUNNELS", - "type": "string" + "$ref": "#/definitions/FunnelsQueryResponse" }, - "results": { + "probability": { "additionalProperties": { - "$ref": "#/definitions/ExperimentVariantFunnelResult" + "type": "number" }, "type": "object" + }, + "significance_code": { + "$ref": "#/definitions/ExperimentSignificanceCode" + }, + "significant": { + "type": "boolean" + }, + "variants": { + "items": { + "$ref": "#/definitions/ExperimentVariantFunnelsBaseStats" + }, + "type": "array" } }, - "required": ["insight", "results"], + "required": [ + "credible_intervals", + "expected_loss", + "insight", + "probability", + "significance_code", + "significant", + "variants" + ], "type": "object" }, { "additionalProperties": false, "properties": { + "credible_intervals": { + "additionalProperties": { + "items": { + "type": "number" + }, + "maxItems": 2, + "minItems": 2, + "type": "array" + }, + "type": "object" + }, "insight": { - "const": "TRENDS", - "type": "string" + "$ref": "#/definitions/TrendsQueryResponse" }, - "results": { + "p_value": { + "type": "number" + }, + "probability": { "additionalProperties": { - "$ref": "#/definitions/ExperimentVariantTrendResult" + "type": "number" }, "type": "object" + }, + "significance_code": { + "$ref": "#/definitions/ExperimentSignificanceCode" + }, + "significant": { + "type": "boolean" + }, + "variants": { + "items": { + "$ref": "#/definitions/ExperimentVariantTrendsBaseStats" + }, + "type": "array" } }, - "required": ["insight", "results"], + "required": [ + "credible_intervals", + "insight", + "p_value", + "probability", + "significance_code", + "significant", + "variants" + ], "type": "object" }, { @@ -9649,10 +10001,10 @@ "$ref": "#/definitions/ErrorTrackingQuery" }, { - "$ref": "#/definitions/ExperimentFunnelQuery" + "$ref": "#/definitions/ExperimentFunnelsQuery" }, { - "$ref": "#/definitions/ExperimentTrendQuery" + "$ref": "#/definitions/ExperimentTrendsQuery" }, { "$ref": "#/definitions/DataVisualizationNode" diff --git a/frontend/src/queries/schema.ts b/frontend/src/queries/schema.ts index aa0e00c43745b..1887f57ee0f96 100644 --- a/frontend/src/queries/schema.ts +++ b/frontend/src/queries/schema.ts @@ -100,8 +100,8 @@ export enum NodeKind { WebGoalsQuery = 'WebGoalsQuery', // Experiment queries - ExperimentFunnelQuery = 'ExperimentFunnelQuery', - ExperimentTrendQuery = 'ExperimentTrendQuery', + ExperimentFunnelsQuery = 'ExperimentFunnelsQuery', + ExperimentTrendsQuery = 'ExperimentTrendsQuery', // Database metadata DatabaseSchemaQuery = 'DatabaseSchemaQuery', @@ -133,8 +133,8 @@ export type AnyDataNode = | WebGoalsQuery | SessionAttributionExplorerQuery | ErrorTrackingQuery - | ExperimentFunnelQuery - | ExperimentTrendQuery + | ExperimentFunnelsQuery + | ExperimentTrendsQuery /** * @discriminator kind @@ -161,8 +161,8 @@ export type QuerySchema = | WebGoalsQuery | SessionAttributionExplorerQuery | ErrorTrackingQuery - | ExperimentFunnelQuery - | ExperimentTrendQuery + | ExperimentFunnelsQuery + | ExperimentTrendsQuery // Interface nodes | DataVisualizationNode @@ -610,8 +610,8 @@ export interface DataTableNode | WebGoalsQuery | 
SessionAttributionExplorerQuery | ErrorTrackingQuery - | ExperimentFunnelQuery - | ExperimentTrendQuery + | ExperimentFunnelsQuery + | ExperimentTrendsQuery )['response'] > >, @@ -631,8 +631,8 @@ export interface DataTableNode | WebGoalsQuery | SessionAttributionExplorerQuery | ErrorTrackingQuery - | ExperimentFunnelQuery - | ExperimentTrendQuery + | ExperimentFunnelsQuery + | ExperimentTrendsQuery /** Columns shown in the table, unless the `source` provides them. */ columns?: HogQLExpression[] /** Columns that aren't shown in the table, even if in columns or returned data */ @@ -1221,6 +1221,7 @@ export interface QueryRequest { */ query: QuerySchema filters_override?: DashboardFilter + variables_override?: Record> } /** @@ -1595,44 +1596,62 @@ export type InsightQueryNode = | StickinessQuery | LifecycleQuery -export interface ExperimentVariantTrendResult { +export interface ExperimentVariantTrendsBaseStats { key: string count: number exposure: number absolute_exposure: number } -export interface ExperimentVariantFunnelResult { +export interface ExperimentVariantFunnelsBaseStats { key: string success_count: number failure_count: number } -export interface ExperimentTrendQueryResponse { - insight: InsightType.TRENDS - results: Record +export enum ExperimentSignificanceCode { + Significant = 'significant', + NotEnoughExposure = 'not_enough_exposure', + LowWinProbability = 'low_win_probability', + HighLoss = 'high_loss', + HighPValue = 'high_p_value', } -export type CachedExperimentTrendQueryResponse = CachedQueryResponse +export interface ExperimentTrendsQueryResponse { + insight: TrendsQueryResponse + variants: ExperimentVariantTrendsBaseStats[] + probability: Record + significant: boolean + significance_code: ExperimentSignificanceCode + p_value: number + credible_intervals: Record +} + +export type CachedExperimentTrendsQueryResponse = CachedQueryResponse -export interface ExperimentFunnelQueryResponse { - insight: InsightType.FUNNELS - results: Record +export interface ExperimentFunnelsQueryResponse { + insight: FunnelsQueryResponse + variants: ExperimentVariantFunnelsBaseStats[] + probability: Record + significant: boolean + significance_code: ExperimentSignificanceCode + expected_loss: number + credible_intervals: Record } -export type CachedExperimentFunnelQueryResponse = CachedQueryResponse +export type CachedExperimentFunnelsQueryResponse = CachedQueryResponse -export interface ExperimentFunnelQuery extends DataNode { - kind: NodeKind.ExperimentFunnelQuery +export interface ExperimentFunnelsQuery extends DataNode { + kind: NodeKind.ExperimentFunnelsQuery source: FunnelsQuery experiment_id: integer } -export interface ExperimentTrendQuery extends DataNode { - kind: NodeKind.ExperimentTrendQuery +export interface ExperimentTrendsQuery extends DataNode { + kind: NodeKind.ExperimentTrendsQuery count_query: TrendsQuery // Defaults to $feature_flag_called if not specified - // https://github.com/PostHog/posthog/blob/master/posthog/hogql_queries/experiments/experiment_trend_query_runner.py + // https://github.com/PostHog/posthog/blob/master/posthog/hogql_queries/experiments/experiment_trends_query_runner.py exposure_query?: TrendsQuery experiment_id: integer } @@ -1961,25 +1980,38 @@ export interface DashboardFilter { properties?: AnyPropertyFilter[] | null } -export interface InsightsThresholdAbsolute { +export interface InsightsThresholdBounds { lower?: number upper?: number } +export enum InsightThresholdType { + ABSOLUTE = 'absolute', + PERCENTAGE = 'percentage', +} + export interface 
InsightThreshold { - absoluteThreshold?: InsightsThresholdAbsolute - // More types of thresholds or conditions can be added here + type: InsightThresholdType + bounds?: InsightsThresholdBounds +} + +export enum AlertConditionType { + ABSOLUTE_VALUE = 'absolute_value', // default alert, checks absolute value of current interval + RELATIVE_INCREASE = 'relative_increase', // checks increase in value during current interval compared to previous interval + RELATIVE_DECREASE = 'relative_decrease', // checks decrease in value during current interval compared to previous interval } export interface AlertCondition { // Conditions in addition to the separate threshold // TODO: Think about things like relative thresholds, rate of change, etc. + type: AlertConditionType } export enum AlertState { FIRING = 'Firing', NOT_FIRING = 'Not firing', ERRORED = 'Errored', + SNOOZED = 'Snoozed', } export enum AlertCalculationInterval { diff --git a/frontend/src/queries/types.ts b/frontend/src/queries/types.ts index c0becc7b8a6a4..afbae27286816 100644 --- a/frontend/src/queries/types.ts +++ b/frontend/src/queries/types.ts @@ -1,7 +1,7 @@ import { ComponentType, HTMLProps } from 'react' import { QueryFeature } from '~/queries/nodes/DataTable/queryFeatures' -import { DataTableNode, InsightVizNode } from '~/queries/schema' +import { DataTableNode, DataVisualizationNode, InsightVizNode } from '~/queries/schema' import { ChartDisplayType, GraphPointPayload, InsightLogicProps, TrendResult } from '~/types' /** Pass custom metadata to queries. Used for e.g. custom columns in the DataTable. */ @@ -37,12 +37,12 @@ export interface ChartRenderingMetadata { export type QueryContextColumnTitleComponent = ComponentType<{ columnName: string - query: DataTableNode + query: DataTableNode | DataVisualizationNode }> export type QueryContextColumnComponent = ComponentType<{ columnName: string - query: DataTableNode + query: DataTableNode | DataVisualizationNode record: unknown recordIndex: number value: unknown diff --git a/frontend/src/scenes/activity/explore/EventDetails.tsx b/frontend/src/scenes/activity/explore/EventDetails.tsx index 8226a7289e266..d96b6d67702d0 100644 --- a/frontend/src/scenes/activity/explore/EventDetails.tsx +++ b/frontend/src/scenes/activity/explore/EventDetails.tsx @@ -26,6 +26,7 @@ export function EventDetails({ event, tableProps }: EventDetailsProps): JSX.Elem const displayedEventProperties: Properties = {} const visibleSystemProperties: Properties = {} + const featureFlagProperties: Properties = {} let systemPropsCount = 0 for (const key of Object.keys(event.properties)) { if (CORE_FILTER_DEFINITIONS_BY_GROUP.events[key] && CORE_FILTER_DEFINITIONS_BY_GROUP.events[key].system) { @@ -35,7 +36,11 @@ export function EventDetails({ event, tableProps }: EventDetailsProps): JSX.Elem } } if (!CORE_FILTER_DEFINITIONS_BY_GROUP.events[key] || !CORE_FILTER_DEFINITIONS_BY_GROUP.events[key].system) { - displayedEventProperties[key] = event.properties[key] + if (key.startsWith('$feature') || key === '$active_feature_flags') { + featureFlagProperties[key] = event.properties[key] + } else { + displayedEventProperties[key] = event.properties[key] + } } } @@ -99,5 +104,25 @@ export function EventDetails({ event, tableProps }: EventDetailsProps): JSX.Elem }) } + if (Object.keys(featureFlagProperties).length > 0) { + tabs.push({ + key: 'feature_flags', + label: 'Feature flags', + content: ( +
+ +
+ ), + }) + } + return } diff --git a/frontend/src/scenes/activity/explore/Events.stories.tsx b/frontend/src/scenes/activity/explore/Events.stories.tsx index d38e1686b4ec9..0545ece9a5640 100644 --- a/frontend/src/scenes/activity/explore/Events.stories.tsx +++ b/frontend/src/scenes/activity/explore/Events.stories.tsx @@ -14,7 +14,7 @@ const meta: Meta = { decorators: [ mswDecorator({ post: { - '/api/projects/:team_id/query': eventsQuery, + '/api/environments/:team_id/query': eventsQuery, }, }), ], diff --git a/frontend/src/scenes/activity/live/liveEventsTableLogic.tsx b/frontend/src/scenes/activity/live/liveEventsTableLogic.tsx index 069a88b93813e..4e52a3d96ebf0 100644 --- a/frontend/src/scenes/activity/live/liveEventsTableLogic.tsx +++ b/frontend/src/scenes/activity/live/liveEventsTableLogic.tsx @@ -1,6 +1,5 @@ import { lemonToast, Spinner } from '@posthog/lemon-ui' -import { actions, connect, events, kea, listeners, path, reducers, selectors } from 'kea' -import { router } from 'kea-router' +import { actions, connect, events, kea, listeners, path, props, reducers, selectors } from 'kea' import { liveEventsHostOrigin } from 'lib/utils/apiHost' import { teamLogic } from 'scenes/teamLogic' @@ -10,8 +9,13 @@ import type { liveEventsTableLogicType } from './liveEventsTableLogicType' const ERROR_TOAST_ID = 'live-stream-error' +export interface LiveEventsTableProps { + showLiveStreamErrorToast: boolean +} + export const liveEventsTableLogic = kea([ path(['scenes', 'activity', 'live-events', 'liveEventsTableLogic']), + props({} as LiveEventsTableProps), connect({ values: [teamLogic, ['currentTeam']], }), @@ -26,7 +30,6 @@ export const liveEventsTableLogic = kea([ setClientSideFilters: (clientSideFilters) => ({ clientSideFilters }), pollStats: true, setStats: (stats) => ({ stats }), - showLiveStreamErrorToast: true, addEventHost: (eventHost) => ({ eventHost }), })), reducers({ @@ -110,7 +113,7 @@ export const liveEventsTableLogic = kea([ }, ], })), - listeners(({ actions, values, cache }) => ({ + listeners(({ actions, values, cache, props }) => ({ setFilters: () => { actions.clearEvents() actions.updateEventsConnection() @@ -153,15 +156,13 @@ export const liveEventsTableLogic = kea([ } source.onerror = function (e) { - console.error('Failed to poll events: ', e) - if ( - !cache.hasShownLiveStreamErrorToast && - !router.values.currentLocation.pathname.includes('onboarding') - ) { - lemonToast.error( - `Cannot connect to the live event stream. Continuing to retry in the background…`, - { icon: , toastId: ERROR_TOAST_ID, autoClose: false } - ) + if (!cache.hasShownLiveStreamErrorToast && props.showLiveStreamErrorToast) { + console.error('Failed to poll events. You likely have no events coming in.', e) + lemonToast.error(`No live events found. 
Continuing to retry in the background…`, { + icon: , + toastId: ERROR_TOAST_ID, + autoClose: false, + }) cache.hasShownLiveStreamErrorToast = true // Only show once } } diff --git a/frontend/src/scenes/appContextLogic.ts b/frontend/src/scenes/appContextLogic.ts index ce3a6a11a317c..3e17e10876136 100644 --- a/frontend/src/scenes/appContextLogic.ts +++ b/frontend/src/scenes/appContextLogic.ts @@ -7,6 +7,7 @@ import { UserType } from '~/types' import type { appContextLogicType } from './appContextLogicType' import { organizationLogic } from './organizationLogic' +import { projectLogic } from './projectLogic' import { teamLogic } from './teamLogic' import { userLogic } from './userLogic' @@ -19,7 +20,9 @@ export const appContextLogic = kea([ organizationLogic, ['loadCurrentOrganizationSuccess'], teamLogic, - ['loadCurrentTeam', 'loadCurrentTeamSuccess'], + ['loadCurrentTeam'], + projectLogic, + ['loadCurrentProject'], ], }), afterMount(({ actions }) => { @@ -43,6 +46,7 @@ export const appContextLogic = kea([ // NOTE: This doesn't fix the issue but removes the confusion of seeing incorrect user info in the UI actions.loadUserSuccess(remoteUser) actions.loadCurrentOrganizationSuccess(remoteUser.organization) + actions.loadCurrentProject() actions.loadCurrentTeam() } }) diff --git a/frontend/src/scenes/billing/Billing.tsx b/frontend/src/scenes/billing/Billing.tsx index e71b87f17be6f..75038e5fbd1ba 100644 --- a/frontend/src/scenes/billing/Billing.tsx +++ b/frontend/src/scenes/billing/Billing.tsx @@ -115,7 +115,7 @@ export function Billing(): JSX.Element { ) : null} {!billing?.has_active_subscription && platformAndSupportProduct && ( -
+
)} @@ -130,7 +130,7 @@ export function Billing(): JSX.Element { >
)} -
+

{billing?.has_active_subscription ? 'Billing period' : 'Cycle'}:{' '} {billing.billing_period.current_period_start.format('LL')} to{' '} @@ -202,7 +202,7 @@ export function Billing(): JSX.Element { remaining)

{!billing.has_active_subscription && ( -

+

Monthly free allocation resets at the end of the cycle.

)} @@ -212,8 +212,8 @@ export function Billing(): JSX.Element { )}
- {!isOnboarding && billing?.has_active_subscription && ( -
+ {!isOnboarding && billing?.customer_id && billing?.stripe_portal_url && ( +
- Manage card details and view past invoices + {billing.has_active_subscription + ? 'Manage card details and invoices' + : 'View past invoices'}
)} diff --git a/frontend/src/scenes/dashboard/Dashboard.tsx b/frontend/src/scenes/dashboard/Dashboard.tsx index d6576eb4fe20b..5f9e59ee897de 100644 --- a/frontend/src/scenes/dashboard/Dashboard.tsx +++ b/frontend/src/scenes/dashboard/Dashboard.tsx @@ -12,6 +12,7 @@ import { InsightErrorState } from 'scenes/insights/EmptyStates' import { SceneExport } from 'scenes/sceneTypes' import { urls } from 'scenes/urls' +import { VariablesForDashboard } from '~/queries/nodes/DataVisualization/Components/Variables/Variables' import { DashboardMode, DashboardPlacement, DashboardType, QueryBasedInsightModel } from '~/types' import { DashboardHeader } from './DashboardHeader' @@ -124,6 +125,7 @@ function DashboardScene(): JSX.Element {
)}
+
)} diff --git a/frontend/src/scenes/dashboard/DashboardInsightCardLegend.stories.tsx b/frontend/src/scenes/dashboard/DashboardInsightCardLegend.stories.tsx index 5c9caef93e5d2..bf3eebbc439e1 100644 --- a/frontend/src/scenes/dashboard/DashboardInsightCardLegend.stories.tsx +++ b/frontend/src/scenes/dashboard/DashboardInsightCardLegend.stories.tsx @@ -11,8 +11,8 @@ const meta: Meta = { decorators: [ mswDecorator({ get: { - '/api/projects/:team_id/dashboards/1/': require('./__mocks__/dashboard_insight_card_legend_query.json'), - '/api/projects/:team_id/dashboards/2/': require('./__mocks__/dashboard_insight_card_legend_legacy.json'), + '/api/environments/:team_id/dashboards/1/': require('./__mocks__/dashboard_insight_card_legend_query.json'), + '/api/environments/:team_id/dashboards/2/': require('./__mocks__/dashboard_insight_card_legend_legacy.json'), }, }), ], diff --git a/frontend/src/scenes/dashboard/DashboardItems.tsx b/frontend/src/scenes/dashboard/DashboardItems.tsx index bef19ceff2796..1582bcc49480b 100644 --- a/frontend/src/scenes/dashboard/DashboardItems.tsx +++ b/frontend/src/scenes/dashboard/DashboardItems.tsx @@ -27,6 +27,7 @@ export function DashboardItems(): JSX.Element { refreshStatus, canEditDashboard, itemsLoading, + temporaryVariables, } = useValues(dashboardLogic) const { updateLayouts, @@ -152,6 +153,7 @@ export function DashboardItems(): JSX.Element { showDetailsControls={placement != DashboardPlacement.Export} placement={placement} loadPriority={smLayout ? smLayout.y * 1000 + smLayout.x : undefined} + variablesOverride={temporaryVariables} {...commonTileProps} /> ) diff --git a/frontend/src/scenes/dashboard/Dashboards.stories.tsx b/frontend/src/scenes/dashboard/Dashboards.stories.tsx index 34e6be5f20878..16d30235fa2a2 100644 --- a/frontend/src/scenes/dashboard/Dashboards.stories.tsx +++ b/frontend/src/scenes/dashboard/Dashboards.stories.tsx @@ -18,13 +18,13 @@ const meta: Meta = { decorators: [ mswDecorator({ get: { - '/api/projects/:team_id/dashboards/': require('./__mocks__/dashboards.json'), - '/api/projects/:team_id/dashboards/1/': require('./__mocks__/dashboard1.json'), - '/api/projects/:team_id/dashboards/1/collaborators/': [], - '/api/projects/:team_id/dashboards/2/': [500, { detail: 'Server error' }], + '/api/environments/:team_id/dashboards/': require('./__mocks__/dashboards.json'), + '/api/environments/:team_id/dashboards/1/': require('./__mocks__/dashboard1.json'), + '/api/environments/:team_id/dashboards/1/collaborators/': [], + '/api/environments/:team_id/dashboards/2/': [500, { detail: 'Server error' }], '/api/projects/:team_id/dashboard_templates/': require('./__mocks__/dashboard_templates.json'), '/api/projects/:team_id/dashboard_templates/json_schema/': require('./__mocks__/dashboard_template_schema.json'), - '/api/projects/:team_id/dashboards/:dash_id/sharing/': { + '/api/environments/:team_id/dashboards/:dash_id/sharing/': { created_at: '2023-02-25T13:28:20.454940Z', enabled: false, access_token: 'a-secret-token', diff --git a/frontend/src/scenes/dashboard/dashboardLogic.test.ts b/frontend/src/scenes/dashboard/dashboardLogic.test.ts index fea16483894fd..81dac66ad0317 100644 --- a/frontend/src/scenes/dashboard/dashboardLogic.test.ts +++ b/frontend/src/scenes/dashboard/dashboardLogic.test.ts @@ -179,7 +179,7 @@ describe('dashboardLogic', () => { } useMocks({ get: { - '/api/projects/:team/query/123/': () => [ + '/api/environments/:team_id/query/123/': () => [ 200, { query_status: { @@ -187,14 +187,14 @@ describe('dashboardLogic', () => { }, }, ], - 
'/api/projects/:team/dashboards/5/': { ...dashboards['5'] }, - '/api/projects/:team/dashboards/6/': { ...dashboards['6'] }, - '/api/projects/:team/dashboards/7/': () => [500, '💣'], - '/api/projects/:team/dashboards/8/': { ...dashboards['8'] }, - '/api/projects/:team/dashboards/9/': { ...dashboards['9'] }, - '/api/projects/:team/dashboards/10/': { ...dashboards['10'] }, - '/api/projects/:team/dashboards/11/': { ...dashboards['11'] }, - '/api/projects/:team/dashboards/': { + '/api/environments/:team_id/dashboards/5/': { ...dashboards['5'] }, + '/api/environments/:team_id/dashboards/6/': { ...dashboards['6'] }, + '/api/environments/:team_id/dashboards/7/': () => [500, '💣'], + '/api/environments/:team_id/dashboards/8/': { ...dashboards['8'] }, + '/api/environments/:team_id/dashboards/9/': { ...dashboards['9'] }, + '/api/environments/:team_id/dashboards/10/': { ...dashboards['10'] }, + '/api/environments/:team_id/dashboards/11/': { ...dashboards['11'] }, + '/api/environments/:team_id/dashboards/': { count: 6, next: null, previous: null, @@ -206,9 +206,9 @@ describe('dashboardLogic', () => { { ...dashboards['10'] }, ], }, - '/api/projects/:team/insights/1001/': () => [500, '💣'], - '/api/projects/:team/insights/800/': () => [200, { ...insights['800'] }], - '/api/projects/:team/insights/:id/': (req) => { + '/api/environments/:team_id/insights/1001/': () => [500, '💣'], + '/api/environments/:team_id/insights/800/': () => [200, { ...insights['800'] }], + '/api/environments/:team_id/insights/:id/': (req) => { const dashboard = req.url.searchParams.get('from_dashboard') if (!dashboard) { throw new Error('the logic must always add this param') @@ -221,15 +221,15 @@ describe('dashboardLogic', () => { }, }, post: { - '/api/projects/:team/insights/cancel/': [201], + '/api/environments/:team_id/insights/cancel/': [201], }, patch: { - '/api/projects/:team/dashboards/:id/': async (req) => { + '/api/environments/:team_id/dashboards/:id/': async (req) => { const dashboardId = typeof req.params['id'] === 'string' ? 
req.params['id'] : req.params['id'][0] const payload = await req.json() return [200, { ...dashboards[dashboardId], ...payload }] }, - '/api/projects/:team/dashboards/:id/move_tile/': async (req) => { + '/api/environments/:team_id/dashboards/:id/move_tile/': async (req) => { // backend updates the two dashboards and the insight const jsonPayload = await req.json() const { toDashboard, tile: tileToUpdate } = jsonPayload @@ -256,7 +256,7 @@ describe('dashboardLogic', () => { return [200, { ...from }] }, - '/api/projects/:team/insights/:id/': async (req) => { + '/api/environments/:team_id/insights/:id/': async (req) => { try { const updates = await req.json() if (typeof updates !== 'object') { @@ -306,10 +306,10 @@ describe('dashboardLogic', () => { jest.spyOn(api, 'update') await expectLogic(logic, () => { - logic.actions.updateFiltersAndLayouts() + logic.actions.updateFiltersAndLayoutsAndVariables() }).toFinishAllListeners() - expect(api.update).toHaveBeenCalledWith(`api/projects/${MOCK_TEAM_ID}/dashboards/5`, { + expect(api.update).toHaveBeenCalledWith(`api/environments/${MOCK_TEAM_ID}/dashboards/5`, { tiles: [ { id: 0, @@ -329,6 +329,7 @@ describe('dashboardLogic', () => { date_to: null, properties: [], }, + variables: {}, }) }) }) @@ -393,7 +394,7 @@ describe('dashboardLogic', () => { await expectLogic(dashboardEightlogic).toFinishAllListeners() expect(api.update).toHaveBeenCalledWith( - `api/projects/${MOCK_TEAM_ID}/dashboards/${9}/move_tile`, + `api/environments/${MOCK_TEAM_ID}/dashboards/${9}/move_tile`, expect.objectContaining({ tile: sourceTile, toDashboard: 8 }) ) }) diff --git a/frontend/src/scenes/dashboard/dashboardLogic.tsx b/frontend/src/scenes/dashboard/dashboardLogic.tsx index 2cbb301df28d3..4addf1f04f4c0 100644 --- a/frontend/src/scenes/dashboard/dashboardLogic.tsx +++ b/frontend/src/scenes/dashboard/dashboardLogic.tsx @@ -23,6 +23,7 @@ import { Link } from 'lib/lemon-ui/Link' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { clearDOMTextSelection, isAbortedRequest, shouldCancelQuery, toParams, uuid } from 'lib/utils' import { DashboardEventSource, eventUsageLogic } from 'lib/utils/eventUsageLogic' +import uniqBy from 'lodash.uniqby' import { Layout, Layouts } from 'react-grid-layout' import { calculateLayouts } from 'scenes/dashboard/tileLayouts' import { Scene } from 'scenes/sceneTypes' @@ -31,9 +32,11 @@ import { userLogic } from 'scenes/userLogic' import { dashboardsModel } from '~/models/dashboardsModel' import { insightsModel } from '~/models/insightsModel' +import { variableDataLogic } from '~/queries/nodes/DataVisualization/Components/Variables/variableDataLogic' +import { Variable } from '~/queries/nodes/DataVisualization/types' import { getQueryBasedDashboard, getQueryBasedInsightModel } from '~/queries/nodes/InsightViz/utils' import { pollForResults } from '~/queries/query' -import { DashboardFilter, RefreshType } from '~/queries/schema' +import { DashboardFilter, DataVisualizationNode, HogQLVariable, NodeKind, RefreshType } from '~/queries/schema' import { AnyPropertyFilter, Breadcrumb, @@ -139,14 +142,16 @@ async function getSingleInsight( queryId: string, refresh: RefreshType, methodOptions?: ApiMethodOptions, - filtersOverride?: DashboardFilter + filtersOverride?: DashboardFilter, + variablesOverride?: Record ): Promise { - const apiUrl = `api/projects/${currentTeamId}/insights/${insight.id}/?${toParams({ + const apiUrl = `api/environments/${currentTeamId}/insights/${insight.id}/?${toParams({ refresh, from_dashboard: dashboardId, // needed 
to load insight in correct context client_query_id: queryId, session_id: currentSessionId(), ...(filtersOverride ? { filters_override: filtersOverride } : {}), + ...(variablesOverride ? { variables_override: variablesOverride } : {}), })}` const insightResponse: Response = await api.getResponse(apiUrl, methodOptions) const legacyInsight: InsightModel | null = await getJSONOrNull(insightResponse) @@ -156,7 +161,7 @@ async function getSingleInsight( export const dashboardLogic = kea([ path(['scenes', 'dashboard', 'dashboardLogic']), connect(() => ({ - values: [teamLogic, ['currentTeamId'], featureFlagLogic, ['featureFlags']], + values: [teamLogic, ['currentTeamId'], featureFlagLogic, ['featureFlags'], variableDataLogic, ['variables']], logic: [dashboardsModel, insightsModel, eventUsageLogic], })), @@ -169,7 +174,7 @@ export const dashboardLogic = kea([ return props.id }), - actions({ + actions(({ values }) => ({ loadDashboard: (payload: { refresh?: RefreshType action: @@ -201,7 +206,10 @@ export const dashboardLogic = kea([ date_to, }), setProperties: (properties: AnyPropertyFilter[] | null) => ({ properties }), - setFiltersAndLayouts: (filters: DashboardFilter) => ({ filters }), + setFiltersAndLayoutsAndVariables: (filters: DashboardFilter, variables: Record) => ({ + filters, + variables, + }), setAutoRefresh: (enabled: boolean, interval: number) => ({ enabled, interval }), setRefreshStatus: (shortId: InsightShortId, loading = false, queued = false) => ({ shortId, loading, queued }), setRefreshStatuses: (shortIds: InsightShortId[], loading = false, queued = false) => ({ @@ -233,8 +241,14 @@ export const dashboardLogic = kea([ setInitialLoadResponseBytes: (responseBytes: number) => ({ responseBytes }), abortQuery: (payload: { dashboardQueryId: string; queryId: string; queryStartTime: number }) => payload, abortAnyRunningQuery: true, - updateFiltersAndLayouts: true, - }), + updateFiltersAndLayoutsAndVariables: true, + overrideVariableValue: (variableId: string, value: any) => ({ + variableId, + value, + allVariables: values.variables, + }), + resetVariables: () => ({ variables: values.insightVariables }), + })), loaders(({ actions, props, values }) => ({ dashboard: [ @@ -248,7 +262,8 @@ export const dashboardLogic = kea([ try { const apiUrl = values.apiUrl( refresh || 'async', - action === 'preview' ? values.temporaryFilters : undefined + action === 'preview' ? values.temporaryFilters : undefined, + action === 'preview' ? 
values.temporaryVariables : undefined ) const dashboardResponse: Response = await api.getResponse(apiUrl) const dashboard: DashboardType | null = await getJSONOrNull(dashboardResponse) @@ -282,7 +297,7 @@ export const dashboardLogic = kea([ throw error } }, - updateFiltersAndLayouts: async (_, breakpoint) => { + updateFiltersAndLayoutsAndVariables: async (_, breakpoint) => { actions.abortAnyRunningQuery() try { @@ -294,9 +309,10 @@ export const dashboardLogic = kea([ breakpoint() const dashboard: DashboardType = await api.update( - `api/projects/${values.currentTeamId}/dashboards/${props.id}`, + `api/environments/${values.currentTeamId}/dashboards/${props.id}`, { filters: values.filters, + variables: values.insightVariables, tiles: layoutsToUpdate, } ) @@ -307,7 +323,7 @@ export const dashboardLogic = kea([ } }, updateTileColor: async ({ tileId, color }) => { - await api.update(`api/projects/${values.currentTeamId}/dashboards/${props.id}`, { + await api.update(`api/environments/${values.currentTeamId}/dashboards/${props.id}`, { tiles: [{ id: tileId, color }], }) const matchingTile = values.tiles.find((tile) => tile.id === tileId) @@ -318,7 +334,7 @@ export const dashboardLogic = kea([ }, removeTile: async ({ tile }) => { try { - await api.update(`api/projects/${values.currentTeamId}/dashboards/${props.id}`, { + await api.update(`api/environments/${values.currentTeamId}/dashboards/${props.id}`, { tiles: [{ id: tile.id, deleted: true }], }) dashboardsModel.actions.tileRemovedFromDashboard({ @@ -361,7 +377,7 @@ export const dashboardLogic = kea([ } const dashboard: DashboardType = await api.update( - `api/projects/${values.currentTeamId}/dashboards/${props.id}`, + `api/environments/${values.currentTeamId}/dashboards/${props.id}`, { tiles: [newTile], } @@ -381,7 +397,7 @@ export const dashboardLogic = kea([ return values.dashboard } const dashboard: DashboardType = await api.update( - `api/projects/${teamLogic.values.currentTeamId}/dashboards/${props.id}/move_tile`, + `api/environments/${teamLogic.values.currentTeamId}/dashboards/${props.id}/move_tile`, { tile, toDashboard, @@ -432,6 +448,48 @@ export const dashboardLogic = kea([ }, }, ], + temporaryVariables: [ + {} as Record, + { + overrideVariableValue: (state, { variableId, value, allVariables }) => { + const foundExistingVar = allVariables.find((n) => n.id === variableId) + if (!foundExistingVar) { + return state + } + + return { + ...state, + [variableId]: { code_name: foundExistingVar.code_name, variableId: foundExistingVar.id, value }, + } + }, + resetVariables: (_, { variables }) => ({ ...variables }), + loadDashboardSuccess: (state, { dashboard, payload }) => + dashboard + ? { + ...state, + // don't update filters if we're previewing + ...(payload?.action === 'preview' ? {} : dashboard.variables ?? {}), + } + : state, + }, + ], + insightVariables: [ + {} as Record, + { + setFiltersAndLayoutsAndVariables: (state, { variables }) => ({ + ...state, + ...variables, + }), + loadDashboardSuccess: (state, { dashboard, payload }) => + dashboard + ? { + ...state, + // don't update filters if we're previewing + ...(payload?.action === 'preview' ? {} : dashboard.variables ?? 
{}), + } + : state, + }, + ], temporaryFilters: [ { date_from: null, @@ -466,7 +524,7 @@ export const dashboardLogic = kea([ properties: null, } as DashboardFilter, { - setFiltersAndLayouts: (state, { filters }) => ({ + setFiltersAndLayoutsAndVariables: (state, { filters }) => ({ ...state, ...filters, }), @@ -689,6 +747,44 @@ export const dashboardLogic = kea([ ], })), selectors(() => ({ + dashboardVariables: [ + (s) => [s.dashboard, s.variables, s.temporaryVariables], + ( + dashboard: DashboardType, + allVariables: Variable[], + temporaryVariables: Record + ): Variable[] => { + const dataVizNodes = dashboard.tiles + .map((n) => n.insight?.query) + .filter((n) => n?.kind === NodeKind.DataVisualizationNode) + .filter((n): n is DataVisualizationNode => Boolean(n)) + const hogQLVariables = dataVizNodes + .map((n) => n.source.variables) + .filter((n): n is Record => Boolean(n)) + .flatMap((n) => Object.values(n)) + + const uniqueVars = uniqBy(hogQLVariables, (n) => n.variableId) + return uniqueVars + .map((v) => { + const foundVar = allVariables.find((n) => n.id === v.variableId) + + if (!foundVar) { + return null + } + + const overridenValue = temporaryVariables[v.variableId]?.value + + // Overwrite the variable `value` from the insight + const resultVar: Variable = { + ...foundVar, + value: overridenValue ?? v.value ?? foundVar.value, + } + + return resultVar + }) + .filter((n): n is Variable => Boolean(n)) + }, + ], asDashboardTemplate: [ (s) => [s.dashboard], (dashboard: DashboardType): DashboardTemplateEditorType | undefined => { @@ -731,10 +827,15 @@ export const dashboardLogic = kea([ apiUrl: [ () => [(_, props) => props.id], (id) => { - return (refresh?: RefreshType, filtersOverride?: DashboardFilter) => - `api/projects/${teamLogic.values.currentTeamId}/dashboards/${id}/?${toParams({ + return ( + refresh?: RefreshType, + filtersOverride?: DashboardFilter, + variablesOverride?: Record + ) => + `api/environments/${teamLogic.values.currentTeamId}/dashboards/${id}/?${toParams({ refresh, filters_override: filtersOverride, + variables_override: variablesOverride, })}` }, ], @@ -947,7 +1048,7 @@ export const dashboardLogic = kea([ }, })), listeners(({ actions, values, cache, props, sharedListeners }) => ({ - updateFiltersAndLayoutsSuccess: () => { + updateFiltersAndLayoutsAndVariablesSuccess: () => { actions.loadDashboard({ action: 'update' }) }, setRefreshError: sharedListeners.reportRefreshTiming, @@ -1046,7 +1147,10 @@ export const dashboardLogic = kea([ insight, dashboardId, uuid(), - 'force_async' + 'force_async', + undefined, + undefined, + values.temporaryVariables ) dashboardsModel.actions.updateDashboardInsight(refreshedInsight!) // Start polling for results @@ -1138,7 +1242,8 @@ export const dashboardLogic = kea([ queryId, 'force_cache', methodOptions, - action === 'preview' ? values.temporaryFilters : undefined + action === 'preview' ? values.temporaryFilters : undefined, + action === 'preview' ? 
values.temporaryVariables : undefined ) if (action === 'preview' && polledInsight!.dashboard_tiles) { @@ -1187,8 +1292,8 @@ export const dashboardLogic = kea([ eventUsageLogic.actions.reportDashboardRefreshed(dashboardId, values.newestRefreshed) }, - setFiltersAndLayouts: ({ filters: { date_from, date_to } }) => { - actions.updateFiltersAndLayouts() + setFiltersAndLayoutsAndVariables: ({ filters: { date_from, date_to } }) => { + actions.updateFiltersAndLayoutsAndVariables() eventUsageLogic.actions.reportDashboardDateRangeChanged(date_from, date_to) eventUsageLogic.actions.reportDashboardPropertiesChanged() }, @@ -1203,12 +1308,13 @@ export const dashboardLogic = kea([ // reset filters to that before previewing actions.setDates(values.filters.date_from ?? null, values.filters.date_to ?? null) actions.setProperties(values.filters.properties ?? null) + actions.resetVariables() // also reset layout to that we stored in dashboardLayouts // this is done in the reducer for dashboard } else if (source === DashboardEventSource.DashboardHeaderSaveDashboard) { // save edit mode changes - actions.setFiltersAndLayouts(values.temporaryFilters) + actions.setFiltersAndLayoutsAndVariables(values.temporaryFilters, values.temporaryVariables) } } @@ -1282,7 +1388,7 @@ export const dashboardLogic = kea([ abortQuery: async ({ dashboardQueryId, queryId, queryStartTime }) => { const { currentTeamId } = values - await api.create(`api/projects/${currentTeamId}/insights/cancel`, { client_query_id: dashboardQueryId }) + await api.create(`api/environments/${currentTeamId}/insights/cancel`, { client_query_id: dashboardQueryId }) // TRICKY: we cancel just once using the dashboard query id. // we can record the queryId that happened to capture the AbortError exception @@ -1305,6 +1411,10 @@ export const dashboardLogic = kea([ setDates: () => { actions.loadDashboard({ action: 'preview' }) }, + overrideVariableValue: () => { + actions.setDashboardMode(DashboardMode.Edit, null) + actions.loadDashboard({ action: 'preview' }) + }, })), urlToAction(({ values, actions }) => ({ diff --git a/frontend/src/scenes/dashboard/dashboards/dashboardsLogic.test.ts b/frontend/src/scenes/dashboard/dashboards/dashboardsLogic.test.ts index 61d98a07a914f..d3723a18f7deb 100644 --- a/frontend/src/scenes/dashboard/dashboards/dashboardsLogic.test.ts +++ b/frontend/src/scenes/dashboard/dashboards/dashboardsLogic.test.ts @@ -39,7 +39,7 @@ describe('dashboardsLogic', () => { beforeEach(async () => { useMocks({ get: { - '/api/projects/:team/dashboards/': { + '/api/environments/:team_id/dashboards/': { count: 6, next: null, previous: null, diff --git a/frontend/src/scenes/dashboard/newDashboardLogic.ts b/frontend/src/scenes/dashboard/newDashboardLogic.ts index 44fa0252f5889..6749067872258 100644 --- a/frontend/src/scenes/dashboard/newDashboardLogic.ts +++ b/frontend/src/scenes/dashboard/newDashboardLogic.ts @@ -137,7 +137,7 @@ export const newDashboardLogic = kea([ actions.setIsLoading(true) try { const result: DashboardType = await api.create( - `api/projects/${teamLogic.values.currentTeamId}/dashboards/`, + `api/environments/${teamLogic.values.currentTeamId}/dashboards/`, { name: name, description: description, @@ -195,7 +195,7 @@ export const newDashboardLogic = kea([ try { const result: DashboardType = await api.create( - `api/projects/${teamLogic.values.currentTeamId}/dashboards/create_from_template_json`, + `api/environments/${teamLogic.values.currentTeamId}/dashboards/create_from_template_json`, { template: dashboardJSON, creation_context: 
creationContext } ) actions.hideNewDashboardModal() diff --git a/frontend/src/scenes/data-management/DataManagementScene.stories.tsx b/frontend/src/scenes/data-management/DataManagementScene.stories.tsx index 0df671d07f3bd..b8ce6c6ae967d 100644 --- a/frontend/src/scenes/data-management/DataManagementScene.stories.tsx +++ b/frontend/src/scenes/data-management/DataManagementScene.stories.tsx @@ -238,7 +238,7 @@ const meta: Meta = { }, }, post: { - '/api/projects/:team_id/query/': (req) => { + '/api/environments/:team_id/query/': (req) => { if ((req.body as any).query.kind === 'DatabaseSchemaQuery') { return [200, MOCK_DATABASE] } diff --git a/frontend/src/scenes/data-management/events/eventDefinitionsTableLogic.test.ts b/frontend/src/scenes/data-management/events/eventDefinitionsTableLogic.test.ts index 2767047a54cdc..461b35d5a3acc 100644 --- a/frontend/src/scenes/data-management/events/eventDefinitionsTableLogic.test.ts +++ b/frontend/src/scenes/data-management/events/eventDefinitionsTableLogic.test.ts @@ -98,7 +98,7 @@ describe('eventDefinitionsTableLogic', () => { ] } }, - '/api/projects/:team/events': (req) => { + '/api/environments/:team_id/events': (req) => { if ( req.url.searchParams.get('limit') === '1' && req.url.searchParams.get('event') === 'event_with_example' @@ -259,13 +259,13 @@ describe('eventDefinitionsTableLogic', () => { [propertiesStartingUrl]: partial({ count: 5, }), - [`api/projects/${MOCK_TEAM_ID}/events?event=event1&limit=1`]: partial(mockEvent.properties), + [`api/environments/${MOCK_TEAM_ID}/events?event=event1&limit=1`]: partial(mockEvent.properties), }), }) expect(api.get).toHaveBeenCalledTimes(3) expect(api.get).toHaveBeenNthCalledWith(1, propertiesStartingUrl) - expect(api.get).toHaveBeenNthCalledWith(2, `api/projects/${MOCK_TEAM_ID}/events?event=event1&limit=1`) + expect(api.get).toHaveBeenNthCalledWith(2, `api/environments/${MOCK_TEAM_ID}/events?event=event1&limit=1`) expect(api.get).toHaveBeenNthCalledWith(3, startingUrl) await expectLogic(logic, () => { diff --git a/frontend/src/scenes/data-warehouse/external/dataWarehouseExternalSceneLogic.ts b/frontend/src/scenes/data-warehouse/external/dataWarehouseExternalSceneLogic.ts index 17da8d174d375..77f809d35bfe6 100644 --- a/frontend/src/scenes/data-warehouse/external/dataWarehouseExternalSceneLogic.ts +++ b/frontend/src/scenes/data-warehouse/external/dataWarehouseExternalSceneLogic.ts @@ -104,6 +104,7 @@ export const dataWarehouseExternalSceneLogic = kea } -const sourceFieldToElement = (field: SourceFieldConfig, sourceConfig: SourceConfig): JSX.Element => { +const sourceFieldToElement = (field: SourceFieldConfig, sourceConfig: SourceConfig, lastValue?: any): JSX.Element => { if (field.type === 'switch-group') { return ( {({ value, onChange }) => ( <> - + {value && ( - {field.fields.map((field) => sourceFieldToElement(field, sourceConfig))} + {field.fields.map((field) => + sourceFieldToElement(field, sourceConfig, lastValue?.[field.name]) + )} )} @@ -43,11 +47,21 @@ const sourceFieldToElement = (field: SourceFieldConfig, sourceConfig: SourceConf > {({ value, onChange }) => ( <> - + {field.options .find((n) => n.value === (value ?? 
field.defaultValue)) - ?.fields?.map((field) => sourceFieldToElement(field, sourceConfig))} + ?.fields?.map((field) => + sourceFieldToElement(field, sourceConfig, lastValue?.[field.name]) + )} )} @@ -63,6 +77,7 @@ const sourceFieldToElement = (field: SourceFieldConfig, sourceConfig: SourceConf data-attr={field.name} placeholder={field.placeholder} minRows={4} + defaultValue={lastValue} /> ) @@ -102,32 +117,33 @@ const sourceFieldToElement = (field: SourceFieldConfig, sourceConfig: SourceConf data-attr={field.name} placeholder={field.placeholder} type={field.type} + defaultValue={lastValue} /> ) } -export default function SourceForm({ sourceConfig }: SourceFormProps): JSX.Element { - const { source } = useValues(sourceWizardLogic) - const showSourceFields = SOURCE_DETAILS[sourceConfig.name].showSourceForm - ? SOURCE_DETAILS[sourceConfig.name].showSourceForm?.(source.payload) - : true - const showPrefix = SOURCE_DETAILS[sourceConfig.name].showPrefix - ? SOURCE_DETAILS[sourceConfig.name].showPrefix?.(source.payload) - : true +export default function SourceFormContainer(props: SourceFormProps): JSX.Element { + return ( +
+ + + ) +} +export function SourceFormComponent({ sourceConfig, showPrefix = true, jobInputs }: SourceFormProps): JSX.Element { return ( -
- {showSourceFields && ( - - {SOURCE_DETAILS[sourceConfig.name].fields.map((field) => sourceFieldToElement(field, sourceConfig))} - - )} +
+ + {SOURCE_DETAILS[sourceConfig.name].fields.map((field) => + sourceFieldToElement(field, sourceConfig, jobInputs?.[field.name]) + )} + {showPrefix && ( )} - +
) } diff --git a/frontend/src/scenes/data-warehouse/external/forms/SyncMethodForm.tsx b/frontend/src/scenes/data-warehouse/external/forms/SyncMethodForm.tsx index 52238ef21654c..8d4bafe96632a 100644 --- a/frontend/src/scenes/data-warehouse/external/forms/SyncMethodForm.tsx +++ b/frontend/src/scenes/data-warehouse/external/forms/SyncMethodForm.tsx @@ -10,7 +10,7 @@ const getIncrementalSyncSupported = ( if (!schema.incremental_available) { return { disabled: true, - disabledReason: "Incremental append replication isn't supported on this table", + disabledReason: "Incremental replication isn't supported on this table", } } @@ -109,23 +109,18 @@ export const SyncMethodForm = ({ label: (
-

Incremental append replication

+

Incremental replication

{!incrementalSyncSupported.disabled && ( Recommended )}

- When using incremental append replication, we'll store the max value of the below - field on each sync and only sync rows with greater or equal value on the next run. + When using incremental replication, we'll store the max value of the below field on + each sync and only sync rows with a greater or equal value on the next run.

- You should pick a field that increments for each row, such as a{' '} - created_at timestamp. -

-

- This method will append all new rows to your existing table - this means duplicate - data can exist if the incremental field updates for updated rows (such as when using - an updated_at field) + You should pick a field that increments or updates each time the row is updated, + such as an updated_at timestamp.

{showRefreshMessage && (

- Note: Changing the sync type or incremental append replication field will trigger a full table - refresh + Note: Changing the sync type or incremental replication field will trigger a full table refresh

)}
@@ -185,7 +179,7 @@ export const SyncMethodForm = ({ (n) => n.field === incrementalFieldValue ) if (!fieldSelected) { - lemonToast.error('Selected field for incremental append replication not found') + lemonToast.error('Selected field for incremental replication not found') return } diff --git a/frontend/src/scenes/data-warehouse/settings/dataWarehouseSettingsLogic.ts b/frontend/src/scenes/data-warehouse/settings/dataWarehouseSettingsLogic.ts index 9eeaa4c96f832..c0adf6907c65d 100644 --- a/frontend/src/scenes/data-warehouse/settings/dataWarehouseSettingsLogic.ts +++ b/frontend/src/scenes/data-warehouse/settings/dataWarehouseSettingsLogic.ts @@ -7,7 +7,7 @@ import posthog from 'posthog-js' import { databaseTableListLogic } from 'scenes/data-management/database/databaseTableListLogic' import { DatabaseSchemaDataWarehouseTable } from '~/queries/schema' -import { DataWarehouseSettingsTab, ExternalDataSourceSchema, ExternalDataStripeSource } from '~/types' +import { DataWarehouseSettingsTab, ExternalDataSource, ExternalDataSourceSchema } from '~/types' import type { dataWarehouseSettingsLogicType } from './dataWarehouseSettingsLogicType' @@ -31,9 +31,9 @@ export const dataWarehouseSettingsLogic = kea([ actions: [databaseTableListLogic, ['loadDatabase']], })), actions({ - deleteSource: (source: ExternalDataStripeSource) => ({ source }), - reloadSource: (source: ExternalDataStripeSource) => ({ source }), - sourceLoadingFinished: (source: ExternalDataStripeSource) => ({ source }), + deleteSource: (source: ExternalDataSource) => ({ source }), + reloadSource: (source: ExternalDataSource) => ({ source }), + sourceLoadingFinished: (source: ExternalDataSource) => ({ source }), schemaLoadingFinished: (schema: ExternalDataSourceSchema) => ({ schema }), abortAnyRunningQuery: true, deleteSelfManagedTable: (tableId: string) => ({ tableId }), @@ -41,7 +41,7 @@ export const dataWarehouseSettingsLogic = kea([ }), loaders(({ cache, actions, values }) => ({ dataWarehouseSources: [ - null as PaginatedResponse | null, + null as PaginatedResponse | null, { loadSources: async (_, breakpoint) => { await breakpoint(300) @@ -59,7 +59,7 @@ export const dataWarehouseSettingsLogic = kea([ return res }, - updateSource: async (source: ExternalDataStripeSource) => { + updateSource: async (source: ExternalDataSource) => { const updatedSource = await api.externalDataSources.update(source.id, source) return { ...values.dataWarehouseSources, @@ -77,7 +77,7 @@ export const dataWarehouseSettingsLogic = kea([ // Optimistic UI updates before sending updates to the backend const clonedSources = JSON.parse( JSON.stringify(values.dataWarehouseSources?.results ?? []) - ) as ExternalDataStripeSource[] + ) as ExternalDataSource[] const sourceIndex = clonedSources.findIndex((n) => n.schemas.find((m) => m.id === schema.id)) const schemaIndex = clonedSources[sourceIndex].schemas.findIndex((n) => n.id === schema.id) clonedSources[sourceIndex].schemas[schemaIndex] = schema @@ -166,7 +166,7 @@ export const dataWarehouseSettingsLogic = kea([ // Optimistic UI updates before sending updates to the backend const clonedSources = JSON.parse( JSON.stringify(values.dataWarehouseSources?.results ?? 
[]) - ) as ExternalDataStripeSource[] + ) as ExternalDataSource[] const sourceIndex = clonedSources.findIndex((n) => n.id === source.id) clonedSources[sourceIndex].status = 'Running' clonedSources[sourceIndex].schemas = clonedSources[sourceIndex].schemas.map((n) => { diff --git a/frontend/src/scenes/data-warehouse/settings/source/SourceConfiguration.tsx b/frontend/src/scenes/data-warehouse/settings/source/SourceConfiguration.tsx new file mode 100644 index 0000000000000..9ad92924dcabc --- /dev/null +++ b/frontend/src/scenes/data-warehouse/settings/source/SourceConfiguration.tsx @@ -0,0 +1,58 @@ +import { LemonBanner, LemonButton, LemonSkeleton } from '@posthog/lemon-ui' +import { BindLogic, useValues } from 'kea' +import { Form } from 'kea-forms' +import { SourceFormComponent, SourceFormProps } from 'scenes/data-warehouse/external/forms/SourceForm' + +import { dataWarehouseSourceSettingsLogic } from './dataWarehouseSourceSettingsLogic' + +interface SourceConfigurationProps { + id: string +} + +export const SourceConfiguration = ({ id }: SourceConfigurationProps): JSX.Element => { + const { sourceFieldConfig } = useValues(dataWarehouseSourceSettingsLogic({ id })) + return ( + + {sourceFieldConfig ? ( + + ) : ( + + )} + + ) +} + +interface UpdateSourceConnectionFormContainerProps extends SourceFormProps { + id: string +} + +function UpdateSourceConnectionFormContainer(props: UpdateSourceConnectionFormContainerProps): JSX.Element { + const { source, sourceLoading } = useValues(dataWarehouseSourceSettingsLogic({ id: props.id })) + + if (source?.source_type !== 'MSSQL' && source?.source_type !== 'MySQL' && source?.source_type !== 'Postgres') { + return ( + +

+ Only Postgres, MSSQL, and MySQL are configurable. Please delete and recreate your source if you need + to connect to a new source of the same type. +

+
+ ) + } + return ( +
+ +
+ + Save + +
+ + ) +} diff --git a/frontend/src/scenes/data-warehouse/settings/source/dataWarehouseSourceSettingsLogic.ts b/frontend/src/scenes/data-warehouse/settings/source/dataWarehouseSourceSettingsLogic.ts index d7f290b850114..3343eec78e0e0 100644 --- a/frontend/src/scenes/data-warehouse/settings/source/dataWarehouseSourceSettingsLogic.ts +++ b/frontend/src/scenes/data-warehouse/settings/source/dataWarehouseSourceSettingsLogic.ts @@ -1,10 +1,12 @@ import { lemonToast } from '@posthog/lemon-ui' -import { actions, afterMount, kea, key, listeners, path, props, reducers } from 'kea' +import { actions, afterMount, kea, key, listeners, path, props, reducers, selectors } from 'kea' +import { forms } from 'kea-forms' import { loaders } from 'kea-loaders' import api from 'lib/api' import posthog from 'posthog-js' +import { SOURCE_DETAILS } from 'scenes/data-warehouse/new/sourceWizardLogic' -import { ExternalDataJob, ExternalDataSourceSchema, ExternalDataStripeSource } from '~/types' +import { ExternalDataJob, ExternalDataSource, ExternalDataSourceSchema } from '~/types' import type { dataWarehouseSourceSettingsLogicType } from './dataWarehouseSourceSettingsLogicType' @@ -26,14 +28,14 @@ export const dataWarehouseSourceSettingsLogic = kea ({ source: [ - null as ExternalDataStripeSource | null, + null as ExternalDataSource | null, { loadSource: async () => { return await api.externalDataSources.get(values.sourceId) }, updateSchema: async (schema: ExternalDataSourceSchema) => { // Optimistic UI updates before sending updates to the backend - const clonedSource = JSON.parse(JSON.stringify(values.source)) as ExternalDataStripeSource + const clonedSource = JSON.parse(JSON.stringify(values.source)) as ExternalDataSource const schemaIndex = clonedSource.schemas.findIndex((n) => n.id === schema.id) clonedSource.schemas[schemaIndex] = schema actions.loadSourceSuccess(clonedSource) @@ -47,6 +49,11 @@ export const dataWarehouseSourceSettingsLogic = kea { + const updatedSource = await api.externalDataSources.update(values.sourceId, source) + actions.loadSourceSuccess(updatedSource) + return updatedSource + }, }, ], jobs: [ @@ -94,7 +101,42 @@ export const dataWarehouseSourceSettingsLogic = kea [s.source], + (source) => { + if (!source) { + return null + } + return SOURCE_DETAILS[source.source_type] + }, + ], + }), + forms(({ values, actions }) => ({ + sourceConfig: { + defaults: {} as Record, + submit: async ({ payload = {} }) => { + const newJobInputs = { + ...values.source?.job_inputs, + ...payload, + } + try { + const updatedSource = await api.externalDataSources.update(values.sourceId, { + job_inputs: newJobInputs, + }) + actions.loadSourceSuccess(updatedSource) + lemonToast.success('Source updated') + } catch (e: any) { + if (e.message) { + lemonToast.error(e.message) + } else { + lemonToast.error('Cant update source at this time') + } + } + }, + }, + })), listeners(({ values, actions, cache }) => ({ loadSourceSuccess: () => { clearTimeout(cache.sourceRefreshTimeout) @@ -126,7 +168,7 @@ export const dataWarehouseSourceSettingsLogic = kea { // Optimistic UI updates before sending updates to the backend - const clonedSource = JSON.parse(JSON.stringify(values.source)) as ExternalDataStripeSource + const clonedSource = JSON.parse(JSON.stringify(values.source)) as ExternalDataSource const schemaIndex = clonedSource.schemas.findIndex((n) => n.id === schema.id) clonedSource.status = 'Running' clonedSource.schemas[schemaIndex].status = 'Running' @@ -147,7 +189,7 @@ export const dataWarehouseSourceSettingsLogic = kea { // 
Optimistic UI updates before sending updates to the backend - const clonedSource = JSON.parse(JSON.stringify(values.source)) as ExternalDataStripeSource + const clonedSource = JSON.parse(JSON.stringify(values.source)) as ExternalDataSource const schemaIndex = clonedSource.schemas.findIndex((n) => n.id === schema.id) clonedSource.status = 'Running' clonedSource.schemas[schemaIndex].status = 'Running' diff --git a/frontend/src/scenes/debug/HogDebug.tsx b/frontend/src/scenes/debug/HogDebug.tsx index c2339d759218f..7e4f280793882 100644 --- a/frontend/src/scenes/debug/HogDebug.tsx +++ b/frontend/src/scenes/debug/HogDebug.tsx @@ -100,7 +100,7 @@ export function HogQueryEditor(props: HogQueryEditorProps): JSX.Element { interface HogDebugProps { queryKey: string query: HogQuery - setQuery?: (query: HogQuery) => void + setQuery: (query: HogQuery) => void debug?: boolean } diff --git a/frontend/src/scenes/error-tracking/ErrorTracking.stories.tsx b/frontend/src/scenes/error-tracking/ErrorTracking.stories.tsx index 348dc537034fd..2a97ca624ab98 100644 --- a/frontend/src/scenes/error-tracking/ErrorTracking.stories.tsx +++ b/frontend/src/scenes/error-tracking/ErrorTracking.stories.tsx @@ -20,7 +20,7 @@ const meta: Meta = { decorators: [ mswDecorator({ post: { - '/api/projects/:team_id/query': async (req, res, ctx) => { + '/api/environments/:team_id/query': async (req, res, ctx) => { const query = (await req.clone().json()).query if (query.kind === NodeKind.ErrorTrackingQuery) { return res(ctx.json(errorTrackingQueryResponse)) diff --git a/frontend/src/scenes/experiments/ExperimentView/DistributionTable.tsx b/frontend/src/scenes/experiments/ExperimentView/DistributionTable.tsx index 708facb73bd46..b7aa63829482b 100644 --- a/frontend/src/scenes/experiments/ExperimentView/DistributionTable.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/DistributionTable.tsx @@ -13,6 +13,7 @@ import { VariantScreenshot } from './VariantScreenshot' export function DistributionTable(): JSX.Element { const { experimentId, experiment, experimentResults } = useValues(experimentLogic) + const { reportExperimentReleaseConditionsViewed } = useActions(experimentLogic) const { openSidePanel } = useActions(sidePanelStateLogic) const columns: LemonTableColumns = [ @@ -60,7 +61,10 @@ export function DistributionTable(): JSX.Element {
} - onClick={() => openSidePanel(SidePanelTab.ExperimentFeatureFlag)} + onClick={() => { + openSidePanel(SidePanelTab.ExperimentFeatureFlag) + reportExperimentReleaseConditionsViewed(experiment.id) + }} type="secondary" size="xsmall" className="font-semibold" diff --git a/frontend/src/scenes/experiments/ExperimentView/ReleaseConditionsTable.tsx b/frontend/src/scenes/experiments/ExperimentView/ReleaseConditionsTable.tsx index 0f31b8929bf8b..5b9c8bac492bb 100644 --- a/frontend/src/scenes/experiments/ExperimentView/ReleaseConditionsTable.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/ReleaseConditionsTable.tsx @@ -12,6 +12,7 @@ import { experimentLogic } from '../experimentLogic' export function ReleaseConditionsTable(): JSX.Element { const { experiment } = useValues(experimentLogic) + const { reportExperimentReleaseConditionsViewed } = useActions(experimentLogic) const { aggregationLabel } = useValues(groupsModel) const { openSidePanel } = useActions(sidePanelStateLogic) @@ -65,7 +66,10 @@ export function ReleaseConditionsTable(): JSX.Element {
} - onClick={() => openSidePanel(SidePanelTab.ExperimentFeatureFlag)} + onClick={() => { + openSidePanel(SidePanelTab.ExperimentFeatureFlag) + reportExperimentReleaseConditionsViewed(experiment.id) + }} type="secondary" size="xsmall" className="font-semibold" diff --git a/frontend/src/scenes/experiments/ExperimentView/VariantScreenshot.tsx b/frontend/src/scenes/experiments/ExperimentView/VariantScreenshot.tsx index c80bcd574ed0f..3580428d04175 100644 --- a/frontend/src/scenes/experiments/ExperimentView/VariantScreenshot.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/VariantScreenshot.tsx @@ -15,7 +15,7 @@ export function VariantScreenshot({ rolloutPercentage: number }): JSX.Element { const { experiment } = useValues(experimentLogic) - const { updateExperimentVariantImages } = useActions(experimentLogic) + const { updateExperimentVariantImages, reportExperimentVariantScreenshotUploaded } = useActions(experimentLogic) const [mediaId, setMediaId] = useState(experiment.parameters?.variant_screenshot_media_ids?.[variantKey] || null) const [isLoadingImage, setIsLoadingImage] = useState(true) @@ -30,6 +30,7 @@ export function VariantScreenshot({ [variantKey]: id, } updateExperimentVariantImages(updatedVariantImages) + reportExperimentVariantScreenshotUploaded(experiment.id) } }, onError: (detail) => { diff --git a/frontend/src/scenes/experiments/ExperimentView/components.tsx b/frontend/src/scenes/experiments/ExperimentView/components.tsx index 476ea2fa613fe..71e6c7f35dd25 100644 --- a/frontend/src/scenes/experiments/ExperimentView/components.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/components.tsx @@ -360,7 +360,6 @@ export function PageHeaderCustom(): JSX.Element { launchExperiment, endExperiment, archiveExperiment, - setEditExperiment, loadExperimentResults, loadSecondaryMetricResults, createExposureCohort, @@ -374,9 +373,6 @@ export function PageHeaderCustom(): JSX.Element { <> {experiment && !isExperimentRunning && (
- setEditExperiment(true)}> - Edit - ([ 'reportExperimentReset', 'reportExperimentExposureCohortCreated', 'reportExperimentVariantShipped', + 'reportExperimentVariantScreenshotUploaded', + 'reportExperimentResultsLoadingTimeout', + 'reportExperimentReleaseConditionsViewed', ], insightDataLogic({ dashboardItemId: EXPERIMENT_INSIGHT_ID }), ['setQuery'], @@ -775,6 +778,9 @@ export const experimentLogic = kea([ } } catch (error: any) { actions.setExperimentResultCalculationError({ detail: error.detail, statusCode: error.status }) + if (error.status === 504) { + actions.reportExperimentResultsLoadingTimeout(values.experimentId) + } return null } }, @@ -883,8 +889,12 @@ export const experimentLogic = kea([ }, { key: [Scene.Experiment, experimentId], - name: experiment?.name || 'New', - path: urls.experiment(experimentId || 'new'), + name: experiment?.name || '', + onRename: async (name: string) => { + // :KLUDGE: work around a type error when using asyncActions accessed via a callback passed to selectors() + const logic = experimentLogic({ experimentId }) + await logic.asyncActions.updateExperiment({ name }) + }, }, ], ], diff --git a/frontend/src/scenes/feature-flags/FeatureFlag.tsx b/frontend/src/scenes/feature-flags/FeatureFlag.tsx index 4e219519fd837..c440c80286283 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlag.tsx +++ b/frontend/src/scenes/feature-flags/FeatureFlag.tsx @@ -1,6 +1,6 @@ import './FeatureFlag.scss' -import { IconCollapse, IconExpand, IconPlus, IconTrash } from '@posthog/icons' +import { IconBalance, IconCollapse, IconExpand, IconPlus, IconTrash } from '@posthog/icons' import { LemonDialog, LemonSegmentedButton, LemonSkeleton, LemonSwitch } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { Form, Group } from 'kea-forms' @@ -638,7 +638,7 @@ function UsageTab({ featureFlag }: { id: string; featureFlag: FeatureFlagType }) ) { enrichUsageDashboard() } - }, [dashboard]) + }, [dashboard, hasEnrichedAnalytics, enrichUsageDashboard]) const propertyFilter: AnyPropertyFilter[] = [ { @@ -950,9 +950,14 @@ function FeatureFlagRollout({ readOnly }: { readOnly?: boolean }): JSX.Element {
-
+
Rollout - (Redistribute) + + +
{variants.map((variant, index) => ( @@ -1023,6 +1028,7 @@ function FeatureFlagRollout({ readOnly }: { readOnly?: boolean }): JSX.Element { } } }} + suffix={%} /> {filterGroups.filter((group) => group.variant === variant.key) .length > 0 && ( diff --git a/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditions.tsx b/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditions.tsx index 58266ce4c80b6..ccd0986e0a2af 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditions.tsx +++ b/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditions.tsx @@ -245,6 +245,7 @@ export function FeatureFlagReleaseConditions({ : null } exactMatchFeatureFlagCohortOperators={true} + hideBehavioralCohorts={true} />
)} @@ -453,8 +454,17 @@ export function FeatureFlagReleaseConditions({ <>

Release conditions

- Specify the {aggregationTargetName} to which you want to release this flag. Note - that condition sets are rolled out independently of each other. + Specify {aggregationTargetName} for flag release. Condition sets roll out + independently. + {aggregationTargetName === 'users' && ( + <> + {' '} + Cohort-based targeting{' '} + + doesn't support dynamic behavioral cohorts. + {' '} + + )}
)} diff --git a/frontend/src/scenes/feature-flags/FeatureFlags.stories.tsx b/frontend/src/scenes/feature-flags/FeatureFlags.stories.tsx index debe6ee739398..acf32b9788ed5 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlags.stories.tsx +++ b/frontend/src/scenes/feature-flags/FeatureFlags.stories.tsx @@ -35,7 +35,7 @@ const meta: Meta = { ], }, post: { - '/api/projects/:team_id/query': {}, + '/api/environments/:team_id/query': {}, // flag targeting has loaders, make sure they don't keep loading '/api/projects/:team_id/feature_flags/user_blast_radius/': () => [ 200, diff --git a/frontend/src/scenes/feature-flags/featureFlagLogic.ts b/frontend/src/scenes/feature-flags/featureFlagLogic.ts index 2f2933cf9335e..65026ccfd3453 100644 --- a/frontend/src/scenes/feature-flags/featureFlagLogic.ts +++ b/frontend/src/scenes/feature-flags/featureFlagLogic.ts @@ -16,6 +16,7 @@ import { experimentLogic } from 'scenes/experiments/experimentLogic' import { featureFlagsLogic, FeatureFlagsTab } from 'scenes/feature-flags/featureFlagsLogic' import { filterTrendsClientSideParams } from 'scenes/insights/sharedUtils' import { cleanFilters } from 'scenes/insights/utils/cleanFilters' +import { projectLogic } from 'scenes/projectLogic' import { Scene } from 'scenes/sceneTypes' import { NEW_SURVEY, NewSurvey } from 'scenes/surveys/constants' import { urls } from 'scenes/urls' @@ -37,6 +38,7 @@ import { FilterType, InsightModel, InsightType, + JsonType, MultivariateFlagOptions, MultivariateFlagVariant, NewEarlyAccessFeatureType, @@ -132,9 +134,11 @@ export const variantKeyToIndexFeatureFlagPayloads = (flag: FeatureFlagType): Fea return flag } - const newPayloads = {} + const newPayloads: Record = {} flag.filters.multivariate?.variants.forEach((variant, index) => { - newPayloads[index] = flag.filters.payloads?.[variant.key] + if (flag.filters.payloads?.[variant.key] !== undefined) { + newPayloads[index] = flag.filters.payloads[variant.key] + } }) return { ...flag, @@ -147,11 +151,10 @@ export const variantKeyToIndexFeatureFlagPayloads = (flag: FeatureFlagType): Fea const indexToVariantKeyFeatureFlagPayloads = (flag: Partial): Partial => { if (flag.filters?.multivariate) { - const newPayloads = {} - flag.filters?.multivariate?.variants.forEach(({ key }, index) => { - const payload = flag.filters?.payloads?.[index] - if (payload) { - newPayloads[key] = payload + const newPayloads: Record = {} + flag.filters.multivariate.variants.forEach(({ key }, index) => { + if (flag.filters?.payloads?.[index] !== undefined) { + newPayloads[key] = flag.filters.payloads[index] } }) return { @@ -186,6 +189,8 @@ export const featureFlagLogic = kea([ values: [ teamLogic, ['currentTeamId'], + projectLogic, + ['currentProjectId'], groupsModel, ['aggregationLabel'], userLogic, @@ -316,6 +321,22 @@ export const featureFlagLogic = kea([ } const variants = [...(state.filters.multivariate?.variants || [])] variants.splice(index, 1) + + const currentPayloads = { ...state.filters.payloads } + const newPayloads: Record = {} + + // TRICKY: In addition to modifying the variant array, we also need to shift the payload indices + // because the variant array is being modified and we need to make sure that the payloads object + // stays in sync with the variant array. 
+ Object.keys(currentPayloads).forEach((key) => { + const payloadIndex = parseInt(key) + if (payloadIndex > index) { + newPayloads[payloadIndex - 1] = currentPayloads[payloadIndex] + } else if (payloadIndex < index) { + newPayloads[payloadIndex] = currentPayloads[payloadIndex] + } + }) + return { ...state, filters: { @@ -324,6 +345,7 @@ export const featureFlagLogic = kea([ ...state.filters.multivariate, variants, }, + payloads: newPayloads, }, } }, @@ -472,13 +494,16 @@ export const featureFlagLogic = kea([ try { let savedFlag: FeatureFlagType if (!updatedFlag.id) { - savedFlag = await api.create(`api/projects/${values.currentTeamId}/feature_flags`, preparedFlag) + savedFlag = await api.create( + `api/projects/${values.currentProjectId}/feature_flags`, + preparedFlag + ) if (values.roleBasedAccessEnabled && savedFlag.id) { featureFlagPermissionsLogic({ flagId: null })?.actions.addAssociatedRoles(savedFlag.id) } } else { savedFlag = await api.update( - `api/projects/${values.currentTeamId}/feature_flags/${updatedFlag.id}`, + `api/projects/${values.currentProjectId}/feature_flags/${updatedFlag.id}`, preparedFlag ) } @@ -525,7 +550,7 @@ export const featureFlagLogic = kea([ loadRelatedInsights: async () => { if (props.id && props.id !== 'new' && values.featureFlag.key) { const response = await api.get>( - `api/projects/${values.currentTeamId}/insights/?feature_flag=${values.featureFlag.key}&order=-created_at` + `api/environments/${values.currentProjectId}/insights/?feature_flag=${values.featureFlag.key}&order=-created_at` ) return response.results.map((legacyInsight) => getQueryBasedInsightModel(legacyInsight)) } @@ -636,7 +661,7 @@ export const featureFlagLogic = kea([ createScheduledChange: async () => { const { scheduledChangeOperation, scheduleDateMarker, currentTeamId, schedulePayload } = values - const fields = { + const fields: Record = { [ScheduledChangeOperationType.UpdateStatus]: 'active', [ScheduledChangeOperationType.AddReleaseCondition]: 'filters', } @@ -665,13 +690,13 @@ export const featureFlagLogic = kea([ })), listeners(({ actions, values, props }) => ({ submitNewDashboardSuccessWithResult: async ({ result }) => { - await api.update(`api/projects/${values.currentTeamId}/feature_flags/${values.featureFlag.id}`, { + await api.update(`api/projects/${values.currentProjectId}/feature_flags/${values.featureFlag.id}`, { analytics_dashboards: [result.id], }) }, generateUsageDashboard: async () => { if (props.id) { - await api.create(`api/projects/${values.currentTeamId}/feature_flags/${props.id}/dashboard`) + await api.create(`api/projects/${values.currentProjectId}/feature_flags/${props.id}/dashboard`) actions.loadFeatureFlag() } }, @@ -679,7 +704,7 @@ export const featureFlagLogic = kea([ if (props.id) { await breakpoint(1000) // in ms await api.create( - `api/projects/${values.currentTeamId}/feature_flags/${props.id}/enrich_usage_dashboard` + `api/projects/${values.currentProjectId}/feature_flags/${props.id}/enrich_usage_dashboard` ) } }, @@ -706,12 +731,13 @@ export const featureFlagLogic = kea([ const experimentId = currentPath.split('/').pop() if (experimentId) { + eventUsageLogic.actions.reportExperimentReleaseConditionsUpdated(parseInt(experimentId)) experimentLogic({ experimentId: parseInt(experimentId) }).actions.loadExperiment() } }, deleteFeatureFlag: async ({ featureFlag }) => { await deleteWithUndo({ - endpoint: `projects/${values.currentTeamId}/feature_flags`, + endpoint: `projects/${values.currentProjectId}/feature_flags`, object: { name: featureFlag.key, id: 
featureFlag.id }, callback: () => { featureFlag.id && actions.deleteFlag(featureFlag.id) @@ -733,7 +759,7 @@ export const featureFlagLogic = kea([ loadInsightAtIndex: async ({ index, filters }) => { if (filters) { const response = await api.get( - `api/projects/${values.currentTeamId}/insights/trend/?${toParams( + `api/environments/${values.currentProjectId}/insights/trend/?${toParams( filterTrendsClientSideParams(filters) )}` ) diff --git a/frontend/src/scenes/funnels/FunnelLineGraph.tsx b/frontend/src/scenes/funnels/FunnelLineGraph.tsx index 544b833326909..871a1c1e07520 100644 --- a/frontend/src/scenes/funnels/FunnelLineGraph.tsx +++ b/frontend/src/scenes/funnels/FunnelLineGraph.tsx @@ -87,6 +87,7 @@ export function FunnelLineGraph({ kind: NodeKind.FunnelsActorsQuery, source: querySource, funnelTrendsDropOff: false, + includeRecordings: true, funnelTrendsEntrancePeriodStart: dayjs(day).format('YYYY-MM-DD HH:mm:ss'), } openPersonsModal({ diff --git a/frontend/src/scenes/funnels/funnelPersonsModalLogic.test.ts b/frontend/src/scenes/funnels/funnelPersonsModalLogic.test.ts index c649ccb59c42e..73d469b842e07 100644 --- a/frontend/src/scenes/funnels/funnelPersonsModalLogic.test.ts +++ b/frontend/src/scenes/funnels/funnelPersonsModalLogic.test.ts @@ -22,7 +22,7 @@ describe('funnelPersonsModalLogic', () => { beforeEach(() => { useMocks({ get: { - '/api/projects/:team/insights/': { + '/api/environments/:team_id/insights/': { results: [{}], }, }, diff --git a/frontend/src/scenes/funnels/funnelPropertyCorrelationLogic.test.ts b/frontend/src/scenes/funnels/funnelPropertyCorrelationLogic.test.ts index 0101a3c56e120..e2821b8499d92 100644 --- a/frontend/src/scenes/funnels/funnelPropertyCorrelationLogic.test.ts +++ b/frontend/src/scenes/funnels/funnelPropertyCorrelationLogic.test.ts @@ -29,10 +29,10 @@ describe('funnelPropertyCorrelationLogic', () => { correlation_config: correlationConfig, }, ], - '/api/projects/:team/insights/': { results: [{}] }, - '/api/projects/:team/insights/:id/': {}, + '/api/environments/:team_id/insights/': { results: [{}] }, + '/api/environments/:team_id/insights/:id/': {}, '/api/projects/:team/groups_types/': [], - '/api/projects/:team/persons/properties': [ + '/api/environments/:team_id/persons/properties': [ { name: 'some property', count: 20 }, { name: 'another property', count: 10 }, { name: 'third property', count: 5 }, @@ -59,7 +59,7 @@ describe('funnelPropertyCorrelationLogic', () => { ], }, post: { - '/api/projects/:team/insights/funnel/correlation': (req) => { + '/api/environments/:team_id/insights/funnel/correlation': (req) => { const data = req.body as any const excludePropertyFromProjectNames = data?.funnel_correlation_exclude_names || [] const includePropertyNames = data?.funnel_correlation_names || [] diff --git a/frontend/src/scenes/heatmaps/HeatmapsBrowser.stories.tsx b/frontend/src/scenes/heatmaps/HeatmapsBrowser.stories.tsx index e043ae3025e24..b89a5f653e429 100644 --- a/frontend/src/scenes/heatmaps/HeatmapsBrowser.stories.tsx +++ b/frontend/src/scenes/heatmaps/HeatmapsBrowser.stories.tsx @@ -21,7 +21,7 @@ const meta: Meta = { '/api/projects/:team_id/integrations': {}, }, post: { - '/api/projects/:team_id/query': async (req, res, ctx) => { + '/api/environments/:team_id/query': async (req, res, ctx) => { const qry = (await req.clone().json()).query.query // top urls query if (qry.startsWith('SELECT properties.$current_url AS url, count()')) { diff --git a/frontend/src/scenes/insights/EmptyStates/EmptyStates.stories.tsx 
b/frontend/src/scenes/insights/EmptyStates/EmptyStates.stories.tsx index 15e23154d2fef..8a0e5c91af302 100644 --- a/frontend/src/scenes/insights/EmptyStates/EmptyStates.stories.tsx +++ b/frontend/src/scenes/insights/EmptyStates/EmptyStates.stories.tsx @@ -27,7 +27,7 @@ export default meta export const Empty: StoryFn = () => { useStorybookMocks({ get: { - '/api/projects/:team_id/insights/': (_, __, ctx) => [ + '/api/environments/:team_id/insights/': (_, __, ctx) => [ ctx.delay(100), ctx.status(200), ctx.json({ count: 1, results: [{ ...insight, result: [] }] }), @@ -43,12 +43,12 @@ export const Empty: StoryFn = () => { export const ServerError: StoryFn = () => { useStorybookMocks({ get: { - '/api/projects/:team_id/insights/': (_, __, ctx) => [ + '/api/environments/:team_id/insights/': (_, __, ctx) => [ ctx.delay(100), ctx.status(200), ctx.json({ count: 1, results: [{ ...insight, result: null }] }), ], - '/api/projects/:team_id/insights/:id': (_, __, ctx) => [ + '/api/environments/:team_id/insights/:id': (_, __, ctx) => [ ctx.delay(100), ctx.status(500), ctx.json({ @@ -67,14 +67,14 @@ export const ServerError: StoryFn = () => { export const ValidationError: StoryFn = () => { useStorybookMocks({ get: { - '/api/projects/:team_id/insights/': (_, __, ctx) => [ + '/api/environments/:team_id/insights/': (_, __, ctx) => [ ctx.delay(100), ctx.status(200), ctx.json({ count: 1, results: [{ ...insight, result: null }] }), ], }, post: { - '/api/projects/:team_id/insights/:id': (_, __, ctx) => [ + '/api/environments/:team_id/insights/:id': (_, __, ctx) => [ ctx.delay(100), ctx.status(400), ctx.json({ @@ -93,13 +93,13 @@ export const ValidationError: StoryFn = () => { export const EstimatedQueryExecutionTimeTooLong: StoryFn = () => { useStorybookMocks({ get: { - '/api/projects/:team_id/insights/': (_, __, ctx) => [ + '/api/environments/:team_id/insights/': (_, __, ctx) => [ ctx.status(200), ctx.json({ count: 1, results: [{ ...insight, result: null }] }), ], }, post: { - '/api/projects/:team_id/query/': (_, __, ctx) => [ + '/api/environments/:team_id/query/': (_, __, ctx) => [ ctx.delay(100), ctx.status(512), ctx.json({ @@ -124,13 +124,13 @@ EstimatedQueryExecutionTimeTooLong.parameters = { export const LongLoading: StoryFn = () => { useStorybookMocks({ get: { - '/api/projects/:team_id/insights/': (_, __, ctx) => [ + '/api/environments/:team_id/insights/': (_, __, ctx) => [ ctx.status(200), ctx.json({ count: 1, results: [{ ...insight, result: null }] }), ], }, post: { - '/api/projects/:team_id/query/': (_, __, ctx) => [ctx.delay('infinite')], + '/api/environments/:team_id/query/': (_, __, ctx) => [ctx.delay('infinite')], }, }) useEffect(() => { diff --git a/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx b/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx index 95283388cb974..06f0928dc54f3 100644 --- a/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx +++ b/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx @@ -273,9 +273,11 @@ export function InsightErrorState({ excludeDetail, title, query, queryId }: Insi
)} -
- Query ID: {queryId} -
+ {queryId && ( +
+ Query ID: {queryId} +
+ )} {query && ( - {isObject(filtersOverride) && ( + {(isObject(filtersOverride) || isObject(variablesOverride)) && (
- You are viewing this insight with filters from a dashboard + + You are viewing this insight with{' '} + {isObject(variablesOverride) ? 'variables' : 'filters'} from a dashboard + - Discard dashboard filters + Discard dashboard {isObject(variablesOverride) ? 'variables' : 'filters'}
@@ -74,7 +81,7 @@ export function Insight({ insightId }: InsightSceneProps): JSX.Element {
diff --git a/frontend/src/scenes/insights/InsightNav/insightNavLogic.test.ts b/frontend/src/scenes/insights/InsightNav/insightNavLogic.test.ts index b5ddbdda5157a..958ba54f0cc63 100644 --- a/frontend/src/scenes/insights/InsightNav/insightNavLogic.test.ts +++ b/frontend/src/scenes/insights/InsightNav/insightNavLogic.test.ts @@ -28,12 +28,12 @@ describe('insightNavLogic', () => { beforeEach(async () => { useMocks({ get: { - '/api/projects/:team/insights/trend/': async () => { + '/api/environments/:team_id/insights/trend/': async () => { return [200, { result: ['result from api'] }] }, }, post: { - '/api/projects/:team/insights/funnel/': { result: ['result from api'] }, + '/api/environments/:team_id/insights/funnel/': { result: ['result from api'] }, }, }) initKeaTests(true, { ...MOCK_DEFAULT_TEAM, test_account_filters_default_checked: true }) diff --git a/frontend/src/scenes/insights/Insights.stories.tsx b/frontend/src/scenes/insights/Insights.stories.tsx index c59e304f4be68..c97f692eb5bd0 100644 --- a/frontend/src/scenes/insights/Insights.stories.tsx +++ b/frontend/src/scenes/insights/Insights.stories.tsx @@ -24,8 +24,8 @@ const meta: Meta = { decorators: [ mswDecorator({ get: { - '/api/projects/:team_id/persons/retention': sampleRetentionPeopleResponse, - '/api/projects/:team_id/persons/properties': samplePersonProperties, + '/api/environments/:team_id/persons/retention': sampleRetentionPeopleResponse, + '/api/environments/:team_id/persons/properties': samplePersonProperties, '/api/projects/:team_id/groups_types': [], }, post: { diff --git a/frontend/src/scenes/insights/__mocks__/createInsightScene.tsx b/frontend/src/scenes/insights/__mocks__/createInsightScene.tsx index 0032cb2baafe6..819a35716abdd 100644 --- a/frontend/src/scenes/insights/__mocks__/createInsightScene.tsx +++ b/frontend/src/scenes/insights/__mocks__/createInsightScene.tsx @@ -16,7 +16,7 @@ export function createInsightStory( return function InsightStory() { useStorybookMocks({ get: { - '/api/projects/:team_id/insights/': (_, __, ctx) => [ + '/api/environments/:team_id/insights/': (_, __, ctx) => [ ctx.status(200), ctx.json({ count: 1, @@ -35,7 +35,7 @@ export function createInsightStory( ], }, post: { - '/api/projects/:team_id/query/': (req, __, ctx) => [ + '/api/environments/:team_id/query/': (req, __, ctx) => [ ctx.status(200), ctx.json({ cache_key: req.params.query, diff --git a/frontend/src/scenes/insights/insightDataLogic.test.ts b/frontend/src/scenes/insights/insightDataLogic.test.ts index 1d46ee553434d..11af2a186a279 100644 --- a/frontend/src/scenes/insights/insightDataLogic.test.ts +++ b/frontend/src/scenes/insights/insightDataLogic.test.ts @@ -20,7 +20,7 @@ describe('insightDataLogic', () => { beforeEach(() => { useMocks({ get: { - '/api/projects/:team_id/insights/trend': [], + '/api/environments/:team_id/insights/trend': [], }, }) initKeaTests() diff --git a/frontend/src/scenes/insights/insightDataLogic.tsx b/frontend/src/scenes/insights/insightDataLogic.tsx index e06b13c197592..168a9160bb966 100644 --- a/frontend/src/scenes/insights/insightDataLogic.tsx +++ b/frontend/src/scenes/insights/insightDataLogic.tsx @@ -32,6 +32,8 @@ export const insightDataLogic = kea([ dataNodeLogic({ key: insightVizDataNodeKey(props), loadPriority: props.loadPriority, + filtersOverride: props.filtersOverride, + variablesOverride: props.variablesOverride, } as DataNodeLogicProps), [ 'query as insightQuery', diff --git a/frontend/src/scenes/insights/insightLogic.test.ts b/frontend/src/scenes/insights/insightLogic.test.ts index 
3eb7fa86bec3f..cdf3b53a54784 100644 --- a/frontend/src/scenes/insights/insightLogic.test.ts +++ b/frontend/src/scenes/insights/insightLogic.test.ts @@ -112,7 +112,7 @@ describe('insightLogic', () => { useMocks({ get: { '/api/projects/:team/tags': [], - '/api/projects/:team/insights/trend/': async (req) => { + '/api/environments/:team_id/insights/trend/': async (req) => { const clientQueryId = req.url.searchParams.get('client_query_id') if (clientQueryId !== null) { seenQueryIDs.push(clientQueryId) @@ -131,18 +131,18 @@ describe('insightLogic', () => { } return [200, { result: ['result from api'] }] }, - '/api/projects/:team/insights/path/': { result: ['result from api'] }, - '/api/projects/:team/insights/path': { result: ['result from api'] }, - '/api/projects/:team/insights/funnel/': { result: ['result from api'] }, - '/api/projects/:team/insights/retention/': { result: ['result from api'] }, - '/api/projects/:team/insights/43/': partialInsight43, - '/api/projects/:team/insights/44/': { + '/api/environments/:team_id/insights/path/': { result: ['result from api'] }, + '/api/environments/:team_id/insights/path': { result: ['result from api'] }, + '/api/environments/:team_id/insights/funnel/': { result: ['result from api'] }, + '/api/environments/:team_id/insights/retention/': { result: ['result from api'] }, + '/api/environments/:team_id/insights/43/': partialInsight43, + '/api/environments/:team_id/insights/44/': { id: 44, short_id: Insight44, result: ['result 44'], filters: API_FILTERS, }, - '/api/projects/:team/insights/': (req) => { + '/api/environments/:team_id/insights/': (req) => { if (req.url.searchParams.get('saved')) { return [ 200, @@ -181,7 +181,7 @@ describe('insightLogic', () => { }, ] }, - '/api/projects/:team/dashboards/33/': { + '/api/environments/:team_id/dashboards/33/': { id: 33, filters: {}, tiles: [ @@ -198,7 +198,7 @@ describe('insightLogic', () => { }, ], }, - '/api/projects/:team/dashboards/34/': { + '/api/environments/:team_id/dashboards/34/': { id: 33, filters: {}, tiles: [ @@ -217,16 +217,16 @@ describe('insightLogic', () => { }, }, post: { - '/api/projects/:team/insights/funnel/': { result: ['result from api'] }, - '/api/projects/:team/insights/:id/viewed': [201], - '/api/projects/:team/insights/': (req) => [ + '/api/environments/:team_id/insights/funnel/': { result: ['result from api'] }, + '/api/environments/:team_id/insights/:id/viewed': [201], + '/api/environments/:team_id/insights/': (req) => [ 200, { id: 12, short_id: Insight12, ...((req.body as any) || {}) }, ], - '/api/projects/997/insights/cancel/': [201], + '/api/environments/997/insights/cancel/': [201], }, patch: { - '/api/projects/:team/insights/:id': async (req) => { + '/api/environments/:team_id/insights/:id': async (req) => { const payload = await req.json() const response = patchResponseFor( payload, @@ -737,7 +737,7 @@ describe('insightLogic', () => { const mockCreateCalls = (api.create as jest.Mock).mock.calls expect(mockCreateCalls).toEqual([ [ - `api/projects/${MOCK_TEAM_ID}/insights`, + `api/environments/${MOCK_TEAM_ID}/insights`, expect.objectContaining({ derived_name: 'DataTableNode query', query: { diff --git a/frontend/src/scenes/insights/insightLogic.tsx b/frontend/src/scenes/insights/insightLogic.tsx index cc04937a42ad5..a299d639fee1d 100644 --- a/frontend/src/scenes/insights/insightLogic.tsx +++ b/frontend/src/scenes/insights/insightLogic.tsx @@ -21,7 +21,7 @@ import { dashboardsModel } from '~/models/dashboardsModel' import { groupsModel } from '~/models/groupsModel' import { 
insightsModel } from '~/models/insightsModel' import { tagsModel } from '~/models/tagsModel' -import { DashboardFilter, Node } from '~/queries/schema' +import { DashboardFilter, HogQLVariable, Node } from '~/queries/schema' import { InsightLogicProps, InsightShortId, ItemMode, QueryBasedInsightModel, SetInsightOptions } from '~/types' import { teamLogic } from '../teamLogic' @@ -77,9 +77,14 @@ export const insightLogic: LogicWrapper = kea ({ redirectToViewMode }), saveInsightSuccess: true, saveInsightFailure: true, - loadInsight: (shortId: InsightShortId, filtersOverride?: DashboardFilter | null) => ({ + loadInsight: ( + shortId: InsightShortId, + filtersOverride?: DashboardFilter | null, + variablesOverride?: Record | null + ) => ({ shortId, filtersOverride, + variablesOverride, }), updateInsight: (insightUpdate: Partial, callback?: () => void) => ({ insightUpdate, @@ -96,9 +101,15 @@ export const insightLogic: LogicWrapper = kea { + loadInsight: async ({ shortId, filtersOverride, variablesOverride }, breakpoint) => { await breakpoint(100) - const insight = await insightsApi.getByShortId(shortId, undefined, 'async', filtersOverride) + const insight = await insightsApi.getByShortId( + shortId, + undefined, + 'async', + filtersOverride, + variablesOverride + ) if (!insight) { throw new Error(`Insight with shortId ${shortId} not found`) @@ -417,7 +428,11 @@ export const insightLogic: LogicWrapper = kea { beforeEach(async () => { useMocks({ get: { - '/api/projects/:team/insights/trend/': { result: ['result from api'] }, + '/api/environments/:team_id/insights/trend/': { result: ['result from api'] }, }, post: { - '/api/projects/:team/insights/funnel/': { result: ['result from api'] }, - '/api/projects/:team/insights/': (req) => [ + '/api/environments/:team_id/insights/funnel/': { result: ['result from api'] }, + '/api/environments/:team_id/insights/': (req) => [ 200, { id: 12, short_id: Insight12, ...((req.body as any) || {}) }, ], diff --git a/frontend/src/scenes/insights/insightSceneLogic.tsx b/frontend/src/scenes/insights/insightSceneLogic.tsx index e9b39ef503b59..3f79ace2432d9 100644 --- a/frontend/src/scenes/insights/insightSceneLogic.tsx +++ b/frontend/src/scenes/insights/insightSceneLogic.tsx @@ -19,7 +19,7 @@ import { ActivityFilters } from '~/layout/navigation-3000/sidepanel/panels/activ import { cohortsModel } from '~/models/cohortsModel' import { groupsModel } from '~/models/groupsModel' import { getDefaultQuery } from '~/queries/nodes/InsightViz/utils' -import { DashboardFilter, Node } from '~/queries/schema' +import { DashboardFilter, HogQLVariable, Node } from '~/queries/schema' import { ActivityScope, Breadcrumb, DashboardType, InsightShortId, InsightType, ItemMode } from '~/types' import { insightDataLogic } from './insightDataLogic' @@ -50,9 +50,10 @@ export const insightSceneLogic = kea([ insightMode: ItemMode, itemId: string | undefined, alertId: AlertType['id'] | undefined, + filtersOverride: DashboardFilter | undefined, + variablesOverride: Record | undefined, dashboardId: DashboardType['id'] | undefined, - dashboardName: DashboardType['name'] | undefined, - filtersOverride: DashboardFilter | undefined + dashboardName: DashboardType['name'] | undefined ) => ({ insightId, insightMode, @@ -61,6 +62,7 @@ export const insightSceneLogic = kea([ dashboardId, dashboardName, filtersOverride, + variablesOverride, }), setInsightLogicRef: (logic: BuiltLogic | null, unmount: null | (() => void)) => ({ logic, @@ -122,6 +124,13 @@ export const insightSceneLogic = kea([ setSceneState: 
(_, { filtersOverride }) => (filtersOverride !== undefined ? filtersOverride : null), }, ], + variablesOverride: [ + null as null | Record, + { + setSceneState: (_, { variablesOverride }) => + variablesOverride !== undefined ? variablesOverride : null, + }, + ], insightLogicRef: [ null as null | { logic: BuiltLogic @@ -222,7 +231,11 @@ export const insightSceneLogic = kea([ const oldRef = values.insightLogicRef // free old logic after mounting new one const oldRef2 = values.insightDataLogicRef // free old logic after mounting new one if (insightId) { - const insightProps = { dashboardItemId: insightId, filtersOverride: values.filtersOverride } + const insightProps = { + dashboardItemId: insightId, + filtersOverride: values.filtersOverride, + variablesOverride: values.variablesOverride, + } const logic = insightLogic.build(insightProps) const unmount = logic.mount() @@ -242,7 +255,11 @@ export const insightSceneLogic = kea([ oldRef2.unmount() } } else if (insightId) { - values.insightLogicRef?.logic.actions.loadInsight(insightId as InsightShortId, values.filtersOverride) + values.insightLogicRef?.logic.actions.loadInsight( + insightId as InsightShortId, + values.filtersOverride, + values.variablesOverride + ) } }, })), @@ -294,18 +311,20 @@ export const insightSceneLogic = kea([ insightMode !== values.insightMode || itemId !== values.itemId || alert_id !== values.alertId || + !objectsEqual(searchParams['variables_override'], values.variablesOverride) || + !objectsEqual(filtersOverride, values.filtersOverride) || dashboard !== values.dashboardId || - dashboardName !== values.dashboardName || - !objectsEqual(filtersOverride, values.filtersOverride) + dashboardName !== values.dashboardName ) { actions.setSceneState( insightId, insightMode, itemId, alert_id, + filtersOverride, + searchParams['variables_override'], dashboard, - dashboardName, - filtersOverride + dashboardName ) } diff --git a/frontend/src/scenes/insights/insightUsageLogic.ts b/frontend/src/scenes/insights/insightUsageLogic.ts index 1c1ae8140df08..2b2a2517c0969 100644 --- a/frontend/src/scenes/insights/insightUsageLogic.ts +++ b/frontend/src/scenes/insights/insightUsageLogic.ts @@ -3,7 +3,7 @@ import { subscriptions } from 'kea-subscriptions' import api from 'lib/api' import { objectsEqual } from 'lib/utils' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' -import { teamLogic } from 'scenes/teamLogic' +import { projectLogic } from 'scenes/projectLogic' import { dataNodeLogic, DataNodeLogicProps } from '~/queries/nodes/DataNode/dataNodeLogic' import { insightVizDataNodeKey } from '~/queries/nodes/InsightViz/InsightViz' @@ -23,6 +23,8 @@ export const insightUsageLogic = kea([ path((key) => ['scenes', 'insights', 'insightUsageLogic', key]), connect((props: InsightLogicProps) => ({ values: [ + projectLogic, + ['currentProjectId'], insightLogic(props), ['insight'], dataNodeLogic({ key: insightVizDataNodeKey(props) } as DataNodeLogicProps), @@ -58,7 +60,7 @@ export const insightUsageLogic = kea([ // Report the insight being viewed to our '/viewed' endpoint. Used for "recently viewed insights". if (values.insight.id) { - void api.create(`api/projects/${teamLogic.values.currentTeamId}/insights/${values.insight.id}/viewed`) + void api.create(`api/environments/${values.currentProjectId}/insights/${values.insight.id}/viewed`) } // Debounce to avoid noisy events from the query changing multiple times. 
diff --git a/frontend/src/scenes/insights/insightVizDataLogic.test.ts b/frontend/src/scenes/insights/insightVizDataLogic.test.ts index c3bc313e815a6..266fe56061263 100644 --- a/frontend/src/scenes/insights/insightVizDataLogic.test.ts +++ b/frontend/src/scenes/insights/insightVizDataLogic.test.ts @@ -22,8 +22,8 @@ describe('insightVizDataLogic', () => { beforeEach(() => { useMocks({ get: { - '/api/projects/:team_id/insights/trend': [], - '/api/projects/:team_id/insights/': { results: [{}] }, + '/api/environments/:team_id/insights/trend': [], + '/api/environments/:team_id/insights/': { results: [{}] }, }, }) initKeaTests() diff --git a/frontend/src/scenes/insights/utils.tsx b/frontend/src/scenes/insights/utils.tsx index e9dc44767d875..96d3129e47fa6 100644 --- a/frontend/src/scenes/insights/utils.tsx +++ b/frontend/src/scenes/insights/utils.tsx @@ -137,7 +137,7 @@ export async function getInsightId(shortId: InsightShortId): Promise | null ): Promise { - const legacyInsights = await api.insights.loadInsight(shortId, basic, refresh, filtersOverride) + const legacyInsights = await api.insights.loadInsight( + shortId, + basic, + refresh, + filtersOverride, + variablesOverride + ) if (legacyInsights.results.length === 0) { return null } diff --git a/frontend/src/scenes/insights/utils/queryUtils.ts b/frontend/src/scenes/insights/utils/queryUtils.ts index abfc46ba28eda..5a8659a276c2b 100644 --- a/frontend/src/scenes/insights/utils/queryUtils.ts +++ b/frontend/src/scenes/insights/utils/queryUtils.ts @@ -17,8 +17,21 @@ import { ChartDisplayType } from '~/types' type CompareQueryOpts = { ignoreVisualizationOnlyChanges: boolean } export const getVariablesFromQuery = (query: string): string[] => { - const queryVariableMatches = /\{variables\.([a-z0-9_]+)\}/gm.exec(query) - return (queryVariableMatches ?? 
[]).filter(Boolean) + const re = /\{variables\.([a-z0-9_]+)\}/gm + const results: string[] = [] + + for (;;) { + const reResult = re.exec(query) + if (!reResult) { + break + } + + if (reResult[1]) { + results.push(reResult[1]) + } + } + + return results } export const compareQuery = (a: Node, b: Node, opts?: CompareQueryOpts): boolean => { @@ -41,9 +54,8 @@ export const haveVariablesOrFiltersChanged = (a: Node, b: Node): boolean => { return false } - // If neither queries use variables, then don't submit the query when variables change - if (!getVariablesFromQuery(a.query).length && !getVariablesFromQuery(b.query).length) { - return false + if ((a.variables && !b.variables) || (!a.variables && b.variables)) { + return true } if (a.variables && b.variables) { diff --git a/frontend/src/scenes/insights/views/Funnels/FunnelCorrelationTable.stories.tsx b/frontend/src/scenes/insights/views/Funnels/FunnelCorrelationTable.stories.tsx index 4ea4ee6f1ace9..fd5424be96ec1 100644 --- a/frontend/src/scenes/insights/views/Funnels/FunnelCorrelationTable.stories.tsx +++ b/frontend/src/scenes/insights/views/Funnels/FunnelCorrelationTable.stories.tsx @@ -20,7 +20,7 @@ const meta: Meta = { decorators: [ mswDecorator({ post: { - 'api/projects/:team_id/insights/funnel/correlation/': funnelCorrelation, + 'api/environments/:team_id/insights/funnel/correlation/': funnelCorrelation, }, }), ], diff --git a/frontend/src/scenes/insights/views/Funnels/FunnelPropertyCorrelationTable.stories.tsx b/frontend/src/scenes/insights/views/Funnels/FunnelPropertyCorrelationTable.stories.tsx index 5f7275b4b8818..4bff8e9a36356 100644 --- a/frontend/src/scenes/insights/views/Funnels/FunnelPropertyCorrelationTable.stories.tsx +++ b/frontend/src/scenes/insights/views/Funnels/FunnelPropertyCorrelationTable.stories.tsx @@ -21,7 +21,7 @@ const meta: Meta = { decorators: [ mswDecorator({ post: { - 'api/projects/:team_id/insights/funnel/correlation/': funnelCorrelation, + 'api/environments/:team_id/insights/funnel/correlation/': funnelCorrelation, }, }), taxonomicFilterMocksDecorator, diff --git a/frontend/src/scenes/insights/views/LineGraph/LineGraph.tsx b/frontend/src/scenes/insights/views/LineGraph/LineGraph.tsx index fab835e6c5cff..007ad83541304 100644 --- a/frontend/src/scenes/insights/views/LineGraph/LineGraph.tsx +++ b/frontend/src/scenes/insights/views/LineGraph/LineGraph.tsx @@ -2,7 +2,7 @@ import 'chartjs-adapter-dayjs-3' import { LegendOptions } from 'chart.js' import { DeepPartial } from 'chart.js/dist/types/utils' -import annotationPlugin, { AnnotationPluginOptions } from 'chartjs-plugin-annotation' +import annotationPlugin, { AnnotationOptions } from 'chartjs-plugin-annotation' import ChartDataLabels from 'chartjs-plugin-datalabels' import ChartjsPluginStacked100, { ExtendedChartData } from 'chartjs-plugin-stacked100' import clsx from 'clsx' @@ -279,7 +279,7 @@ export function LineGraph_({ hideYAxis, legend = { display: false }, yAxisScaleType, - alertLines = [], + alertLines, }: LineGraphProps): JSX.Element { let datasets = _datasets @@ -397,8 +397,8 @@ export function LineGraph_({ } } - const annotations = alertLines.reduce((acc, { value }, idx) => { - acc[`${idx}`] = { + const annotations = (alertLines || []).reduce((acc, { value }, idx) => { + acc[idx] = { type: 'line', yMin: value, yMax: value, @@ -408,7 +408,7 @@ export function LineGraph_({ } return acc - }, {} as AnnotationPluginOptions['annotations']) + }, {} as Record) datasets = datasets.map(processDataset) diff --git a/frontend/src/scenes/max/Max.stories.tsx 
b/frontend/src/scenes/max/Max.stories.tsx index 1e9761f352370..65106d4ae4420 100644 --- a/frontend/src/scenes/max/Max.stories.tsx +++ b/frontend/src/scenes/max/Max.stories.tsx @@ -13,7 +13,7 @@ const meta: Meta = { decorators: [ mswDecorator({ post: { - '/api/projects/:team_id/query/chat/': chatResponse, + '/api/environments/:team_id/query/chat/': chatResponse, }, }), ], @@ -86,7 +86,7 @@ export const Thread: StoryFn = () => { export const EmptyThreadLoading: StoryFn = () => { useStorybookMocks({ post: { - '/api/projects/:team_id/query/chat/': (_req, _res, ctx) => [ctx.delay('infinite')], + '/api/environments/:team_id/query/chat/': (_req, _res, ctx) => [ctx.delay('infinite')], }, }) diff --git a/frontend/src/scenes/max/Thread.tsx b/frontend/src/scenes/max/Thread.tsx index 362c2a74b473f..dabc0cae374ca 100644 --- a/frontend/src/scenes/max/Thread.tsx +++ b/frontend/src/scenes/max/Thread.tsx @@ -1,14 +1,18 @@ -import { LemonButton, Spinner } from '@posthog/lemon-ui' +import { IconThumbsDown, IconThumbsDownFilled, IconThumbsUp, IconThumbsUpFilled, IconX } from '@posthog/icons' +import { LemonButton, LemonInput, Spinner } from '@posthog/lemon-ui' import clsx from 'clsx' import { useValues } from 'kea' +import { BreakdownSummary, PropertiesSummary, SeriesSummary } from 'lib/components/Cards/InsightCard/InsightDetails' +import { TopHeading } from 'lib/components/Cards/InsightCard/TopHeading' import { IconOpenInNew } from 'lib/lemon-ui/icons' -import React from 'react' +import posthog from 'posthog-js' +import React, { useRef, useState } from 'react' import { urls } from 'scenes/urls' import { Query } from '~/queries/Query/Query' import { InsightQueryNode, InsightVizNode, NodeKind } from '~/queries/schema' -import { maxLogic } from './maxLogic' +import { maxLogic, ThreadMessage, TrendGenerationResult } from './maxLogic' export function Thread(): JSX.Element | null { const { thread, threadLoading } = useValues(maxLogic) @@ -28,42 +32,12 @@ export function Thread(): JSX.Element | null { ) } - const query = { - kind: NodeKind.InsightVizNode, - source: message.content?.answer, - } - return ( - - {message.content?.reasoning_steps && ( - -
    - {message.content.reasoning_steps.map((step, index) => ( -
- <li key={index}>{step}</li>
  • - ))} -
-
- )} - {message.status === 'completed' && message.content?.answer && ( - -
- -
- } - targetBlank - > - Open as new insight - -
- )} -
+ ) })} {threadLoading && ( @@ -78,14 +52,185 @@ export function Thread(): JSX.Element | null { ) } -function Message({ - role, - children, - className, -}: React.PropsWithChildren<{ role: 'user' | 'assistant'; className?: string }>): JSX.Element { +const Message = React.forwardRef< + HTMLDivElement, + React.PropsWithChildren<{ role: 'user' | 'assistant'; className?: string }> +>(function Message({ role, children, className }, ref): JSX.Element { if (role === 'user') { - return

{children}

+ return ( +
+ {children} +
+ ) } - return
{children}
+ return ( +
+ {children} +
+ ) +}) + +function Answer({ + message, + previousMessage, +}: { + message: ThreadMessage & { content: TrendGenerationResult } + previousMessage: ThreadMessage +}): JSX.Element { + const query: InsightVizNode = { + kind: NodeKind.InsightVizNode, + source: message.content?.answer as InsightQueryNode, + showHeader: true, + } + + return ( + <> + {message.content?.reasoning_steps && ( + +
    + {message.content.reasoning_steps.map((step, index) => ( +
+ <li key={index}>{step}</li>
  • + ))} +
+
+ )} + {message.status === 'completed' && message.content?.answer && ( + <> + +
+ +
+
+ } + size="xsmall" + targetBlank + className="absolute right-0 -top-px" + > + Open as new insight + + } /> +
+ + +
+
+
+ + + )} + + ) +} + +function AnswerActions({ + message, + previousMessage, +}: { + message: ThreadMessage & { content: TrendGenerationResult } + previousMessage: ThreadMessage +}): JSX.Element { + const [rating, setRating] = useState<'good' | 'bad' | null>(null) + const [feedback, setFeedback] = useState('') + const [feedbackInputStatus, setFeedbackInputStatus] = useState<'hidden' | 'pending' | 'submitted'>('hidden') + const hasScrolledFeedbackInputIntoView = useRef(false) + + function submitRating(newRating: 'good' | 'bad'): void { + if (rating) { + return // Already rated + } + setRating(newRating) + posthog.capture('chat rating', { + question: previousMessage.content, + answer: message.content, + answer_rating: rating, + }) + if (newRating === 'bad') { + setFeedbackInputStatus('pending') + } + } + + function submitFeedback(): void { + if (!feedback) { + return // Input is empty + } + posthog.capture('chat feedback', { + question: previousMessage.content, + answer: message.content, + feedback, + }) + setFeedbackInputStatus('submitted') + } + + return ( + <> +
+ {rating !== 'bad' && ( + : } + type="tertiary" + size="small" + tooltip="Good answer" + onClick={() => submitRating('good')} + /> + )} + {rating !== 'good' && ( + : } + type="tertiary" + size="small" + tooltip="Bad answer" + onClick={() => submitRating('bad')} + /> + )} +
+ {feedbackInputStatus !== 'hidden' && ( + { + if (el && !hasScrolledFeedbackInputIntoView.current) { + // When the feedback input is first rendered, scroll it into view + el.scrollIntoView({ behavior: 'smooth' }) + hasScrolledFeedbackInputIntoView.current = true + } + }} + > +
+

+ {feedbackInputStatus === 'pending' + ? 'What disappointed you about the answer?' + : 'Thank you for your feedback!'} +

+ } + type="tertiary" + size="xsmall" + onClick={() => setFeedbackInputStatus('hidden')} + /> +
+ {feedbackInputStatus === 'pending' && ( +
+ setFeedback(newValue)} + onPressEnter={() => submitFeedback()} + autoFocus + /> + submitFeedback()} + disabledReason={!feedback ? 'Please type a few words!' : undefined} + > + Submit + +
+ )} +
+ )} + + ) } diff --git a/frontend/src/scenes/max/maxLogic.ts b/frontend/src/scenes/max/maxLogic.ts index 8d23d41f609a6..be0ca6e22130c 100644 --- a/frontend/src/scenes/max/maxLogic.ts +++ b/frontend/src/scenes/max/maxLogic.ts @@ -11,7 +11,7 @@ export interface MaxLogicProps { sessionId: string } -interface TrendGenerationResult { +export interface TrendGenerationResult { reasoning_steps?: string[] answer?: ExperimentalAITrendsQuery } diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeQuery.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeQuery.tsx index ed8f28eea785d..be59069b7d665 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodeQuery.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeQuery.tsx @@ -2,7 +2,7 @@ import { Query } from '~/queries/Query/Query' import { DataTableNode, InsightQueryNode, InsightVizNode, NodeKind, QuerySchema } from '~/queries/schema' import { createPostHogWidgetNode } from 'scenes/notebooks/Nodes/NodeWrapper' import { InsightLogicProps, InsightShortId, NotebookNodeType } from '~/types' -import { useActions, useMountedLogic, useValues } from 'kea' +import { BindLogic, useActions, useMountedLogic, useValues } from 'kea' import { useEffect, useMemo } from 'react' import { notebookNodeLogic } from './notebookNodeLogic' import { NotebookNodeProps, NotebookNodeAttributeProperties } from '../Notebook/utils' @@ -35,9 +35,11 @@ const Component = ({ const { expanded } = useValues(nodeLogic) const { setTitlePlaceholder } = useActions(nodeLogic) const summarizeInsight = useSummarizeInsight() - const { insightName } = useValues( - insightLogic({ dashboardItemId: query.kind === NodeKind.SavedInsightNode ? query.shortId : 'new' }) - ) + + const insightLogicProps = { + dashboardItemId: query.kind === NodeKind.SavedInsightNode ? query.shortId : ('new' as const), + } + const { insightName } = useValues(insightLogic(insightLogicProps)) useEffect(() => { let title = 'Query' @@ -96,19 +98,21 @@ const Component = ({ return (
- { - updateAttributes({ - query: { - ...attributes.query, - source: (t as DataTableNode | InsightVizNode).source, - } as QuerySchema, - }) - }} - /> + + { + updateAttributes({ + query: { + ...attributes.query, + source: (t as DataTableNode | InsightVizNode).source, + } as QuerySchema, + }) + }} + /> +
) } @@ -132,6 +136,7 @@ export const Settings = ({ modifiedQuery.showResultsTable = false modifiedQuery.showReload = true + modifiedQuery.showExport = true modifiedQuery.showElapsedTime = false modifiedQuery.showTimings = false diff --git a/frontend/src/scenes/notebooks/Notebook/Notebook.stories.tsx b/frontend/src/scenes/notebooks/Notebook/Notebook.stories.tsx index e67b51d7ad639..9b760fe72eb86 100644 --- a/frontend/src/scenes/notebooks/Notebook/Notebook.stories.tsx +++ b/frontend/src/scenes/notebooks/Notebook/Notebook.stories.tsx @@ -206,7 +206,7 @@ const meta: Meta = { decorators: [ mswDecorator({ post: { - 'api/projects/:team_id/query': { + 'api/environments/:team_id/query': { clickhouse: "SELECT nullIf(nullIf(events.`$session_id`, ''), 'null') AS session_id, any(events.properties) AS properties FROM events WHERE and(equals(events.team_id, 1), in(events.event, [%(hogql_val_0)s, %(hogql_val_1)s]), ifNull(in(session_id, [%(hogql_val_2)s]), 0), ifNull(greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_3)s), %(hogql_val_4)s), 0), ifNull(lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_5)s), %(hogql_val_6)s), 0)) GROUP BY session_id LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=True", columns: ['session_id', 'properties'], @@ -275,7 +275,7 @@ const meta: Meta = { ], }, 'api/projects/:team_id/notebooks/12345': notebook12345Json, - 'api/projects/:team_id/session_recordings': { + 'api/environments/:team_id/session_recordings': { results: [ { id: '018a8a51-a39d-7b18-897f-94054eec5f61', diff --git a/frontend/src/scenes/onboarding/onboardingLogic.tsx b/frontend/src/scenes/onboarding/onboardingLogic.tsx index d997ed737197c..448b3e154f5ea 100644 --- a/frontend/src/scenes/onboarding/onboardingLogic.tsx +++ b/frontend/src/scenes/onboarding/onboardingLogic.tsx @@ -119,7 +119,7 @@ export const onboardingLogic = kea([ props({} as OnboardingLogicProps), path(['scenes', 'onboarding', 'onboardingLogic']), // connect this so we start collecting live events the whole time during onboarding - connect(liveEventsTableLogic), + connect(liveEventsTableLogic({ showLiveStreamErrorToast: false })), connect({ values: [ billingLogic, diff --git a/frontend/src/scenes/persons/personsLogic.test.ts b/frontend/src/scenes/persons/personsLogic.test.ts index 6e8127842f135..602ff71a2992b 100644 --- a/frontend/src/scenes/persons/personsLogic.test.ts +++ b/frontend/src/scenes/persons/personsLogic.test.ts @@ -15,7 +15,7 @@ describe('personsLogic', () => { beforeEach(() => { useMocks({ get: { - '/api/projects/:team_id/persons/': (req) => { + '/api/environments/:team_id/persons/': (req) => { if (['+', 'abc', 'xyz'].includes(req.url.searchParams.get('distinct_id') ?? 
'')) { return [200, { results: ['person from api'] }] } @@ -103,7 +103,10 @@ describe('personsLogic', () => { await expectLogic(logic, () => { logic.actions.loadPerson('+') // has encoded from + in the action to %2B in the API call - expect(api.get).toHaveBeenCalledWith(`api/projects/${MOCK_TEAM_ID}/persons?distinct_id=%2B`, undefined) + expect(api.get).toHaveBeenCalledWith( + `api/environments/${MOCK_TEAM_ID}/persons?distinct_id=%2B`, + undefined + ) }) .toDispatchActions(['loadPerson', 'loadPersonSuccess']) .toMatchValues({ diff --git a/frontend/src/scenes/pipeline/BatchExportRuns.tsx b/frontend/src/scenes/pipeline/BatchExportRuns.tsx index 14fcb7812abf5..134fb4ccab8b8 100644 --- a/frontend/src/scenes/pipeline/BatchExportRuns.tsx +++ b/frontend/src/scenes/pipeline/BatchExportRuns.tsx @@ -6,7 +6,7 @@ import { useActions, useValues } from 'kea' import { DateFilter } from 'lib/components/DateFilter/DateFilter' import { NotFound } from 'lib/components/NotFound' import { PageHeader } from 'lib/components/PageHeader' -import { IconRefresh } from 'lib/lemon-ui/icons' +import { IconCancel, IconRefresh } from 'lib/lemon-ui/icons' import { BatchExportConfiguration, BatchExportRun, GroupedBatchExportRuns } from '~/types' @@ -92,7 +92,7 @@ function BatchExportLatestRuns({ id }: BatchExportRunsLogicProps): JSX.Element { const logic = batchExportRunsLogic({ id }) const { batchExportConfig, latestRuns, loading, hasMoreRunsToLoad } = useValues(logic) - const { openBackfillModal, loadOlderRuns, retryRun } = useActions(logic) + const { openBackfillModal, loadOlderRuns, retryRun, cancelRun } = useActions(logic) const { canEnableNewDestinations } = useValues(pipelineAccessLogic) if (!batchExportConfig) { @@ -165,7 +165,12 @@ function BatchExportLatestRuns({ id }: BatchExportRunsLogicProps): JSX.Element { width: 0, render: function RenderActions(_, run) { if (canEnableNewDestinations) { - return + return ( +
+ + +
+ ) } }, }, @@ -362,6 +367,41 @@ function RunRetryButton({ run, retryRun }: { run: any; retryRun: any }): JSX.Ele ) } +function RunCancelButton({ run, cancelRun }: { run: BatchExportRun; cancelRun: any }): JSX.Element { + return ( + + } + disabledReason={ + run.status === 'Running' || run.status === 'Starting' + ? null + : `Cannot cancel as run is '${run.status}'` + } + onClick={() => + LemonDialog.open({ + title: 'Cancel run?', + description: ( + <> +

This will cancel the selected backfill run.

+ + ), + width: '20rem', + primaryButton: { + children: 'Cancel run', + onClick: () => cancelRun(run), + }, + secondaryButton: { + children: 'Go back', + }, + }) + } + /> +
+ ) +} + export function BatchExportRunIcon({ runs, showLabel = false, diff --git a/frontend/src/scenes/pipeline/Pipeline.stories.tsx b/frontend/src/scenes/pipeline/Pipeline.stories.tsx index ce45d28639c7c..ac01c5b62cd53 100644 --- a/frontend/src/scenes/pipeline/Pipeline.stories.tsx +++ b/frontend/src/scenes/pipeline/Pipeline.stories.tsx @@ -55,8 +55,8 @@ export default { '/api/organizations/:organization_id/plugins/repository': [], '/api/organizations/:organization_id/plugins/unused': [], '/api/organizations/:organization_id/plugins/:id': pluginRetrieveMock, - '/api/projects/:team_id/plugin_configs/': pluginConfigs, - '/api/projects/:team_id/plugin_configs/:id': pluginConfigRetrieveMock, + '/api/environments/:team_id/plugin_configs/': pluginConfigs, + '/api/environments/:team_id/plugin_configs/:id': pluginConfigRetrieveMock, // TODO: Differentiate between transformation and destination mocks for nicer mocks '/api/organizations/:organization_id/pipeline_transformations/': plugins, '/api/projects/:team_id/pipeline_transformation_configs/': pluginConfigs, @@ -278,7 +278,7 @@ export function PipelineNodeMetricsErrorModal(): JSX.Element { export function PipelineNodeLogs(): JSX.Element { useStorybookMocks({ get: { - '/api/projects/:team_id/plugin_configs/:plugin_config_id/logs': require('./__mocks__/pluginLogs.json'), + '/api/environments/:team_id/plugin_configs/:plugin_config_id/logs': require('./__mocks__/pluginLogs.json'), }, }) useEffect(() => { @@ -290,7 +290,7 @@ export function PipelineNodeLogs(): JSX.Element { export function PipelineNodeLogsBatchExport(): JSX.Element { useStorybookMocks({ get: { - '/api/projects/:team_id/batch_exports/:export_id/logs': require('./__mocks__/batchExportLogs.json'), + '/api/environments/:team_id/batch_exports/:export_id/logs': require('./__mocks__/batchExportLogs.json'), }, }) useEffect(() => { diff --git a/frontend/src/scenes/pipeline/Pipeline.tsx b/frontend/src/scenes/pipeline/Pipeline.tsx index 40c8147158db6..5e689fea80e56 100644 --- a/frontend/src/scenes/pipeline/Pipeline.tsx +++ b/frontend/src/scenes/pipeline/Pipeline.tsx @@ -39,7 +39,12 @@ export function Pipeline(): JSX.Element { tabs.push({ key: PipelineTab.AppsManagement, content: }) } - tabs.push({ key: PipelineTab.History, content: }) + tabs.push({ + key: PipelineTab.History, + content: ( + + ), + }) return (
diff --git a/frontend/src/scenes/pipeline/PipelineNode.tsx b/frontend/src/scenes/pipeline/PipelineNode.tsx index 44c2340da9298..4500010b99684 100644 --- a/frontend/src/scenes/pipeline/PipelineNode.tsx +++ b/frontend/src/scenes/pipeline/PipelineNode.tsx @@ -5,6 +5,7 @@ import { PageHeader } from 'lib/components/PageHeader' import { LemonTab, LemonTabs } from 'lib/lemon-ui/LemonTabs/LemonTabs' import { capitalizeFirstLetter } from 'lib/utils' import { Schemas } from 'scenes/data-warehouse/settings/source/Schemas' +import { SourceConfiguration } from 'scenes/data-warehouse/settings/source/SourceConfiguration' import { Syncs } from 'scenes/data-warehouse/settings/source/Syncs' import { PipelineNodeLogs } from 'scenes/pipeline/PipelineNodeLogs' import { SceneExport } from 'scenes/sceneTypes' @@ -56,12 +57,12 @@ export function PipelineNode(params: { stage?: string; id?: string } = {}): JSX. if (!stage) { return } - const tabToContent: Partial> = node.backend === PipelineBackend.ManagedSource ? { [PipelineNodeTab.Schemas]: , [PipelineNodeTab.Syncs]: , + [PipelineNodeTab.SourceConfiguration]: , } : { [PipelineNodeTab.Configuration]: , @@ -82,6 +83,15 @@ export function PipelineNode(params: { stage?: string; id?: string } = {}): JSX. tabToContent[PipelineNodeTab.History] = } + if (node.backend === PipelineBackend.HogFunction) { + tabToContent[PipelineNodeTab.History] = ( + + ) + } + return ( <> diff --git a/frontend/src/scenes/pipeline/batchExportRunsLogic.tsx b/frontend/src/scenes/pipeline/batchExportRunsLogic.tsx index a77914fe4cc4e..d9131b91f8729 100644 --- a/frontend/src/scenes/pipeline/batchExportRunsLogic.tsx +++ b/frontend/src/scenes/pipeline/batchExportRunsLogic.tsx @@ -36,6 +36,7 @@ export const batchExportRunsLogic = kea([ switchLatestRuns: (enabled: boolean) => ({ enabled }), loadRuns: true, retryRun: (run: BatchExportRun) => ({ run }), + cancelRun: (run: BatchExportRun) => ({ run }), openBackfillModal: true, closeBackfillModal: true, }), @@ -239,6 +240,10 @@ export const batchExportRunsLogic = kea([ await api.batchExports.retryRun(props.id, run.id) lemonToast.success('Retry has been scheduled.') }, + cancelRun: async ({ run }) => { + await api.batchExports.cancelRun(props.id, run.id) + lemonToast.success('Run has been cancelled.') + }, })), afterMount(({ actions }) => { actions.loadRuns() diff --git a/frontend/src/scenes/pipeline/destinations/destinationsLogic.tsx b/frontend/src/scenes/pipeline/destinations/destinationsLogic.tsx index 38fa9a80270a9..3d13151059182 100644 --- a/frontend/src/scenes/pipeline/destinations/destinationsLogic.tsx +++ b/frontend/src/scenes/pipeline/destinations/destinationsLogic.tsx @@ -109,7 +109,7 @@ export const pipelineDestinationsLogic = kea([ deleteNodeWebhook: async ({ destination }) => { await deleteWithUndo({ - endpoint: `projects/${teamLogic.values.currentTeamId}/plugin_configs`, + endpoint: `environments/${teamLogic.values.currentTeamId}/plugin_configs`, object: { id: destination.id, name: destination.name, diff --git a/frontend/src/scenes/pipeline/hogfunctions/HogFunctionConfiguration.tsx b/frontend/src/scenes/pipeline/hogfunctions/HogFunctionConfiguration.tsx index e0569c8157229..e16f5cadedc7a 100644 --- a/frontend/src/scenes/pipeline/hogfunctions/HogFunctionConfiguration.tsx +++ b/frontend/src/scenes/pipeline/hogfunctions/HogFunctionConfiguration.tsx @@ -13,6 +13,7 @@ import { Link, SpinnerOverlay, } from '@posthog/lemon-ui' +import clsx from 'clsx' import { BindLogic, useActions, useValues } from 'kea' import { Form } from 'kea-forms' import { 
NotFound } from 'lib/components/NotFound' @@ -340,89 +341,97 @@ export function HogFunctionConfiguration({ templateId, id }: { templateId?: stri
- {showSource ? ( - <> - } - size="small" - type="secondary" - className="my-4" - onClick={() => { - setConfigurationValue('inputs_schema', [ - ...(configuration.inputs_schema ?? []), - { - type: 'string', - key: `input_${ - (configuration.inputs_schema?.length ?? 0) + 1 - }`, - label: '', - required: false, - }, - ]) - }} - > - Add input variable - - - {({ value, onChange }) => ( - <> -
- Function source code - setShowSource(false)} - > - Hide source code - -
- - This is the underlying Hog code that will run whenever the - filters match.{' '} - See the docs{' '} - for more info - - onChange(v ?? '')} - globals={globalsWithInputs} - options={{ - minimap: { - enabled: false, - }, - wordWrap: 'on', - scrollBeyondLastLine: false, - automaticLayout: true, - fixedOverflowWidgets: true, - suggest: { - showInlineDetails: true, - }, - quickSuggestionsDelay: 300, - }} - /> - - )} -
- + } + size="small" + type="secondary" + className="my-4" + onClick={() => { + setConfigurationValue('inputs_schema', [ + ...(configuration.inputs_schema ?? []), + { + type: 'string', + key: `input_${(configuration.inputs_schema?.length ?? 0) + 1}`, + label: '', + required: false, + }, + ]) + }} + > + Add input variable + + ) : null} +
+
+ +
+
+
+

Edit source

+ {!showSource ?

Click here to edit the function's source code

: null} +
+ + {!showSource ? ( + setShowSource(true)} + disabledReason={ + !hasAddon + ? 'Editing the source code requires the Data Pipelines addon' + : undefined + } + > + Edit source code + ) : ( -
- setShowSource(true)} - disabledReason={ - !hasAddon - ? 'Editing the source code requires the Data Pipelines addon' - : undefined - } - > - Show function source code - -
+ setShowSource(false)} + > + Hide source code + )}
+ + {showSource ? ( + + {({ value, onChange }) => ( + <> + + This is the underlying Hog code that will run whenever the filters + match. See the docs{' '} + for more info + + onChange(v ?? '')} + globals={globalsWithInputs} + options={{ + minimap: { + enabled: false, + }, + wordWrap: 'on', + scrollBeyondLastLine: false, + automaticLayout: true, + fixedOverflowWidgets: true, + suggest: { + showInlineDetails: true, + }, + quickSuggestionsDelay: 300, + }} + /> + + )} + + ) : null}
{id ? : } diff --git a/frontend/src/scenes/pipeline/hogfunctions/HogFunctionInputs.tsx b/frontend/src/scenes/pipeline/hogfunctions/HogFunctionInputs.tsx index f8d5f6a6c38d8..92c1729a080c2 100644 --- a/frontend/src/scenes/pipeline/hogfunctions/HogFunctionInputs.tsx +++ b/frontend/src/scenes/pipeline/hogfunctions/HogFunctionInputs.tsx @@ -175,7 +175,6 @@ export function HogFunctionInputRenderer({ value, onChange, schema, disabled }: ) case 'dictionary': return - case 'boolean': return onChange?.(checked)} disabled={disabled} /> case 'integration': diff --git a/frontend/src/scenes/pipeline/hogfunctions/activityDescriptions.tsx b/frontend/src/scenes/pipeline/hogfunctions/activityDescriptions.tsx new file mode 100644 index 0000000000000..872e1beebcaf5 --- /dev/null +++ b/frontend/src/scenes/pipeline/hogfunctions/activityDescriptions.tsx @@ -0,0 +1,232 @@ +import { DiffEditor } from '@monaco-editor/react' +import { + ActivityLogItem, + defaultDescriber, + HumanizedChange, + userNameForLogItem, +} from 'lib/components/ActivityLog/humanizeActivity' +import { LemonDropdown } from 'lib/lemon-ui/LemonDropdown' +import { Link } from 'lib/lemon-ui/Link' +import { initHogLanguage } from 'lib/monaco/languages/hog' +import { urls } from 'scenes/urls' + +import { PipelineNodeTab, PipelineStage } from '~/types' + +const nameOrLinkToHogFunction = (id?: string | null, name?: string | null): string | JSX.Element => { + const displayName = name || '(empty string)' + return id ? ( + + {displayName} + + ) : ( + displayName + ) +} + +export interface DiffProps { + before: string + after: string + language?: string +} + +export function Diff({ before, after, language }: DiffProps): JSX.Element { + return ( + { + if (language === 'hog') { + initHogLanguage(monaco) + } + }} + options={{ + lineNumbers: 'off', + minimap: { enabled: false }, + folding: false, + wordWrap: 'on', + renderLineHighlight: 'none', + scrollbar: { vertical: 'auto', horizontal: 'hidden' }, + overviewRulerBorder: false, + hideCursorInOverviewRuler: true, + overviewRulerLanes: 0, + tabFocusMode: true, + enableSplitViewResizing: false, + renderSideBySide: false, + readOnly: true, + }} + /> + ) +} + +export interface DiffLinkProps extends DiffProps { + children: string | JSX.Element +} + +export function DiffLink({ before, after, language, children }: DiffLinkProps): JSX.Element { + return ( + + +
+ } + > + {children} + + ) +} + +export function hogFunctionActivityDescriber(logItem: ActivityLogItem, asNotification?: boolean): HumanizedChange { + if (logItem.scope != 'HogFunction') { + console.error('HogFunction describer received a non-HogFunction activity') + return { description: null } + } + + const objectNoun = logItem?.detail.type ?? 'hog function' + + if (logItem.activity == 'created') { + return { + description: ( + <> + {userNameForLogItem(logItem)} created the {objectNoun}:{' '} + {nameOrLinkToHogFunction(logItem?.item_id, logItem?.detail.name)} + + ), + } + } + + if (logItem.activity == 'deleted') { + return { + description: ( + <> + {userNameForLogItem(logItem)} deleted the {objectNoun}: {logItem.detail.name} + + ), + } + } + + if (logItem.activity == 'updated') { + const changes: { inline: string | JSX.Element; inlist: string | JSX.Element }[] = [] + for (const change of logItem.detail.changes ?? []) { + switch (change.field) { + case 'encrypted_inputs': { + changes.push({ + inline: 'updated encrypted inputs for', + inlist: 'updated encrypted inputs', + }) + break + } + case 'inputs': { + const changedFields: JSX.Element[] = [] + Object.entries(change.after ?? {}).forEach(([key, value]) => { + const before = JSON.stringify(change.before?.[key]?.value) + const after = JSON.stringify(value?.value) + if (before !== after) { + changedFields.push( + + {key} + + ) + } + }) + const changedSpans: JSX.Element[] = [] + for (let index = 0; index < changedFields.length; index++) { + if (index !== 0 && index === changedFields.length - 1) { + changedSpans.push(<>{' and '}) + } else if (index > 0) { + changedSpans.push(<>{', '}) + } + changedSpans.push(changedFields[index]) + } + const inputOrInputs = changedFields.length === 1 ? 'input' : 'inputs' + changes.push({ + inline: ( + <> + updated the {inputOrInputs} {changedSpans} for + + ), + inlist: ( + <> + updated {inputOrInputs}: {changedSpans} + + ), + }) + break + } + case 'inputs_schema': + case 'filters': + case 'hog': + case 'name': + case 'description': + case 'masking': { + const code = ( + + {change.field === 'hog' + ? 'source code' + : change.field === 'inputs_schema' + ? 'inputs schema' + : change.field} + + ) + changes.push({ inline: <>updated {code} for, inlist: <>updated {code} }) + break + } + case 'deleted': { + if (change.after) { + changes.push({ inline: 'deleted', inlist: `deleted the ${objectNoun}` }) + } else { + changes.push({ inline: 'undeleted', inlist: `undeleted the ${objectNoun}` }) + } + break + } + case 'enabled': { + if (change.after) { + changes.push({ inline: 'enabled', inlist: `enabled the ${objectNoun}` }) + } else { + changes.push({ inline: 'disabled', inlist: `disabled the ${objectNoun}` }) + } + break + } + default: + changes.push({ + inline: `updated unknown field: ${change.field}`, + inlist: `updated unknown field: ${change.field}`, + }) + } + } + const name = userNameForLogItem(logItem) + const functionName = nameOrLinkToHogFunction(logItem?.item_id, logItem?.detail.name) + + return { + description: + changes.length == 1 ? ( + <> + {name} {changes[0].inline} the {objectNoun}: {functionName} + + ) : ( +
+ {name} updated the {objectNoun}: {functionName} +
    + {changes.map((c, i) => ( +
+ <li key={i}>{c.inlist}</li>
  • + ))} +
+
+ ), + } + } + return defaultDescriber(logItem, asNotification, nameOrLinkToHogFunction(logItem?.detail.short_id)) +} diff --git a/frontend/src/scenes/pipeline/hogfunctions/hogFunctionConfigurationLogic.test.ts b/frontend/src/scenes/pipeline/hogfunctions/hogFunctionConfigurationLogic.test.ts index a6daff1105e64..79cfaf648f8bc 100644 --- a/frontend/src/scenes/pipeline/hogfunctions/hogFunctionConfigurationLogic.test.ts +++ b/frontend/src/scenes/pipeline/hogfunctions/hogFunctionConfigurationLogic.test.ts @@ -183,6 +183,9 @@ describe('hogFunctionConfigurationLogic', () => { person: '{person}', }, }, + debug: { + value: false, + }, }, enabled: true, }) diff --git a/frontend/src/scenes/pipeline/hogfunctions/hogFunctionConfigurationLogic.tsx b/frontend/src/scenes/pipeline/hogfunctions/hogFunctionConfigurationLogic.tsx index fa9bacd6f1226..45cc43ccdbbdf 100644 --- a/frontend/src/scenes/pipeline/hogfunctions/hogFunctionConfigurationLogic.tsx +++ b/frontend/src/scenes/pipeline/hogfunctions/hogFunctionConfigurationLogic.tsx @@ -112,7 +112,7 @@ const templateToConfiguration = ( template.inputs_schema?.forEach((schema) => { if (typeof subTemplate?.inputs?.[schema.key] !== 'undefined') { inputs[schema.key] = { value: subTemplate.inputs[schema.key] } - } else if (schema.default) { + } else if (schema.default !== undefined) { inputs[schema.key] = { value: schema.default } } }) diff --git a/frontend/src/scenes/pipeline/hogfunctions/integrations/HogFunctionInputIntegration.tsx b/frontend/src/scenes/pipeline/hogfunctions/integrations/HogFunctionInputIntegration.tsx index ce0e291ede946..f92b2f9123deb 100644 --- a/frontend/src/scenes/pipeline/hogfunctions/integrations/HogFunctionInputIntegration.tsx +++ b/frontend/src/scenes/pipeline/hogfunctions/integrations/HogFunctionInputIntegration.tsx @@ -1,4 +1,5 @@ import { useActions } from 'kea' +import { LemonBanner } from 'lib/lemon-ui/LemonBanner' import { HogFunctionInputSchemaType } from '~/types' @@ -12,11 +13,21 @@ export type HogFunctionInputIntegrationProps = IntegrationConfigureProps & { export function HogFunctionInputIntegration({ schema, ...props }: HogFunctionInputIntegrationProps): JSX.Element { const { persistForUnload } = useActions(hogFunctionConfigurationLogic) return ( - persistForUnload()} - /> + <> + persistForUnload()} + /> + {schema.type === 'integration' && schema.integration === 'google-ads' ? ( + + + We are still waiting for our Google Ads integration to be approved. You might see a `Google + hasn’t verified this app` warning when trying to connect your account. + + + ) : null} + ) } diff --git a/frontend/src/scenes/pipeline/hogfunctions/integrations/IntegrationChoice.tsx b/frontend/src/scenes/pipeline/hogfunctions/integrations/IntegrationChoice.tsx index f0496b5eafeb6..cee61f7c80c88 100644 --- a/frontend/src/scenes/pipeline/hogfunctions/integrations/IntegrationChoice.tsx +++ b/frontend/src/scenes/pipeline/hogfunctions/integrations/IntegrationChoice.tsx @@ -41,6 +41,8 @@ export function IntegrationChoice({ ? 'Google Cloud Pub/Sub' : kind == 'google-cloud-storage' ? 'Google Cloud Storage' + : kind == 'google-ads' + ? 'Google Ads' : capitalizeFirstLetter(kind) function uploadKey(kind: string): void { @@ -72,7 +74,7 @@ export function IntegrationChoice({ ], } : null, - kind.startsWith('google-') + ['google-pubsub', 'google-cloud-storage'].includes(kind) ? 
{ items: [ { diff --git a/frontend/src/scenes/project-homepage/ProjectHomepage.stories.tsx b/frontend/src/scenes/project-homepage/ProjectHomepage.stories.tsx index a3857dc2c8c2b..922c3de9476be 100644 --- a/frontend/src/scenes/project-homepage/ProjectHomepage.stories.tsx +++ b/frontend/src/scenes/project-homepage/ProjectHomepage.stories.tsx @@ -12,11 +12,11 @@ const meta: Meta = { decorators: [ mswDecorator({ get: { - '/api/projects/:team_id/dashboards/': require('../dashboard/__mocks__/dashboards.json'), - '/api/projects/:team_id/dashboards/1/': require('../dashboard/__mocks__/dashboard1.json'), - '/api/projects/:team_id/dashboards/1/collaborators/': [], - '/api/projects/:team_id/session_recordings/': EMPTY_PAGINATED_RESPONSE, - '/api/projects/:team_id/insights/my_last_viewed/': [], + '/api/environments/:team_id/dashboards/': require('../dashboard/__mocks__/dashboards.json'), + '/api/environments/:team_id/dashboards/1/': require('../dashboard/__mocks__/dashboard1.json'), + '/api/environments/:team_id/dashboards/1/collaborators/': [], + '/api/environments/:team_id/session_recordings/': EMPTY_PAGINATED_RESPONSE, + '/api/environments/:team_id/insights/my_last_viewed/': [], }, }), ], diff --git a/frontend/src/scenes/project-homepage/projectHomepageLogic.test.ts b/frontend/src/scenes/project-homepage/projectHomepageLogic.test.ts index 0818d735c1536..d778680ced854 100644 --- a/frontend/src/scenes/project-homepage/projectHomepageLogic.test.ts +++ b/frontend/src/scenes/project-homepage/projectHomepageLogic.test.ts @@ -15,9 +15,9 @@ describe('projectHomepageLogic', () => { beforeEach(() => { useMocks({ get: { - '/api/projects/:team/dashboards/1/': dashboardJson, - '/api/projects/:team/insights/': { results: ['result from api'] }, - '/api/projects/:team/persons/': { results: ['result from api'] }, + '/api/environments/:team_id/dashboards/1/': dashboardJson, + '/api/environments/:team_id/insights/': { results: ['result from api'] }, + '/api/environments/:team_id/persons/': { results: ['result from api'] }, }, }) initKeaTests() diff --git a/frontend/src/scenes/project-homepage/projectHomepageLogic.tsx b/frontend/src/scenes/project-homepage/projectHomepageLogic.tsx index af349e3aa86bc..619e6765fcc14 100644 --- a/frontend/src/scenes/project-homepage/projectHomepageLogic.tsx +++ b/frontend/src/scenes/project-homepage/projectHomepageLogic.tsx @@ -2,6 +2,7 @@ import { afterMount, connect, kea, path, selectors } from 'kea' import { loaders } from 'kea-loaders' import api from 'lib/api' import { DashboardLogicProps } from 'scenes/dashboard/dashboardLogic' +import { projectLogic } from 'scenes/projectLogic' import { teamLogic } from 'scenes/teamLogic' import { getQueryBasedInsightModel } from '~/queries/nodes/InsightViz/utils' @@ -12,7 +13,7 @@ import type { projectHomepageLogicType } from './projectHomepageLogicType' export const projectHomepageLogic = kea([ path(['scenes', 'project-homepage', 'projectHomepageLogic']), connect({ - values: [teamLogic, ['currentTeamId', 'currentTeam']], + values: [teamLogic, ['currentTeam'], projectLogic, ['currentProjectId']], }), selectors({ @@ -35,7 +36,7 @@ export const projectHomepageLogic = kea([ { loadRecentInsights: async () => { const insights = await api.get( - `api/projects/${values.currentTeamId}/insights/my_last_viewed` + `api/environments/${values.currentProjectId}/insights/my_last_viewed` ) return insights.map((legacyInsight) => getQueryBasedInsightModel(legacyInsight)) }, diff --git a/frontend/src/scenes/projectLogic.ts b/frontend/src/scenes/projectLogic.ts 
index fca5367075fb6..8712f9d4d8279 100644 --- a/frontend/src/scenes/projectLogic.ts +++ b/frontend/src/scenes/projectLogic.ts @@ -1,6 +1,6 @@ -import { actions, afterMount, connect, kea, listeners, path, reducers } from 'kea' +import { actions, afterMount, connect, kea, listeners, path, reducers, selectors } from 'kea' import { loaders } from 'kea-loaders' -import api from 'lib/api' +import api, { ApiConfig } from 'lib/api' import { lemonToast } from 'lib/lemon-ui/LemonToast' import { identifierToHuman, isUserLoggedIn } from 'lib/utils' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' @@ -80,7 +80,15 @@ export const projectLogic = kea([ }, ], })), + selectors({ + currentProjectId: [(s) => [s.currentProject], (currentProject) => currentProject?.id || null], + }), listeners(({ actions }) => ({ + loadCurrentProjectSuccess: ({ currentProject }) => { + if (currentProject) { + ApiConfig.setCurrentProjectId(currentProject.id) + } + }, deleteProject: async ({ project }) => { try { await api.delete(`api/projects/${project.id}`) diff --git a/frontend/src/scenes/saved-insights/SavedInsights.stories.tsx b/frontend/src/scenes/saved-insights/SavedInsights.stories.tsx index 8f9ff797ee74b..673a3076fb654 100644 --- a/frontend/src/scenes/saved-insights/SavedInsights.stories.tsx +++ b/frontend/src/scenes/saved-insights/SavedInsights.stories.tsx @@ -23,7 +23,7 @@ const meta: Meta = { decorators: [ mswDecorator({ get: { - '/api/projects/:team_id/insights': toPaginatedResponse( + '/api/environments/:team_id/insights': toPaginatedResponse( insightsJson.results.slice(0, 6).map((result, i) => ({ // Keep size of response in check ...result, @@ -56,7 +56,7 @@ CardView.parameters = { export const EmptyState: Story = () => { useStorybookMocks({ get: { - '/api/projects/:team_id/insights': EMPTY_PAGINATED_RESPONSE, + '/api/environments/:team_id/insights': EMPTY_PAGINATED_RESPONSE, }, }) useEffect(() => { diff --git a/frontend/src/scenes/saved-insights/SavedInsights.tsx b/frontend/src/scenes/saved-insights/SavedInsights.tsx index 4810a04db4bdf..554f0be7f5c45 100644 --- a/frontend/src/scenes/saved-insights/SavedInsights.tsx +++ b/frontend/src/scenes/saved-insights/SavedInsights.tsx @@ -338,14 +338,14 @@ export const QUERY_TYPES_METADATA: Record = { icon: IconVideoCamera, inMenu: false, }, - [NodeKind.ExperimentTrendQuery]: { - name: 'Experiment Result', + [NodeKind.ExperimentTrendsQuery]: { + name: 'Experiment Trends Result', description: 'View experiment trend result', icon: IconFlask, inMenu: false, }, - [NodeKind.ExperimentFunnelQuery]: { - name: 'Experiment Funnel', + [NodeKind.ExperimentFunnelsQuery]: { + name: 'Experiment Funnels Result', description: 'View experiment funnel result', icon: IconFlask, inMenu: false, diff --git a/frontend/src/scenes/saved-insights/savedInsightsLogic.test.ts b/frontend/src/scenes/saved-insights/savedInsightsLogic.test.ts index 308be468581cf..47729fab591fa 100644 --- a/frontend/src/scenes/saved-insights/savedInsightsLogic.test.ts +++ b/frontend/src/scenes/saved-insights/savedInsightsLogic.test.ts @@ -52,18 +52,18 @@ describe('savedInsightsLogic', () => { beforeEach(() => { useMocks({ get: { - '/api/projects/:team/insights/': (req) => [ + '/api/environments/:team_id/insights/': (req) => [ 200, createSavedInsights( req.url.searchParams.get('search') ?? '', parseInt(req.url.searchParams.get('offset') ?? 
'0') ), ], - '/api/projects/:team/insights/42': createInsight(42), - '/api/projects/:team/insights/123': createInsight(123), + '/api/environments/:team_id/insights/42': createInsight(42), + '/api/environments/:team_id/insights/123': createInsight(123), }, post: { - '/api/projects/:team/insights/': () => [200, createInsight(42)], + '/api/environments/:team_id/insights/': () => [200, createInsight(42)], }, }) initKeaTests() @@ -192,7 +192,7 @@ describe('savedInsightsLogic', () => { sourceInsight.derived_name = 'should be copied' await logic.asyncActions.duplicateInsight(sourceInsight) expect(api.create).toHaveBeenCalledWith( - `api/projects/${MOCK_TEAM_ID}/insights`, + `api/environments/${MOCK_TEAM_ID}/insights`, expect.objectContaining({ name: '' }), expect.objectContaining({}) ) @@ -204,7 +204,7 @@ describe('savedInsightsLogic', () => { sourceInsight.derived_name = '' await logic.asyncActions.duplicateInsight(sourceInsight) expect(api.create).toHaveBeenCalledWith( - `api/projects/${MOCK_TEAM_ID}/insights`, + `api/environments/${MOCK_TEAM_ID}/insights`, expect.objectContaining({ name: 'should be copied (copy)' }), expect.objectContaining({}) ) diff --git a/frontend/src/scenes/saved-insights/savedInsightsLogic.ts b/frontend/src/scenes/saved-insights/savedInsightsLogic.ts index 923b13bc2999f..1b958165f4f3b 100644 --- a/frontend/src/scenes/saved-insights/savedInsightsLogic.ts +++ b/frontend/src/scenes/saved-insights/savedInsightsLogic.ts @@ -104,7 +104,7 @@ export const savedInsightsLogic = kea([ } const legacyResponse: CountedPaginatedResponse = await api.get( - `api/projects/${teamLogic.values.currentTeamId}/insights/?${toParams(params)}` + `api/environments/${teamLogic.values.currentTeamId}/insights/?${toParams(params)}` ) const response = { ...legacyResponse, diff --git a/frontend/src/scenes/session-recordings/SessionsRecordings-player-failure.stories.tsx b/frontend/src/scenes/session-recordings/SessionsRecordings-player-failure.stories.tsx index 6b8f14a78cc20..bb9063ed6c933 100644 --- a/frontend/src/scenes/session-recordings/SessionsRecordings-player-failure.stories.tsx +++ b/frontend/src/scenes/session-recordings/SessionsRecordings-player-failure.stories.tsx @@ -23,7 +23,7 @@ const meta: Meta = { // API is set up so that everything except the call to load session recording metadata succeeds mswDecorator({ get: { - '/api/projects/:team_id/session_recordings': (req) => { + '/api/environments/:team_id/session_recordings': (req) => { const version = req.url.searchParams.get('version') return [ 200, @@ -88,7 +88,7 @@ const meta: Meta = { const response = playlistId === '1234567' ? recordings : [] return [200, { has_next: false, results: response, version: 1 }] }, - '/api/projects/:team/session_recordings/:id/snapshots': (req, res, ctx) => { + '/api/environments/:team_id/session_recordings/:id/snapshots': (req, res, ctx) => { // with no sources, returns sources... 
if (req.url.searchParams.get('source') === 'blob') { return res(ctx.text(snapshotsAsJSONLines())) @@ -108,7 +108,7 @@ const meta: Meta = { }, ] }, - '/api/projects/:team/session_recordings/:id': () => { + '/api/environments/:team_id/session_recordings/:id': () => { return [404, {}] }, 'api/projects/:team/notebooks': { @@ -119,7 +119,7 @@ const meta: Meta = { }, }, post: { - '/api/projects/:team/query': recordingEventsJson, + '/api/environments/:team_id/query': recordingEventsJson, }, }), ], diff --git a/frontend/src/scenes/session-recordings/SessionsRecordings-player-success.stories.tsx b/frontend/src/scenes/session-recordings/SessionsRecordings-player-success.stories.tsx index 39b6bbd3d9980..c4f13b003c2c7 100644 --- a/frontend/src/scenes/session-recordings/SessionsRecordings-player-success.stories.tsx +++ b/frontend/src/scenes/session-recordings/SessionsRecordings-player-success.stories.tsx @@ -23,7 +23,7 @@ const meta: Meta = { decorators: [ mswDecorator({ get: { - '/api/projects/:team_id/session_recordings': (req) => { + '/api/environments/:team_id/session_recordings': (req) => { const version = req.url.searchParams.get('version') return [ 200, @@ -88,7 +88,7 @@ const meta: Meta = { const response = playlistId === '1234567' ? recordings : [] return [200, { has_next: false, results: response, version: 1 }] }, - '/api/projects/:team/session_recordings/:id/snapshots': (req, res, ctx) => { + '/api/environments/:team_id/session_recordings/:id/snapshots': (req, res, ctx) => { // with no sources, returns sources... if (req.url.searchParams.get('source') === 'blob') { return res(ctx.text(snapshotsAsJSONLines())) @@ -108,7 +108,7 @@ const meta: Meta = { }, ] }, - '/api/projects/:team/session_recordings/:id': recordingMetaJson, + '/api/environments/:team_id/session_recordings/:id': recordingMetaJson, 'api/projects/:team/notebooks': { count: 0, next: null, @@ -117,7 +117,7 @@ const meta: Meta = { }, }, post: { - '/api/projects/:team/query': (req, res, ctx) => { + '/api/environments/:team_id/query': (req, res, ctx) => { const body = req.body as Record if ( diff --git a/frontend/src/scenes/session-recordings/SessionsRecordings-playlist-listing.stories.tsx b/frontend/src/scenes/session-recordings/SessionsRecordings-playlist-listing.stories.tsx index f90ee0ed1285a..86ef8078ac397 100644 --- a/frontend/src/scenes/session-recordings/SessionsRecordings-playlist-listing.stories.tsx +++ b/frontend/src/scenes/session-recordings/SessionsRecordings-playlist-listing.stories.tsx @@ -22,7 +22,7 @@ const meta: Meta = { mswDecorator({ get: { '/api/projects/:team_id/session_recording_playlists': recording_playlists, - '/api/projects/:team_id/session_recordings': (req) => { + '/api/environments/:team_id/session_recordings': (req) => { const version = req.url.searchParams.get('version') return [ 200, @@ -35,7 +35,7 @@ const meta: Meta = { }, }, post: { - '/api/projects/:team/query': recordingEventsJson, + '/api/environments/:team_id/query': recordingEventsJson, }, }), ], diff --git a/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorControls.tsx b/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorControls.tsx index 9dd41215f9190..c56eeaf3a9683 100644 --- a/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorControls.tsx +++ b/frontend/src/scenes/session-recordings/player/inspector/PlayerInspectorControls.tsx @@ -5,6 +5,7 @@ import { FEATURE_FLAGS } from 'lib/constants' import { IconUnverifiedEvent } from 'lib/lemon-ui/icons' import { Spinner } from 
'lib/lemon-ui/Spinner/Spinner' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' +import { userPreferencesLogic } from 'lib/logic/userPreferencesLogic' import { capitalizeFirstLetter } from 'lib/utils' import { IconWindow } from 'scenes/session-recordings/player/icons' @@ -19,6 +20,81 @@ import { import { InspectorSearchInfo } from './components/InspectorSearchInfo' import { playerInspectorLogic } from './playerInspectorLogic' +function HideProperties(): JSX.Element | null { + const { logicProps } = useValues(sessionRecordingPlayerLogic) + const inspectorLogic = playerInspectorLogic(logicProps) + const { tab } = useValues(inspectorLogic) + const { hidePostHogPropertiesInTable } = useValues(userPreferencesLogic) + const { setHidePostHogPropertiesInTable } = useActions(userPreferencesLogic) + + return tab === SessionRecordingPlayerTab.EVENTS ? ( + + ) : null +} + +function MiniFilters(): JSX.Element { + const { miniFilters } = useValues(playerSettingsLogic) + const { setMiniFilter } = useActions(playerSettingsLogic) + + return ( +
+ {miniFilters.map((filter) => ( + { + // "alone" should always be a select-to-true action + setMiniFilter(filter.key, filter.alone || !filter.enabled) + }} + tooltip={filter.tooltip} + > + {filter.name} + + ))} +
+ ) +} + +function WindowSelector(): JSX.Element { + const { logicProps } = useValues(sessionRecordingPlayerLogic) + const inspectorLogic = playerInspectorLogic(logicProps) + const { windowIdFilter, windowIds } = useValues(inspectorLogic) + const { setWindowIdFilter } = useActions(inspectorLogic) + + return windowIds.length > 1 ? ( + setWindowIdFilter(val || null)} + options={[ + { + value: null, + label: 'All windows', + icon: , + }, + ...windowIds.map((windowId, index) => ({ + value: windowId, + label: `Window ${index + 1}`, + icon: , + })), + ]} + tooltip="Each recording window translates to a distinct browser tab or window." + /> + ) : ( + // returns an empty div to keep spacing/positioning consistent +
+ ) +} + export const TabToIcon = { [SessionRecordingPlayerTab.ALL]: undefined, [SessionRecordingPlayerTab.EVENTS]: IconUnverifiedEvent, @@ -68,10 +144,10 @@ function TabButtons({ export function PlayerInspectorControls(): JSX.Element { const { logicProps } = useValues(sessionRecordingPlayerLogic) const inspectorLogic = playerInspectorLogic(logicProps) - const { tab, windowIdFilter, windowIds, showMatchingEventsFilter } = useValues(inspectorLogic) - const { setWindowIdFilter, setTab } = useActions(inspectorLogic) - const { showOnlyMatching, miniFilters, searchQuery } = useValues(playerSettingsLogic) - const { setShowOnlyMatching, setMiniFilter, setSearchQuery } = useActions(playerSettingsLogic) + const { tab, showMatchingEventsFilter } = useValues(inspectorLogic) + const { setTab } = useActions(inspectorLogic) + const { showOnlyMatching, searchQuery } = useValues(playerSettingsLogic) + const { setShowOnlyMatching, setSearchQuery } = useActions(playerSettingsLogic) const mode = logicProps.mode ?? SessionRecordingPlayerMode.Standard @@ -108,67 +184,27 @@ export function PlayerInspectorControls(): JSX.Element {
-
- setSearchQuery(e)} - placeholder="Search..." - type="search" - value={searchQuery} - fullWidth - className="min-w-60" - suffix={ - }> - - - } - /> -
+ setSearchQuery(e)} + placeholder="Search..." + type="search" + value={searchQuery} + fullWidth + className="min-w-60" + suffix={ + }> + + + } + /> -
- {miniFilters.map((filter) => ( - { - // "alone" should always be a select-to-true action - setMiniFilter(filter.key, filter.alone || !filter.enabled) - }} - tooltip={filter.tooltip} - > - {filter.name} - - ))} -
+ - {windowIds.length > 1 ? ( -
- setWindowIdFilter(val || null)} - options={[ - { - value: null, - label: 'All windows', - icon: , - }, - ...windowIds.map((windowId, index) => ({ - value: windowId, - label: `Window ${index + 1}`, - icon: , - })), - ]} - tooltip="Each recording window translates to a distinct browser tab or window." - /> -
- ) : null} +
+ + +
{showMatchingEventsFilter ? (
diff --git a/frontend/src/scenes/session-recordings/player/inspector/components/ItemEvent.tsx b/frontend/src/scenes/session-recordings/player/inspector/components/ItemEvent.tsx index 97a7dc544e11c..9590dd5be3e5c 100644 --- a/frontend/src/scenes/session-recordings/player/inspector/components/ItemEvent.tsx +++ b/frontend/src/scenes/session-recordings/player/inspector/components/ItemEvent.tsx @@ -1,4 +1,5 @@ import { LemonButton, LemonDivider } from '@posthog/lemon-ui' +import { useValues } from 'kea' import { ErrorDisplay } from 'lib/components/Errors/ErrorDisplay' import { PropertyKeyInfo } from 'lib/components/PropertyKeyInfo' import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' @@ -7,6 +8,7 @@ import { IconOpenInNew } from 'lib/lemon-ui/icons' import { Spinner } from 'lib/lemon-ui/Spinner' import { autoCaptureEventToDescription, capitalizeFirstLetter, isString } from 'lib/utils' import { insightUrlForEvent } from 'scenes/insights/utils' +import { eventPropertyFilteringLogic } from 'scenes/session-recordings/player/inspector/components/eventPropertyFilteringLogic' import { InspectorListItemEvent } from '../playerInspectorLogic' import { SimpleKeyValueList } from './SimpleKeyValueList' @@ -51,6 +53,7 @@ function SummarizeWebVitals({ properties }: { properties: Record }) export function ItemEvent({ item, expanded, setExpanded }: ItemEventProps): JSX.Element { const insightUrl = insightUrlForEvent(item.data) + const { promoteProperties, filterProperties } = useValues(eventPropertyFilteringLogic) const subValue = item.data.event === '$pageview' ? ( @@ -61,25 +64,7 @@ export function ItemEvent({ item, expanded, setExpanded }: ItemEventProps): JSX. ) : undefined - let promotedKeys: string[] | undefined = undefined - if (item.data.event === '$pageview') { - promotedKeys = ['$current_url', '$title', '$referrer'] - } else if (item.data.event === '$groupidentify') { - promotedKeys = ['$group_type', '$group_key', '$group_set'] - } else if (item.data.event === '$screen') { - promotedKeys = ['$screen_name'] - } else if (item.data.event === '$web_vitals') { - promotedKeys = [ - '$web_vitals_FCP_value', - '$web_vitals_CLS_value', - '$web_vitals_INP_value', - '$web_vitals_LCP_value', - '$web_vitals_FCP_event', - '$web_vitals_CLS_event', - '$web_vitals_INP_event', - '$web_vitals_LCP_event', - ] - } + const promotedKeys = promoteProperties(item.data.event) return (
@@ -129,7 +114,10 @@ export function ItemEvent({ item, expanded, setExpanded }: ItemEventProps): JSX. item.data.event === '$exception' ? ( ) : ( - + ) ) : (
diff --git a/frontend/src/scenes/session-recordings/player/inspector/components/SimpleKeyValueList.tsx b/frontend/src/scenes/session-recordings/player/inspector/components/SimpleKeyValueList.tsx index 7f51b758bd253..9b530fce41d81 100644 --- a/frontend/src/scenes/session-recordings/player/inspector/components/SimpleKeyValueList.tsx +++ b/frontend/src/scenes/session-recordings/player/inspector/components/SimpleKeyValueList.tsx @@ -1,6 +1,8 @@ // A React component that renders a list of key-value pairs in a simple way. import { PropertyKeyInfo } from 'lib/components/PropertyKeyInfo' +import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' +import { getCoreFilterDefinition } from 'lib/taxonomy' import { useEffect, useState } from 'react' export interface SimpleKeyValueListProps { @@ -22,10 +24,14 @@ export function SimpleKeyValueList({ useEffect(() => { const sortedItems = Object.entries(item).sort((a, b) => { - if (a[0] < b[0]) { + // if this is a posthog property we want to sort by its label + const left = getCoreFilterDefinition(a[0], TaxonomicFilterGroupType.EventProperties)?.label || a[0] + const right = getCoreFilterDefinition(b[0], TaxonomicFilterGroupType.EventProperties)?.label || b[0] + + if (left < right) { return -1 } - if (a[0] > b[0]) { + if (left > right) { return 1 } return 0 diff --git a/frontend/src/scenes/session-recordings/player/inspector/components/eventPropertyFilteringLogic.ts b/frontend/src/scenes/session-recordings/player/inspector/components/eventPropertyFilteringLogic.ts new file mode 100644 index 0000000000000..2f3ca7b84ae6c --- /dev/null +++ b/frontend/src/scenes/session-recordings/player/inspector/components/eventPropertyFilteringLogic.ts @@ -0,0 +1,61 @@ +import { connect, kea, path, selectors } from 'kea' +import { userPreferencesLogic } from 'lib/logic/userPreferencesLogic' +import { NON_DOLLAR_POSTHOG_PROPERTY_KEYS, PROPERTY_KEYS } from 'lib/taxonomy' +import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' + +import type { eventPropertyFilteringLogicType } from './eventPropertyFilteringLogicType' + +export const eventPropertyFilteringLogic = kea([ + path(['scenes', 'session-recordings', 'player', 'inspector', 'components', 'eventPropertyFilteringLogic']), + connect({ + values: [userPreferencesLogic, ['hidePostHogPropertiesInTable'], preflightLogic, ['isCloudOrDev']], + }), + selectors({ + promoteProperties: [ + () => [], + () => { + return (event: string): string[] | undefined => { + if (['$pageview', '$pageleave'].includes(event)) { + return ['$current_url', '$title', '$referrer'] + } else if (event === '$groupidentify') { + return ['$group_type', '$group_key', '$group_set'] + } else if (event === '$screen') { + return ['$screen_name'] + } else if (event === '$web_vitals') { + return [ + '$web_vitals_FCP_value', + '$web_vitals_CLS_value', + '$web_vitals_INP_value', + '$web_vitals_LCP_value', + '$web_vitals_FCP_event', + '$web_vitals_CLS_event', + '$web_vitals_INP_event', + '$web_vitals_LCP_event', + ] + } else if (event === '$set') { + return ['$set', '$set_once'] + } + } + }, + ], + filterProperties: [ + (s) => [s.hidePostHogPropertiesInTable, s.isCloudOrDev], + (hidePostHogPropertiesInTable, isCloudOrDev) => { + return (props: Record) => { + if (!hidePostHogPropertiesInTable) { + return props + } + + return Object.fromEntries( + Object.entries(props).filter(([key]) => { + const isPostHogProperty = key.startsWith('$') && PROPERTY_KEYS.includes(key) + const isNonDollarPostHogProperty = + isCloudOrDev && 
NON_DOLLAR_POSTHOG_PROPERTY_KEYS.includes(key) + return !isPostHogProperty && !isNonDollarPostHogProperty + }) + ) + } + }, + ], + }), +]) diff --git a/frontend/src/scenes/session-recordings/player/inspector/playerInspectorLogic.test.ts b/frontend/src/scenes/session-recordings/player/inspector/playerInspectorLogic.test.ts index 5f9c7bb8ffa77..dafd05c304ff3 100644 --- a/frontend/src/scenes/session-recordings/player/inspector/playerInspectorLogic.test.ts +++ b/frontend/src/scenes/session-recordings/player/inspector/playerInspectorLogic.test.ts @@ -15,7 +15,7 @@ describe('playerInspectorLogic', () => { beforeEach(() => { useMocks({ get: { - 'api/projects/:team_id/session_recordings/1/': {}, + 'api/environments/:team_id/session_recordings/1/': {}, 'api/projects/:team/notebooks/recording_comments': { results: [ { diff --git a/frontend/src/scenes/session-recordings/player/modal/sessionPlayerModalLogic.test.ts b/frontend/src/scenes/session-recordings/player/modal/sessionPlayerModalLogic.test.ts index 838f51c78e6f5..77fc0fde434e0 100644 --- a/frontend/src/scenes/session-recordings/player/modal/sessionPlayerModalLogic.test.ts +++ b/frontend/src/scenes/session-recordings/player/modal/sessionPlayerModalLogic.test.ts @@ -13,7 +13,7 @@ describe('sessionPlayerModalLogic', () => { beforeEach(() => { useMocks({ get: { - '/api/projects/:team/session_recordings': [ + '/api/environments/:team_id/session_recordings': [ 200, { results: listOfSessionRecordings, diff --git a/frontend/src/scenes/session-recordings/player/playerMetaLogic.test.ts b/frontend/src/scenes/session-recordings/player/playerMetaLogic.test.ts index f21c3f7189f6e..4fccafcfb856a 100644 --- a/frontend/src/scenes/session-recordings/player/playerMetaLogic.test.ts +++ b/frontend/src/scenes/session-recordings/player/playerMetaLogic.test.ts @@ -19,12 +19,12 @@ describe('playerMetaLogic', () => { beforeEach(() => { useMocks({ get: { - '/api/projects/:team/session_recordings/:id': recordingMetaJson, - '/api/projects/:team/session_recordings/:id/snapshots/': (_, res, ctx) => + '/api/environments/:team_id/session_recordings/:id': recordingMetaJson, + '/api/environments/:team_id/session_recordings/:id/snapshots/': (_, res, ctx) => res(ctx.text(snapshotsAsJSONLines())), }, post: { - '/api/projects/:team/query': recordingEventsJson, + '/api/environments/:team_id/query': recordingEventsJson, }, }) initKeaTests() diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.test.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.test.ts index d42f36b416a99..353a2aa04236c 100644 --- a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.test.ts +++ b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.test.ts @@ -43,7 +43,7 @@ describe('sessionRecordingDataLogic', () => { useAvailableFeatures([AvailableFeature.RECORDINGS_PERFORMANCE]) useMocks({ get: { - '/api/projects/:team/session_recordings/:id/snapshots': async (req, res, ctx) => { + '/api/environments/:team_id/session_recordings/:id/snapshots': async (req, res, ctx) => { // with no sources, returns sources... 
if (req.url.searchParams.get('source') === 'blob') { return res(ctx.text(snapshotsAsJSONLines())) @@ -69,13 +69,13 @@ describe('sessionRecordingDataLogic', () => { }, ] }, - '/api/projects/:team/session_recordings/:id': recordingMetaJson, + '/api/environments/:team_id/session_recordings/:id': recordingMetaJson, }, post: { - '/api/projects/:team/query': recordingEventsJson, + '/api/environments/:team_id/query': recordingEventsJson, }, patch: { - '/api/projects/:team/session_recordings/:id': { success: true }, + '/api/environments/:team_id/session_recordings/:id': { success: true }, }, }) initKeaTests() @@ -139,7 +139,7 @@ describe('sessionRecordingDataLogic', () => { logic.unmount() useMocks({ get: { - '/api/projects/:team/session_recordings/:id': () => [500, { status: 0 }], + '/api/environments/:team_id/session_recordings/:id': () => [500, { status: 0 }], }, }) logic.mount() @@ -170,7 +170,7 @@ describe('sessionRecordingDataLogic', () => { logic.unmount() useMocks({ get: { - '/api/projects/:team/session_recordings/:id/snapshots': () => [500, { status: 0 }], + '/api/environments/:team_id/session_recordings/:id/snapshots': () => [500, { status: 0 }], }, }) logic.mount() @@ -224,7 +224,7 @@ describe('sessionRecordingDataLogic', () => { }).toDispatchActions(['loadEvents', 'loadEventsSuccess']) expect(api.create).toHaveBeenCalledWith( - `api/projects/${MOCK_TEAM_ID}/query`, + `api/environments/${MOCK_TEAM_ID}/query`, { client_query_id: undefined, query: { diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts index 03b04f500c483..4ed931d99f3ea 100644 --- a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts +++ b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts @@ -28,6 +28,7 @@ import { eventUsageLogic } from 'lib/utils/eventUsageLogic' import posthog from 'posthog-js' import { compressedEventWithTime } from 'posthog-js/lib/src/extensions/replay/sessionrecording' import { RecordingComment } from 'scenes/session-recordings/player/inspector/playerInspectorLogic' +import { teamLogic } from 'scenes/teamLogic' import { HogQLQuery, NodeKind } from '~/queries/schema' import { hogql } from '~/queries/utils' @@ -403,7 +404,7 @@ export const sessionRecordingDataLogic = kea([ key(({ sessionRecordingId }) => sessionRecordingId || 'no-session-recording-id'), connect({ logic: [eventUsageLogic], - values: [featureFlagLogic, ['featureFlags']], + values: [featureFlagLogic, ['featureFlags'], teamLogic, ['currentTeam']], }), defaults({ sessionPlayerMetaData: null as SessionRecordingType | null, @@ -1059,8 +1060,8 @@ export const sessionRecordingDataLogic = kea([ ], snapshotsInvalid: [ - (s, p) => [s.snapshotsByWindowId, s.fullyLoaded, s.start, p.sessionRecordingId], - (snapshotsByWindowId, fullyLoaded, start, sessionRecordingId): boolean => { + (s, p) => [s.snapshotsByWindowId, s.fullyLoaded, s.start, p.sessionRecordingId, s.currentTeam], + (snapshotsByWindowId, fullyLoaded, start, sessionRecordingId, currentTeam): boolean => { if (!fullyLoaded || !start) { return false } @@ -1081,10 +1082,14 @@ export const sessionRecordingDataLogic = kea([ // video is definitely unplayable posthog.capture('recording_has_no_full_snapshot', { sessionId: sessionRecordingId, + teamId: currentTeam?.id, + teamName: currentTeam?.name, }) } else if (anyWindowMissingFullSnapshot) { posthog.capture('recording_window_missing_full_snapshot', { sessionId: sessionRecordingId, + 
teamID: currentTeam?.id, + teamName: currentTeam?.name, }) } diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.test.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.test.ts index 4724e4369cdf2..3dde171f5c309 100644 --- a/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.test.ts +++ b/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.test.ts @@ -27,7 +27,7 @@ describe('sessionRecordingPlayerLogic', () => { useMocks({ get: { '/api/projects/:team_id/session_recordings/:id/comments/': { results: [] }, - '/api/projects/:team/session_recordings/:id/snapshots/': (req, res, ctx) => { + '/api/environments/:team_id/session_recordings/:id/snapshots/': (req, res, ctx) => { // with no sources, returns sources... if (req.url.searchParams.get('source') === 'blob') { return res(ctx.text(snapshotsAsJSONLines())) @@ -47,13 +47,13 @@ describe('sessionRecordingPlayerLogic', () => { }, ] }, - '/api/projects/:team/session_recordings/:id': recordingMetaJson, + '/api/environments/:team_id/session_recordings/:id': recordingMetaJson, }, delete: { - '/api/projects/:team/session_recordings/:id': { success: true }, + '/api/environments/:team_id/session_recordings/:id': { success: true }, }, post: { - '/api/projects/:team/query': recordingEventsJson, + '/api/environments/:team_id/query': recordingEventsJson, }, }) initKeaTests() @@ -128,7 +128,7 @@ describe('sessionRecordingPlayerLogic', () => { useMocks({ get: { - '/api/projects/:team/session_recordings/:id/snapshots': () => [500, { status: 0 }], + '/api/environments/:team_id/session_recordings/:id/snapshots': () => [500, { status: 0 }], }, }) logic.mount() @@ -194,7 +194,7 @@ describe('sessionRecordingPlayerLogic', () => { sessionRecordingsPlaylistLogic({ updateSearchParams: true }).actionTypes.loadAllRecordings, ]) - expect(api.delete).toHaveBeenCalledWith(`api/projects/${MOCK_TEAM_ID}/session_recordings/3`) + expect(api.delete).toHaveBeenCalledWith(`api/environments/${MOCK_TEAM_ID}/session_recordings/3`) resumeKeaLoadersErrors() }) @@ -218,7 +218,7 @@ describe('sessionRecordingPlayerLogic', () => { listLogic.actionCreators.setSelectedRecordingId(null), ]) - expect(api.delete).toHaveBeenCalledWith(`api/projects/${MOCK_TEAM_ID}/session_recordings/3`) + expect(api.delete).toHaveBeenCalledWith(`api/environments/${MOCK_TEAM_ID}/session_recordings/3`) resumeKeaLoadersErrors() }) @@ -240,7 +240,7 @@ describe('sessionRecordingPlayerLogic', () => { expect(router.values.location.pathname).toEqual(urls.replay()) - expect(api.delete).toHaveBeenCalledWith(`api/projects/${MOCK_TEAM_ID}/session_recordings/3`) + expect(api.delete).toHaveBeenCalledWith(`api/environments/${MOCK_TEAM_ID}/session_recordings/3`) resumeKeaLoadersErrors() }) @@ -261,7 +261,7 @@ describe('sessionRecordingPlayerLogic', () => { expect(router.values.location.pathname).toEqual('/') - expect(api.delete).toHaveBeenCalledWith(`api/projects/${MOCK_TEAM_ID}/session_recordings/3`) + expect(api.delete).toHaveBeenCalledWith(`api/environments/${MOCK_TEAM_ID}/session_recordings/3`) resumeKeaLoadersErrors() }) }) diff --git a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsListPropertiesLogic.test.ts b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsListPropertiesLogic.test.ts index 0b032a2f5f1e0..881f1202d120c 100644 --- a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsListPropertiesLogic.test.ts +++ 
b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsListPropertiesLogic.test.ts @@ -39,7 +39,7 @@ describe('sessionRecordingsListPropertiesLogic', () => { beforeEach(() => { useMocks({ post: { - '/api/projects/:team/query': { + '/api/environments/:team_id/query': { results: [ ['s1', JSON.stringify({ blah: 'blah1' })], ['s2', JSON.stringify({ blah: 'blah2' })], diff --git a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.test.ts b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.test.ts index 309362176d427..876ef29c38af9 100644 --- a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.test.ts +++ b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.test.ts @@ -43,7 +43,7 @@ describe('sessionRecordingsPlaylistLogic', () => { beforeEach(() => { useMocks({ get: { - '/api/projects/:team/session_recordings/properties': { + '/api/environments/:team_id/session_recordings/properties': { results: [ { id: 's1', properties: { blah: 'blah1' } }, { id: 's2', properties: { blah: 'blah2' } }, @@ -52,7 +52,7 @@ describe('sessionRecordingsPlaylistLogic', () => { 'api/projects/:team/property_definitions/seen_together': { $pageview: true }, - '/api/projects/:team/session_recordings': (req) => { + '/api/environments/:team_id/session_recordings': (req) => { const { searchParams } = req.url if ( (searchParams.get('events')?.length || 0) > 0 && diff --git a/frontend/src/scenes/settings/SettingsMap.tsx b/frontend/src/scenes/settings/SettingsMap.tsx index 5effa45c14085..e3d49dddd1c23 100644 --- a/frontend/src/scenes/settings/SettingsMap.tsx +++ b/frontend/src/scenes/settings/SettingsMap.tsx @@ -3,6 +3,8 @@ import { PersonsJoinMode } from 'scenes/settings/environment/PersonsJoinMode' import { PersonsOnEvents } from 'scenes/settings/environment/PersonsOnEvents' import { SessionsTableVersion } from 'scenes/settings/environment/SessionsTableVersion' +import { Realm } from '~/types' + import { AutocaptureSettings, ExceptionAutocaptureSettings, @@ -469,6 +471,7 @@ export const SETTINGS_MAP: SettingSection[] = [ id: 'optout', title: 'Anonymize data collection', component: , + hideOn: [Realm.Cloud], }, { id: 'hedgehog-mode', diff --git a/frontend/src/scenes/settings/environment/SessionRecordingSettings.tsx b/frontend/src/scenes/settings/environment/SessionRecordingSettings.tsx index dce1fbe08efad..e666ec6d20bef 100644 --- a/frontend/src/scenes/settings/environment/SessionRecordingSettings.tsx +++ b/frontend/src/scenes/settings/environment/SessionRecordingSettings.tsx @@ -1,8 +1,9 @@ -import { IconPlus } from '@posthog/icons' +import { IconPencil, IconPlus, IconTrash } from '@posthog/icons' import { LemonBanner, LemonButton, LemonDialog, + LemonInput, LemonSegmentedButton, LemonSegmentedButtonOption, LemonSelect, @@ -11,23 +12,32 @@ import { Link, Spinner, } from '@posthog/lemon-ui' +import clsx from 'clsx' import { useActions, useValues } from 'kea' +import { Form } from 'kea-forms' import { AuthorizedUrlList } from 'lib/components/AuthorizedUrlList/AuthorizedUrlList' import { AuthorizedUrlListType } from 'lib/components/AuthorizedUrlList/authorizedUrlListLogic' import { EventSelect } from 'lib/components/EventSelect/EventSelect' +import { FlaggedFeature } from 'lib/components/FlaggedFeature' import { FlagSelector } from 'lib/components/FlagSelector' import { PayGateMini } from 'lib/components/PayGateMini/PayGateMini' import { PropertySelect } from 'lib/components/PropertySelect/PropertySelect' 
import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' -import { SESSION_REPLAY_MINIMUM_DURATION_OPTIONS } from 'lib/constants' +import { FEATURE_FLAGS, SESSION_REPLAY_MINIMUM_DURATION_OPTIONS } from 'lib/constants' import { IconCancel, IconSelectEvents } from 'lib/lemon-ui/icons' +import { LemonField } from 'lib/lemon-ui/LemonField' import { LemonLabel } from 'lib/lemon-ui/LemonLabel/LemonLabel' import { objectsEqual } from 'lib/utils' -import { sessionReplayLinkedFlagLogic } from 'scenes/settings/environment/sessionReplayLinkedFlagLogic' +import { sessionReplayIngestionControlLogic } from 'scenes/settings/environment/sessionReplayIngestionControlLogic' import { teamLogic } from 'scenes/teamLogic' import { userLogic } from 'scenes/userLogic' -import { AvailableFeature, MultivariateFlagOptions, SessionRecordingAIConfig } from '~/types' +import { + AvailableFeature, + MultivariateFlagOptions, + SessionRecordingAIConfig, + SessionReplayUrlTriggerConfig, +} from '~/types' function LogCaptureSettings(): JSX.Element { const { updateCurrentTeam } = useActions(teamLogic) @@ -259,9 +269,8 @@ function LinkedFlagSelector(): JSX.Element | null { const featureFlagRecordingFeatureEnabled = hasAvailableFeature(AvailableFeature.REPLAY_FEATURE_FLAG_BASED_RECORDING) - const logic = sessionReplayLinkedFlagLogic({ id: currentTeam?.session_recording_linked_flag?.id || null }) - const { linkedFlag, featureFlagLoading, flagHasVariants } = useValues(logic) - const { selectFeatureFlag } = useActions(logic) + const { linkedFlag, featureFlagLoading, flagHasVariants } = useValues(sessionReplayIngestionControlLogic) + const { selectFeatureFlag } = useActions(sessionReplayIngestionControlLogic) if (!featureFlagRecordingFeatureEnabled) { return null @@ -330,6 +339,123 @@ function LinkedFlagSelector(): JSX.Element | null { ) } +function UrlTriggerForm(): JSX.Element { + const { cancelProposingUrlTrigger } = useActions(sessionReplayIngestionControlLogic) + const { isProposedUrlTriggerSubmitting } = useValues(sessionReplayIngestionControlLogic) + + return ( +
+
+ + + + + + +
+
+ + Cancel + + + Save + +
+ + ) +} + +function UrlTriggerRow({ trigger, index }: { trigger: SessionReplayUrlTriggerConfig; index: number }): JSX.Element { + const { editUrlTriggerIndex } = useValues(sessionReplayIngestionControlLogic) + const { setEditUrlTriggerIndex, removeUrlTrigger } = useActions(sessionReplayIngestionControlLogic) + + if (editUrlTriggerIndex === index) { + return ( +
+ +
+ ) + } + + return ( +
+ + {trigger.matching === 'regex' ? 'Matches regex: ' : ''} {trigger.url} + +
+ } + onClick={() => setEditUrlTriggerIndex(index)} + tooltip="Edit" + center + /> + + } + tooltip="Remove URL trigger" + center + onClick={() => { + LemonDialog.open({ + title: <>Remove URL trigger, + description: `Are you sure you want to remove this URL trigger?`, + primaryButton: { + status: 'danger', + children: 'Remove', + onClick: () => removeUrlTrigger(index), + }, + secondaryButton: { + children: 'Cancel', + }, + }) + }} + /> +
+
+ ) +} + +function UrlTriggerOptions(): JSX.Element | null { + const { isAddUrlTriggerConfigFormVisible, urlTriggerConfig } = useValues(sessionReplayIngestionControlLogic) + const { newUrlTrigger } = useActions(sessionReplayIngestionControlLogic) + + return ( +
+
+ Enable recordings when URL matches + { + newUrlTrigger() + }} + type="secondary" + icon={} + data-attr="session-replay-add-url-trigger" + > + Add + +
+

+ Adding a URL trigger means recording will only be started when the user visits a page that matches the + URL. +

+ + {isAddUrlTriggerConfigFormVisible && } + {urlTriggerConfig?.map((trigger, index) => ( + + ))} +
+ ) +} + export function ReplayCostControl(): JSX.Element | null { const { updateCurrentTeam } = useActions(teamLogic) const { currentTeam } = useValues(teamLogic) @@ -484,6 +610,9 @@ export function ReplayCostControl(): JSX.Element | null { )} + + + ) diff --git a/frontend/src/scenes/settings/environment/sessionReplayIngestionControlLogic.ts b/frontend/src/scenes/settings/environment/sessionReplayIngestionControlLogic.ts new file mode 100644 index 0000000000000..b0d2057631f9d --- /dev/null +++ b/frontend/src/scenes/settings/environment/sessionReplayIngestionControlLogic.ts @@ -0,0 +1,158 @@ +import { actions, afterMount, connect, kea, listeners, path, props, reducers, selectors, sharedListeners } from 'kea' +import { forms } from 'kea-forms' +import { loaders } from 'kea-loaders' +import { subscriptions } from 'kea-subscriptions' +import api from 'lib/api' +import { isObject } from 'lib/utils' +import { variantKeyToIndexFeatureFlagPayloads } from 'scenes/feature-flags/featureFlagLogic' +import { teamLogic } from 'scenes/teamLogic' + +import { FeatureFlagBasicType, SessionReplayUrlTriggerConfig, TeamPublicType, TeamType } from '~/types' + +import type { sessionReplayIngestionControlLogicType } from './sessionReplayIngestionControlLogicType' + +const NEW_URL_TRIGGER = { url: '', matching: 'regex' } + +export const sessionReplayIngestionControlLogic = kea([ + path(['scenes', 'settings', 'project', 'sessionReplayIngestionControlLogic']), + actions({ + selectFeatureFlag: (flag: FeatureFlagBasicType) => ({ flag }), + setUrlTriggerConfig: (urlTriggerConfig: SessionReplayUrlTriggerConfig[]) => ({ urlTriggerConfig }), + + addUrlTrigger: (urlTriggerConfig: SessionReplayUrlTriggerConfig) => ({ urlTriggerConfig }), + removeUrlTrigger: (index: number) => ({ index }), + updateUrlTrigger: (index: number, urlTriggerConfig: SessionReplayUrlTriggerConfig) => ({ + index, + urlTriggerConfig, + }), + setEditUrlTriggerIndex: (originalIndex: number | null) => ({ originalIndex }), + newUrlTrigger: true, + cancelProposingUrlTrigger: true, + }), + connect({ values: [teamLogic, ['currentTeam']], actions: [teamLogic, ['updateCurrentTeam']] }), + reducers({ + selectedFlag: [ + null as FeatureFlagBasicType | null, + { + selectFeatureFlag: (_, { flag }) => flag, + }, + ], + urlTriggerConfig: [ + null as SessionReplayUrlTriggerConfig[] | null, + { + setUrlTriggerConfig: (_, { urlTriggerConfig }) => urlTriggerConfig, + addUrlTrigger: (state, { urlTriggerConfig }) => [...(state ?? []), urlTriggerConfig], + updateUrlTrigger: (state, { index, urlTriggerConfig: newUrlTriggerConfig }) => + (state ?? []).map((triggerConfig, i) => (i === index ? newUrlTriggerConfig : triggerConfig)), + removeUrlTrigger: (state, { index }) => { + return (state ?? []).filter((_, i) => i !== index) + }, + }, + ], + editUrlTriggerIndex: [ + null as number | null, + { + setEditUrlTriggerIndex: (_, { originalIndex }) => originalIndex, + removeUrlTrigger: (editUrlTriggerIndex, { index }) => + editUrlTriggerIndex && index < editUrlTriggerIndex + ? editUrlTriggerIndex - 1 + : index === editUrlTriggerIndex + ? 
null + : editUrlTriggerIndex, + newUrlTrigger: () => -1, + updateUrlTrigger: () => null, + addUrlTrigger: () => null, + cancelProposingUrlTrigger: () => null, + }, + ], + }), + props({}), + loaders(({ values }) => ({ + featureFlag: { + loadFeatureFlag: async () => { + if (values.linkedFeatureFlagId) { + const retrievedFlag = await api.featureFlags.get(values.linkedFeatureFlagId) + return variantKeyToIndexFeatureFlagPayloads(retrievedFlag) + } + return null + }, + }, + })), + selectors({ + linkedFeatureFlagId: [ + (s) => [s.currentTeam], + (currentTeam) => currentTeam?.session_recording_linked_flag?.id || null, + ], + linkedFlag: [ + (s) => [s.featureFlag, s.selectedFlag, s.currentTeam], + // an existing linked flag is loaded from the API, + // a newly chosen flag can be passed in once selected, + // and the current team is used to ensure that we don't show stale values + // as people change the selection + (featureFlag, selectedFlag, currentTeam) => + currentTeam?.session_recording_linked_flag?.id ? selectedFlag || featureFlag : null, + ], + flagHasVariants: [(s) => [s.linkedFlag], (linkedFlag) => isObject(linkedFlag?.filters.multivariate)], + remoteUrlTriggerConfig: [ + (s) => [s.currentTeam], + (currentTeam) => currentTeam?.session_recording_url_trigger_config, + ], + isAddUrlTriggerConfigFormVisible: [ + (s) => [s.editUrlTriggerIndex], + (editUrlTriggerIndex) => editUrlTriggerIndex === -1, + ], + urlTriggerToEdit: [ + (s) => [s.urlTriggerConfig, s.editUrlTriggerIndex], + (urlTriggerConfig, editUrlTriggerIndex) => { + if ( + editUrlTriggerIndex === null || + editUrlTriggerIndex === -1 || + !urlTriggerConfig?.[editUrlTriggerIndex] + ) { + return NEW_URL_TRIGGER + } + return urlTriggerConfig[editUrlTriggerIndex] + }, + ], + }), + afterMount(({ actions }) => { + actions.loadFeatureFlag() + }), + subscriptions(({ actions }) => ({ + currentTeam: (currentTeam: TeamPublicType | TeamType | null) => { + actions.setUrlTriggerConfig(currentTeam?.session_recording_url_trigger_config ?? []) + }, + })), + forms(({ values, actions }) => ({ + proposedUrlTrigger: { + defaults: { url: '', matching: 'regex' } as SessionReplayUrlTriggerConfig, + submit: async ({ url, matching }) => { + if (values.editUrlTriggerIndex !== null && values.editUrlTriggerIndex >= 0) { + actions.updateUrlTrigger(values.editUrlTriggerIndex, { url, matching }) + } else { + actions.addUrlTrigger({ url, matching }) + } + }, + }, + })), + sharedListeners(({ values }) => ({ + saveUrlTriggers: async () => { + await teamLogic.asyncActions.updateCurrentTeam({ + session_recording_url_trigger_config: values.urlTriggerConfig ??
[], + }) + }, + })), + listeners(({ sharedListeners, actions, values }) => ({ + setEditUrlTriggerIndex: () => { + actions.setProposedUrlTriggerValue('url', values.urlTriggerToEdit.url) + actions.setProposedUrlTriggerValue('matching', values.urlTriggerToEdit.matching) + }, + addUrlTrigger: sharedListeners.saveUrlTriggers, + removeUrlTrigger: sharedListeners.saveUrlTriggers, + updateUrlTrigger: sharedListeners.saveUrlTriggers, + submitProposedUrlTriggerSuccess: () => { + actions.setEditUrlTriggerIndex(null) + actions.resetProposedUrlTrigger() + }, + })), +]) diff --git a/frontend/src/scenes/settings/environment/teamMembersLogic.tsx b/frontend/src/scenes/settings/environment/teamMembersLogic.tsx index 650ea406daa05..23274ece6ae8d 100644 --- a/frontend/src/scenes/settings/environment/teamMembersLogic.tsx +++ b/frontend/src/scenes/settings/environment/teamMembersLogic.tsx @@ -32,12 +32,12 @@ export const teamMembersLogic = kea([ explicitMembers: { __default: [] as ExplicitTeamMemberType[], loadMembers: async () => { - return await api.get(`api/projects/${teamLogic.values.currentTeamId}/explicit_members/`) + return await api.get(`api/environments/${teamLogic.values.currentTeamId}/explicit_members/`) }, addMembers: async ({ userUuids, level }: AddMembersFields) => { const newMembers: ExplicitTeamMemberType[] = await Promise.all( userUuids.map((userUuid) => - api.create(`api/projects/${teamLogic.values.currentTeamId}/explicit_members/`, { + api.create(`api/environments/${teamLogic.values.currentTeamId}/explicit_members/`, { user_uuid: userUuid, level, }) @@ -49,7 +49,9 @@ export const teamMembersLogic = kea([ return [...values.explicitMembers, ...newMembers] }, removeMember: async ({ member }: { member: BaseMemberType }) => { - await api.delete(`api/projects/${teamLogic.values.currentTeamId}/explicit_members/${member.user.uuid}/`) + await api.delete( + `api/environments/${teamLogic.values.currentTeamId}/explicit_members/${member.user.uuid}/` + ) lemonToast.success( <> {member.user.uuid === userLogic.values.user?.uuid @@ -164,7 +166,7 @@ export const teamMembersLogic = kea([ })), listeners(({ actions }) => ({ changeUserAccessLevel: async ({ user, newLevel }) => { - await api.update(`api/projects/${teamLogic.values.currentTeamId}/explicit_members/${user.uuid}/`, { + await api.update(`api/environments/${teamLogic.values.currentTeamId}/explicit_members/${user.uuid}/`, { level: newLevel, }) lemonToast.success( diff --git a/frontend/src/scenes/settings/settingsLogic.ts b/frontend/src/scenes/settings/settingsLogic.ts index d249ce1627113..3c825e7569011 100644 --- a/frontend/src/scenes/settings/settingsLogic.ts +++ b/frontend/src/scenes/settings/settingsLogic.ts @@ -2,9 +2,12 @@ import { actions, connect, kea, key, listeners, path, props, reducers, selectors import { FEATURE_FLAGS } from 'lib/constants' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { copyToClipboard } from 'lib/utils/copyToClipboard' +import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' import { urls } from 'scenes/urls' import { userLogic } from 'scenes/userLogic' +import { Realm } from '~/types' + import type { settingsLogicType } from './settingsLogicType' import { SETTINGS_MAP } from './SettingsMap' import { Setting, SettingId, SettingLevelId, SettingSection, SettingSectionId, SettingsLogicProps } from './types' @@ -14,7 +17,7 @@ export const settingsLogic = kea([ key((props) => props.logicKey ?? 
'global'), path((key) => ['scenes', 'settings', 'settingsLogic', key]), connect({ - values: [featureFlagLogic, ['featureFlags'], userLogic, ['hasAvailableFeature']], + values: [featureFlagLogic, ['featureFlags'], userLogic, ['hasAvailableFeature'], preflightLogic, ['preflight']], }), actions({ @@ -112,8 +115,17 @@ export const settingsLogic = kea([ s.settingId, s.featureFlags, s.hasAvailableFeature, + s.preflight, ], - (selectedLevel, selectedSectionId, sections, settingId, featureFlags, hasAvailableFeature): Setting[] => { + ( + selectedLevel, + selectedSectionId, + sections, + settingId, + featureFlags, + hasAvailableFeature, + preflight + ): Setting[] => { let settings: Setting[] = [] if (selectedSectionId) { @@ -128,22 +140,29 @@ export const settingsLogic = kea([ return settings.filter((x) => x.id === settingId) } - return settings.filter((x) => { - const isFlagConditionMet = !x.flag - ? true // No flag condition - : x.flag.startsWith('!') - ? !featureFlags[FEATURE_FLAGS[x.flag.slice(1)]] // Negated flag condition (!-prefixed) - : featureFlags[FEATURE_FLAGS[x.flag]] // Regular flag condition - if (x.flag && x.features) { - return x.features.some((feat) => hasAvailableFeature(feat)) || isFlagConditionMet - } else if (x.features) { - return x.features.some((feat) => hasAvailableFeature(feat)) - } else if (x.flag) { - return isFlagConditionMet - } + return settings + .filter((x) => { + const isFlagConditionMet = !x.flag + ? true // No flag condition + : x.flag.startsWith('!') + ? !featureFlags[FEATURE_FLAGS[x.flag.slice(1)]] // Negated flag condition (!-prefixed) + : featureFlags[FEATURE_FLAGS[x.flag]] // Regular flag condition + if (x.flag && x.features) { + return x.features.some((feat) => hasAvailableFeature(feat)) || isFlagConditionMet + } else if (x.features) { + return x.features.some((feat) => hasAvailableFeature(feat)) + } else if (x.flag) { + return isFlagConditionMet + } - return true - }) + return true + }) + .filter((x) => { + if (x.hideOn?.includes(Realm.Cloud) && preflight?.cloud) { + return false + } + return true + }) }, ], }), diff --git a/frontend/src/scenes/settings/types.ts b/frontend/src/scenes/settings/types.ts index 0a7addd24f2e3..99c83027a9ba1 100644 --- a/frontend/src/scenes/settings/types.ts +++ b/frontend/src/scenes/settings/types.ts @@ -1,6 +1,6 @@ import { EitherMembershipLevel, FEATURE_FLAGS } from 'lib/constants' -import { AvailableFeature } from '~/types' +import { AvailableFeature, Realm } from '~/types' export type SettingsLogicProps = { logicKey?: string @@ -110,6 +110,7 @@ export type Setting = { */ flag?: FeatureFlagKey | `!${FeatureFlagKey}` features?: AvailableFeature[] + hideOn?: Realm[] } export type SettingSection = { diff --git a/frontend/src/scenes/surveys/SurveyCustomization.tsx b/frontend/src/scenes/surveys/SurveyCustomization.tsx index aa06683ead4c6..c6fe0b0cbeb4f 100644 --- a/frontend/src/scenes/surveys/SurveyCustomization.tsx +++ b/frontend/src/scenes/surveys/SurveyCustomization.tsx @@ -1,34 +1,35 @@ import { LemonButton, LemonCheckbox, LemonDialog, LemonInput, LemonSelect } from '@posthog/lemon-ui' -import { useActions, useValues } from 'kea' +import { useValues } from 'kea' import { PayGateMini } from 'lib/components/PayGateMini/PayGateMini' import { upgradeModalLogic } from 'lib/components/UpgradeModal/upgradeModalLogic' import { LemonField } from 'lib/lemon-ui/LemonField' -import { surveyLogic } from 'scenes/surveys/surveyLogic' -import { - AvailableFeature, - MultipleSurveyQuestion, - RatingSurveyQuestion, - SurveyAppearance as 
SurveyAppearanceType, - SurveyQuestion, - SurveyQuestionType, -} from '~/types' +import { AvailableFeature, SurveyAppearance as SurveyAppearanceType } from '~/types' import { defaultSurveyAppearance } from './constants' import { surveysLogic } from './surveysLogic' interface CustomizationProps { appearance: SurveyAppearanceType - surveyQuestionItem: RatingSurveyQuestion | SurveyQuestion | MultipleSurveyQuestion + customizeRatingButtons: boolean + customizePlaceholderText: boolean + hasBranchingLogic: boolean + deleteBranchingLogic?: () => void onAppearanceChange: (appearance: SurveyAppearanceType) => void } interface WidgetCustomizationProps extends Omit {} -export function Customization({ appearance, surveyQuestionItem, onAppearanceChange }: CustomizationProps): JSX.Element { +export function Customization({ + appearance, + customizeRatingButtons, + customizePlaceholderText, + hasBranchingLogic, + onAppearanceChange, + deleteBranchingLogic, +}: CustomizationProps): JSX.Element { const { surveysStylingAvailable } = useValues(surveysLogic) - const { surveyShufflingQuestionsAvailable, hasBranchingLogic } = useValues(surveyLogic) - const { deleteBranchingLogic } = useActions(surveyLogic) + const surveyShufflingQuestionsAvailable = true const surveyShufflingQuestionsDisabledReason = surveyShufflingQuestionsAvailable ? '' : 'Please add more than one question to the survey to enable shuffling questions' @@ -41,72 +42,85 @@ export function Customization({ appearance, surveyQuestionItem, onAppearanceChan <> )} -
Background color
- onAppearanceChange({ ...appearance, backgroundColor })} - disabled={!surveysStylingAvailable} - /> -
Border color
- onAppearanceChange({ ...appearance, borderColor })} - disabled={!surveysStylingAvailable} - /> + + onAppearanceChange({ ...appearance, backgroundColor })} + disabled={!surveysStylingAvailable} + /> + + + onAppearanceChange({ ...appearance, borderColor })} + disabled={!surveysStylingAvailable} + /> + <> -
Position
-
- {['left', 'center', 'right'].map((position) => { - return ( - onAppearanceChange({ ...appearance, position })} - active={appearance.position === position} - disabledReason={ - surveysStylingAvailable - ? null - : 'Upgrade your plan to customize survey position.' - } - > - {position} - - ) - })} -
+ +
+ {['left', 'center', 'right'].map((position) => { + return ( + onAppearanceChange({ ...appearance, position })} + active={appearance.position === position} + disabledReason={ + surveysStylingAvailable + ? null + : 'Upgrade your plan to customize survey position.' + } + > + {position} + + ) + })} +
+
- {surveyQuestionItem.type === SurveyQuestionType.Rating && ( + {customizeRatingButtons && ( <> -
Rating button color
- onAppearanceChange({ ...appearance, ratingButtonColor })} - disabled={!surveysStylingAvailable} - /> -
Rating button active color
- - onAppearanceChange({ ...appearance, ratingButtonActiveColor }) - } - disabled={!surveysStylingAvailable} - /> + + + onAppearanceChange({ ...appearance, ratingButtonColor }) + } + disabled={!surveysStylingAvailable} + /> + + + + onAppearanceChange({ ...appearance, ratingButtonActiveColor }) + } + disabled={!surveysStylingAvailable} + /> + )} -
Button color
- onAppearanceChange({ ...appearance, submitButtonColor })} - disabled={!surveysStylingAvailable} - /> -
Button text color
- onAppearanceChange({ ...appearance, submitButtonTextColor })} - disabled={!surveysStylingAvailable} - /> + + onAppearanceChange({ ...appearance, submitButtonColor })} + disabled={!surveysStylingAvailable} + /> + + + + + onAppearanceChange({ ...appearance, submitButtonTextColor }) + } + disabled={!surveysStylingAvailable} + /> + @@ -119,14 +133,15 @@ export function Customization({ appearance, surveyQuestionItem, onAppearanceChan defaultValue="99999" /> - {surveyQuestionItem.type === SurveyQuestionType.Open && ( + {customizePlaceholderText && ( <> -
Placeholder text
- onAppearanceChange({ ...appearance, placeholder })} - disabled={!surveysStylingAvailable} - /> + + onAppearanceChange({ ...appearance, placeholder })} + disabled={!surveysStylingAvailable} + /> + )}
@@ -168,7 +183,9 @@ export function Customization({ appearance, surveyQuestionItem, onAppearanceChan children: 'Continue', status: 'danger', onClick: () => { - deleteBranchingLogic() + if (deleteBranchingLogic) { + deleteBranchingLogic() + } onAppearanceChange({ ...appearance, shuffleQuestions: true }) }, }, diff --git a/frontend/src/scenes/surveys/SurveyEdit.tsx b/frontend/src/scenes/surveys/SurveyEdit.tsx index 5c96d2b6442bf..32d1636e492d9 100644 --- a/frontend/src/scenes/surveys/SurveyEdit.tsx +++ b/frontend/src/scenes/surveys/SurveyEdit.tsx @@ -34,6 +34,7 @@ import { LinkSurveyQuestion, RatingSurveyQuestion, SurveyQuestion, + SurveyQuestionType, SurveyType, SurveyUrlMatchType, } from '~/types' @@ -468,6 +469,16 @@ export default function SurveyEdit(): JSX.Element { Feedback button customization
{ onChange(appearance) @@ -479,7 +490,14 @@ export default function SurveyEdit(): JSX.Element { )} { onChange(appearance) }} @@ -871,6 +889,9 @@ export default function SurveyEdit(): JSX.Element { if (newValue === 'once') { setSurveyValue('iteration_count', 0) setSurveyValue('iteration_frequency_days', 0) + } else if (newValue === 'recurring') { + setSurveyValue('iteration_count', 1) + setSurveyValue('iteration_frequency_days', 90) } }} options={[ @@ -881,7 +902,7 @@ export default function SurveyEdit(): JSX.Element { }, { value: 'recurring', - label: 'Repeat on a Schedule', + label: 'Repeat on a schedule', 'data-attr': 'survey-iteration-frequency-days', disabledReason: surveysRecurringScheduleDisabledReason, }, diff --git a/frontend/src/scenes/surveys/SurveyTemplates.tsx b/frontend/src/scenes/surveys/SurveyTemplates.tsx index 266bac2bfcbef..692205eedda26 100644 --- a/frontend/src/scenes/surveys/SurveyTemplates.tsx +++ b/frontend/src/scenes/surveys/SurveyTemplates.tsx @@ -1,10 +1,11 @@ import './SurveyTemplates.scss' import { LemonButton } from '@posthog/lemon-ui' -import { useActions } from 'kea' +import { useActions, useValues } from 'kea' import { PageHeader } from 'lib/components/PageHeader' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' import { SceneExport } from 'scenes/sceneTypes' +import { teamLogic } from 'scenes/teamLogic' import { urls } from 'scenes/urls' import { Survey } from '~/types' @@ -20,6 +21,10 @@ export const scene: SceneExport = { export function SurveyTemplates(): JSX.Element { const { setSurveyTemplateValues } = useActions(surveyLogic({ id: 'new' })) const { reportSurveyTemplateClicked } = useActions(eventUsageLogic) + const { currentTeam } = useValues(teamLogic) + const surveyAppearance = { + ...currentTeam?.survey_config?.appearance, + } return ( <> @@ -48,7 +53,11 @@ export function SurveyTemplates(): JSX.Element { setSurveyTemplateValues({ name: template.templateType, questions: template.questions, - appearance: { ...defaultSurveyAppearance, ...template.appearance }, + appearance: { + ...defaultSurveyAppearance, + ...template.appearance, + ...surveyAppearance, + }, }) reportSurveyTemplateClicked(template.templateType) }} @@ -69,6 +78,7 @@ export function SurveyTemplates(): JSX.Element { ...defaultSurveyAppearance, whiteLabel: true, ...template.appearance, + ...surveyAppearance, }, } as Survey } diff --git a/frontend/src/scenes/surveys/Surveys.stories.tsx b/frontend/src/scenes/surveys/Surveys.stories.tsx index 467ae13e86a37..624c993b3e559 100644 --- a/frontend/src/scenes/surveys/Surveys.stories.tsx +++ b/frontend/src/scenes/surveys/Surveys.stories.tsx @@ -2,6 +2,7 @@ import { Meta, StoryFn } from '@storybook/react' import { router } from 'kea-router' import { useEffect } from 'react' import { App } from 'scenes/App' +import { SurveysTabs } from 'scenes/surveys/surveysLogic' import { urls } from 'scenes/urls' import { mswDecorator, useStorybookMocks } from '~/mocks/browser' @@ -220,7 +221,7 @@ const meta: Meta = { }`]: toPaginatedResponse([MOCK_SURVEY_WITH_RELEASE_CONS.targeting_flag]), }, post: { - '/api/projects/:team_id/query/': async (req, res, ctx) => { + '/api/environments/:team_id/query/': async (req, res, ctx) => { const body = await req.json() if (body.kind == 'EventsQuery') { return res(ctx.json(MOCK_SURVEY_RESULTS)) @@ -244,6 +245,13 @@ export const SurveysList: StoryFn = () => { return } +export const SurveysGlobalSettings: StoryFn = () => { + useEffect(() => { + router.actions.push(urls.surveys(SurveysTabs.Settings)) + }, []) + return 
+} + export const NewSurvey: StoryFn = () => { useEffect(() => { router.actions.push(urls.survey('new')) diff --git a/frontend/src/scenes/surveys/Surveys.tsx b/frontend/src/scenes/surveys/Surveys.tsx index de73aff73789d..12485ace3a77a 100644 --- a/frontend/src/scenes/surveys/Surveys.tsx +++ b/frontend/src/scenes/surveys/Surveys.tsx @@ -22,19 +22,24 @@ import { dayjs } from 'lib/dayjs' import { useFeatureFlag } from 'lib/hooks/useFeatureFlag' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' import { More } from 'lib/lemon-ui/LemonButton/More' +import { LemonField } from 'lib/lemon-ui/LemonField' import { LemonTableColumn } from 'lib/lemon-ui/LemonTable' import { createdAtColumn, createdByColumn } from 'lib/lemon-ui/LemonTable/columnUtils' import { LemonTableLink } from 'lib/lemon-ui/LemonTable/LemonTableLink' import { LemonTabs } from 'lib/lemon-ui/LemonTabs' import stringWithWBR from 'lib/utils/stringWithWBR' +import { useState } from 'react' import { LinkedHogFunctions } from 'scenes/pipeline/hogfunctions/list/LinkedHogFunctions' import { SceneExport } from 'scenes/sceneTypes' +import { SurveyAppearancePreview } from 'scenes/surveys/SurveyAppearancePreview' +import { Customization } from 'scenes/surveys/SurveyCustomization' +import { teamLogic } from 'scenes/teamLogic' import { urls } from 'scenes/urls' import { userLogic } from 'scenes/userLogic' import { ActivityScope, ProductKey, ProgressStatus, Survey } from '~/types' -import { SurveyQuestionLabel } from './constants' +import { defaultSurveyAppearance, NEW_SURVEY, SurveyQuestionLabel } from './constants' import { openSurveysSettingsDialog } from './SurveySettings' import { getSurveyStatus, surveysLogic, SurveysTabs } from './surveysLogic' @@ -54,10 +59,23 @@ export function Surveys(): JSX.Element { filters, showSurveysDisabledBanner, tab, + globalSurveyAppearanceConfigAvailable, } = useValues(surveysLogic) const { deleteSurvey, updateSurvey, setSearchTerm, setSurveysFilters, setTab } = useActions(surveysLogic) + const { user } = useValues(userLogic) + const { updateCurrentTeam } = useActions(teamLogic) + const { currentTeam } = useValues(teamLogic) + const [editableSurveyConfig, setEditableSurveyConfig] = useState( + currentTeam?.survey_config?.appearance || defaultSurveyAppearance + ) + + const [templatedSurvey, setTemplatedSurvey] = useState(NEW_SURVEY) + + if (templatedSurvey.appearance === defaultSurveyAppearance) { + templatedSurvey.appearance = editableSurveyConfig + } const shouldShowEmptyState = !surveysLoading && surveys.length === 0 const showLinkedHogFunctions = useFeatureFlag('HOG_FUNCTIONS_LINKED') @@ -111,12 +129,65 @@ export function Surveys(): JSX.Element { { key: SurveysTabs.Archived, label: 'Archived' }, showLinkedHogFunctions ? { key: SurveysTabs.Notifications, label: 'Notifications' } : null, { key: SurveysTabs.History, label: 'History' }, + globalSurveyAppearanceConfigAvailable ? { key: SurveysTabs.Settings, label: 'Settings' } : null, ]} /> + {tab === SurveysTabs.Settings && ( + <> +
+ + These settings apply to new surveys in this organization. + - {tab === SurveysTabs.History ? ( - - ) : tab === SurveysTabs.Notifications ? ( +
+ {globalSurveyAppearanceConfigAvailable && ( + { + updateCurrentTeam({ + survey_config: { + ...currentTeam?.survey_config, + appearance: { + ...currentTeam?.survey_config?.appearance, + ...editableSurveyConfig, + }, + }, + }) + }} + > + Save settings + + )} +
+ +
+ { + setEditableSurveyConfig({ + ...editableSurveyConfig, + ...appearance, + }) + setTemplatedSurvey({ + ...templatedSurvey, + ...{ appearance: appearance }, + }) + }} + /> +
+
+ {globalSurveyAppearanceConfigAvailable && ( + + )} +
+
+ + )} + {tab === SurveysTabs.Notifications && ( <>

Get notified whenever a survey result is submitted

- ) : ( + )} + + {tab === SurveysTabs.History && } + + {(tab === SurveysTabs.Active || tab === SurveysTabs.Archived) && ( <>
diff --git a/frontend/src/scenes/surveys/surveyLogic.test.ts b/frontend/src/scenes/surveys/surveyLogic.test.ts index 38223aa0c0fdc..9827f071ed65b 100644 --- a/frontend/src/scenes/surveys/surveyLogic.test.ts +++ b/frontend/src/scenes/surveys/surveyLogic.test.ts @@ -213,7 +213,7 @@ describe('multiple choice survey logic', () => { '/api/projects/:team/surveys/responses_count': () => [200, {}], }, post: { - '/api/projects/:team/query/': () => [ + '/api/environments/:team_id/query/': () => [ 200, { results: [ @@ -263,7 +263,7 @@ describe('single choice survey logic', () => { '/api/projects/:team/surveys/responses_count': () => [200, {}], }, post: { - '/api/projects/:team/query/': () => [ + '/api/environments/:team_id/query/': () => [ 200, { results: [ @@ -313,7 +313,7 @@ describe('multiple choice survey with open choice logic', () => { '/api/projects/:team/surveys/responses_count': () => [200, {}], }, post: { - '/api/projects/:team/query/': () => [ + '/api/environments/:team_id/query/': () => [ 200, { results: [ @@ -363,7 +363,7 @@ describe('single choice survey with open choice logic', () => { '/api/projects/:team/surveys/responses_count': () => [200, {}], }, post: { - '/api/projects/:team/query/': () => [ + '/api/environments/:team_id/query/': () => [ 200, { results: [ diff --git a/frontend/src/scenes/surveys/surveyLogic.tsx b/frontend/src/scenes/surveys/surveyLogic.tsx index a9a1262656e2b..0938a75ff6f27 100644 --- a/frontend/src/scenes/surveys/surveyLogic.tsx +++ b/frontend/src/scenes/surveys/surveyLogic.tsx @@ -9,6 +9,7 @@ import { featureFlagLogic as enabledFlagLogic } from 'lib/logic/featureFlagLogic import { hasFormErrors, isObject } from 'lib/utils' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' import { Scene } from 'scenes/sceneTypes' +import { teamLogic } from 'scenes/teamLogic' import { urls } from 'scenes/urls' import { DataTableNode, HogQLQuery, InsightVizNode, NodeKind } from '~/queries/schema' @@ -28,7 +29,7 @@ import { SurveyUrlMatchType, } from '~/types' -import { defaultSurveyFieldValues, NEW_SURVEY, NewSurvey } from './constants' +import { defaultSurveyAppearance, defaultSurveyFieldValues, NEW_SURVEY, NewSurvey } from './constants' import type { surveyLogicType } from './surveyLogicType' import { surveysLogic } from './surveysLogic' import { sanitizeHTML } from './utils' @@ -196,9 +197,23 @@ export const surveyLogic = kea([ } } if (props.id === 'new' && router.values.hashParams.fromTemplate) { - return values.survey + const templatedSurvey = values.survey + templatedSurvey.appearance = { + ...defaultSurveyAppearance, + ...teamLogic.values.currentTeam?.survey_config?.appearance, + ...templatedSurvey.appearance, + } + return templatedSurvey + } + + const newSurvey = NEW_SURVEY + newSurvey.appearance = { + ...defaultSurveyAppearance, + ...teamLogic.values.currentTeam?.survey_config?.appearance, + ...newSurvey.appearance, } - return { ...NEW_SURVEY } + + return newSurvey }, createSurvey: async (surveyPayload: Partial) => { return await api.surveys.create(sanitizeQuestions(surveyPayload)) diff --git a/frontend/src/scenes/surveys/surveysLogic.tsx b/frontend/src/scenes/surveys/surveysLogic.tsx index 806379d9e441a..4f67264908ef3 100644 --- a/frontend/src/scenes/surveys/surveysLogic.tsx +++ b/frontend/src/scenes/surveys/surveysLogic.tsx @@ -19,6 +19,7 @@ export enum SurveysTabs { Archived = 'archived', Notifications = 'notifications', History = 'history', + Settings = 'settings', } export function getSurveyStatus(survey: Survey): ProgressStatus { @@ -164,6 +165,10 @@ 
export const surveysLogic = kea([ (s) => [s.hasAvailableFeature], (hasAvailableFeature) => hasAvailableFeature(AvailableFeature.SURVEYS_STYLING), ], + globalSurveyAppearanceConfigAvailable: [ + (s) => [s.hasAvailableFeature], + (hasAvailableFeature) => hasAvailableFeature(AvailableFeature.SURVEYS_STYLING), + ], surveysHTMLAvailable: [ (s) => [s.hasAvailableFeature], (hasAvailableFeature) => hasAvailableFeature(AvailableFeature.SURVEYS_TEXT_HTML), diff --git a/frontend/src/scenes/teamActivityDescriber.tsx b/frontend/src/scenes/teamActivityDescriber.tsx index ed3bb43d043b8..7fd8e6cdfe289 100644 --- a/frontend/src/scenes/teamActivityDescriber.tsx +++ b/frontend/src/scenes/teamActivityDescriber.tsx @@ -13,7 +13,7 @@ import { Link } from 'lib/lemon-ui/Link' import { isObject, pluralize } from 'lib/utils' import { urls } from 'scenes/urls' -import { ActivityScope, TeamType } from '~/types' +import { ActivityScope, TeamSurveyConfigType, TeamType } from '~/types' const teamActionsMapping: Record< keyof TeamType, @@ -37,6 +37,17 @@ const teamActionsMapping: Record< ], } }, + session_recording_url_trigger_config(change: ActivityChange | undefined): ChangeMapping | null { + const before = change?.before + const after = change?.after + if (before === null && after === null) { + return null + } + + return { + description: [<>Changed session replay URL triggers], + } + }, capture_console_log_opt_in(change: ActivityChange | undefined): ChangeMapping | null { return { description: [<>{change?.after ? 'enabled' : 'disabled'} console log capture in session replay] } }, @@ -139,6 +150,48 @@ const teamActionsMapping: Record< ], } }, + survey_config: (change: ActivityChange | undefined): ChangeMapping | null => { + const before = change!.before as TeamSurveyConfigType + const after = change!.after as TeamSurveyConfigType + const descriptions = [] + const preamble = 'Survey Configuration:' + if (before === undefined) { + descriptions.push('Survey Configuration was enabled') + } + + const propertyChangeDesc = ( + name: string, + callback: (config: TeamSurveyConfigType) => string | undefined + ): void => { + if (callback(before) !== callback(after)) { + descriptions.push(`${preamble} ${name} was changed from "${callback(before)}" to "${callback(after)}"`) + } + } + + if (before?.appearance?.whiteLabel !== after?.appearance?.whiteLabel) { + descriptions.push( + `${preamble} Survey white labeling was ${after?.appearance?.whiteLabel ? 'enabled' : 'disabled'}` + ) + } + + if (before?.appearance?.displayThankYouMessage !== after?.appearance?.displayThankYouMessage) { + descriptions.push( + `${preamble} displayThankYouMessage was ${after?.appearance?.displayThankYouMessage ?
'enabled' : 'disabled'}` + ) + } + + propertyChangeDesc('backgroundColor', (c) => c?.appearance?.backgroundColor) + propertyChangeDesc('submitButtonColor', (c) => c?.appearance?.submitButtonColor) + propertyChangeDesc('submitButtonTextColor', (c) => c?.appearance?.submitButtonTextColor) + propertyChangeDesc('ratingButtonColor', (c) => c?.appearance?.ratingButtonColor) + propertyChangeDesc('ratingButtonActiveColor', (c) => c?.appearance?.ratingButtonActiveColor) + propertyChangeDesc('borderColor', (c) => c?.appearance?.borderColor) + propertyChangeDesc('placeholder', (c) => c?.appearance?.placeholder) + propertyChangeDesc('thankYouMessageHeader', (c) => c?.appearance?.thankYouMessageHeader) + propertyChangeDesc('position', (c) => c?.appearance?.position) + + return { description: descriptions } + }, session_replay_config(change: ActivityChange | undefined): ChangeMapping | null { // TODO we'll eventually need a deeper mapping for this nested object const after = change?.after diff --git a/frontend/src/scenes/trends/persons-modal/PersonsModal.stories.tsx b/frontend/src/scenes/trends/persons-modal/PersonsModal.stories.tsx index f56637999e527..5d981f6004fec 100644 --- a/frontend/src/scenes/trends/persons-modal/PersonsModal.stories.tsx +++ b/frontend/src/scenes/trends/persons-modal/PersonsModal.stories.tsx @@ -86,7 +86,7 @@ export const Empty: StoryFn = () => { return (
- +
) } diff --git a/frontend/src/scenes/trends/persons-modal/peronsModalLogic.test.ts b/frontend/src/scenes/trends/persons-modal/peronsModalLogic.test.ts index f2666ba43f58f..ba343a2ffe02d 100644 --- a/frontend/src/scenes/trends/persons-modal/peronsModalLogic.test.ts +++ b/frontend/src/scenes/trends/persons-modal/peronsModalLogic.test.ts @@ -11,7 +11,7 @@ describe('personsModalLogic', () => { beforeEach(() => { useMocks({ get: { - 'api/projects/:team_id/persons/trends': {}, + 'api/environments/:team_id/persons/trends': {}, }, }) initKeaTests() diff --git a/frontend/src/scenes/urls.ts b/frontend/src/scenes/urls.ts index 146c561e225e1..05f0372b7f8c8 100644 --- a/frontend/src/scenes/urls.ts +++ b/frontend/src/scenes/urls.ts @@ -3,7 +3,7 @@ import { AlertType } from 'lib/components/Alerts/types' import { getCurrentTeamId } from 'lib/utils/getAppContext' import { ExportOptions } from '~/exporter/types' -import { HogQLFilters, Node } from '~/queries/schema' +import { HogQLFilters, HogQLVariable, Node } from '~/queries/schema' import { ActionType, ActivityTab, @@ -89,8 +89,20 @@ export const urls = { } ).url, insightEdit: (id: InsightShortId): string => `/insights/${id}/edit`, - insightView: (id: InsightShortId, dashboardId?: number): string => - `/insights/${id}${dashboardId !== undefined ? `?dashboard=${dashboardId}` : ''}`, + insightView: ( + id: InsightShortId, + dashboardId?: number, + variablesOverride?: Record + ): string => { + const params = [ + { param: 'dashboard', value: dashboardId }, + { param: 'variables_override', value: variablesOverride }, + ] + .filter((n) => Boolean(n.value)) + .map((n) => `${n.param}=${encodeURIComponent(JSON.stringify(n.value))}`) + .join('&') + return `/insights/${id}${params.length ? `?${params}` : ''}` + }, insightSubcriptions: (id: InsightShortId): string => `/insights/${id}/subscriptions`, insightSubcription: (id: InsightShortId, subscriptionId: string): string => `/insights/${id}/subscriptions/${subscriptionId}`, diff --git a/frontend/src/scenes/web-analytics/SessionAttributionExplorer/sessionAttributionExplorer.stories.tsx b/frontend/src/scenes/web-analytics/SessionAttributionExplorer/sessionAttributionExplorer.stories.tsx index b98489824fd5f..6ba2259fd99ef 100644 --- a/frontend/src/scenes/web-analytics/SessionAttributionExplorer/sessionAttributionExplorer.stories.tsx +++ b/frontend/src/scenes/web-analytics/SessionAttributionExplorer/sessionAttributionExplorer.stories.tsx @@ -16,13 +16,13 @@ const meta: Meta = { decorators: [ mswDecorator({ get: { - '/api/projects/:team_id/query/:id/': async (_, res, ctx) => { + '/api/environments/:team_id/query/:id/': async (_, res, ctx) => { // eslint-disable-next-line @typescript-eslint/no-var-requires return res(ctx.json(require('./__mocks__/sessionAttributionQueryStatus.json'))) }, }, post: { - '/api/projects/:team_id/query/': async (_, res, ctx) => { + '/api/environments/:team_id/query/': async (_, res, ctx) => { // eslint-disable-next-line @typescript-eslint/no-var-requires return res(ctx.json(require('./__mocks__/sessionAttributionQuery.json'))) }, diff --git a/frontend/src/stories/How to mock requests.stories.mdx b/frontend/src/stories/How to mock requests.stories.mdx index dc2562938cbe8..6dd638be8da66 100644 --- a/frontend/src/stories/How to mock requests.stories.mdx +++ b/frontend/src/stories/How to mock requests.stories.mdx @@ -65,7 +65,7 @@ useStorybookMocks({ '/api/status_shorthand': () => [500, { error: 'Error text' }] // complicated param handling - '/api/projects/:team/insights': (req, _, ctx) => { + 
'/api/environments/:team_id/insights': (req, _, ctx) => { const team = req.params['team'] const shortId = req.url.searchParams.get('short_id') if (shortId === 'my_insight') { diff --git a/frontend/src/test/init.ts b/frontend/src/test/init.ts index 13597c8eebb25..c39467f2953fe 100644 --- a/frontend/src/test/init.ts +++ b/frontend/src/test/init.ts @@ -1,24 +1,30 @@ import { createMemoryHistory } from 'history' import { testUtilsPlugin } from 'kea-test-utils' -import { MOCK_DEFAULT_TEAM } from 'lib/api.mock' +import { MOCK_DEFAULT_PROJECT, MOCK_DEFAULT_TEAM } from 'lib/api.mock' import { dayjs } from 'lib/dayjs' import posthog from 'posthog-js' import { organizationLogic } from 'scenes/organizationLogic' import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' +import { projectLogic } from 'scenes/projectLogic' import { teamLogic } from 'scenes/teamLogic' import { initKea } from '~/initKea' -import { AppContext, TeamType } from '~/types' +import { AppContext, ProjectType, TeamType } from '~/types' process.on('unhandledRejection', (err) => { console.warn(err) }) -export function initKeaTests(mountCommonLogic = true, teamForWindowContext: TeamType = MOCK_DEFAULT_TEAM): void { +export function initKeaTests( + mountCommonLogic = true, + teamForWindowContext: TeamType = MOCK_DEFAULT_TEAM, + projectForWindowContext: ProjectType = MOCK_DEFAULT_PROJECT +): void { dayjs.tz.setDefault('UTC') window.POSTHOG_APP_CONTEXT = { ...window.POSTHOG_APP_CONTEXT, current_team: teamForWindowContext, + current_project: projectForWindowContext, } as unknown as AppContext posthog.init('no token', { autocapture: false, @@ -37,6 +43,7 @@ export function initKeaTests(mountCommonLogic = true, teamForWindowContext: Team if (mountCommonLogic) { preflightLogic.mount() teamLogic.mount() + projectLogic.mount() organizationLogic.mount() } } diff --git a/frontend/src/types.ts b/frontend/src/types.ts index 33cacd4590382..0a28032ade19e 100644 --- a/frontend/src/types.ts +++ b/frontend/src/types.ts @@ -37,6 +37,7 @@ import type { DatabaseSchemaField, HogQLQuery, HogQLQueryModifiers, + HogQLVariable, InsightVizNode, Node, QueryStatus, @@ -484,6 +485,9 @@ export interface ProjectType extends ProjectBasicType { created_at: string } +export interface TeamSurveyConfigType { + appearance?: SurveyAppearance +} export interface TeamType extends TeamBasicType { created_at: string updated_at: string @@ -505,9 +509,11 @@ export interface TeamType extends TeamBasicType { | undefined | null session_replay_config: { record_canvas?: boolean; ai_config?: SessionRecordingAIConfig } | undefined | null + survey_config?: TeamSurveyConfigType autocapture_exceptions_opt_in: boolean autocapture_web_vitals_opt_in?: boolean autocapture_web_vitals_allowed_metrics?: SupportedWebVitalsMetrics[] + session_recording_url_trigger_config?: SessionReplayUrlTriggerConfig[] surveys_opt_in?: boolean heatmaps_opt_in?: boolean autocapture_exceptions_errors_to_ignore: string[] @@ -705,6 +711,7 @@ export enum PipelineNodeTab { History = 'history', Schemas = 'schemas', Syncs = 'syncs', + SourceConfiguration = 'source configuration', } export enum ProgressStatus { @@ -1819,6 +1826,7 @@ export type DashboardTemplateScope = 'team' | 'global' | 'feature_flag' export interface DashboardType extends DashboardBasicType { tiles: DashboardTile[] filters: DashboardFilter + variables?: Record } export enum TemplateAvailabilityContext { @@ -2657,6 +2665,8 @@ export interface InsightLogicProps { /** Dashboard filters to override the ones in the query */ filtersOverride?: 
DashboardFilter | null + /** Dashboard variables to override the ones in the query */ + variablesOverride?: Record | null } export interface SetInsightOptions { @@ -3650,7 +3660,13 @@ export enum EventDefinitionType { EventPostHog = 'event_posthog', } -export type IntegrationKind = 'slack' | 'salesforce' | 'hubspot' | 'google-pubsub' | 'google-cloud-storage' +export type IntegrationKind = + | 'slack' + | 'salesforce' + | 'hubspot' + | 'google-pubsub' + | 'google-cloud-storage' + | 'google-ads' export interface IntegrationType { id: number @@ -3822,6 +3838,7 @@ export enum ActivityScope { INSIGHT = 'Insight', PLUGIN = 'Plugin', PLUGIN_CONFIG = 'PluginConfig', + HOG_FUNCTION = 'HogFunction', DATA_MANAGEMENT = 'DataManagement', EVENT_DEFINITION = 'EventDefinition', PROPERTY_DEFINITION = 'PropertyDefinition', @@ -3913,7 +3930,7 @@ export interface DataWarehouseTable { format: DataWarehouseTableTypes url_pattern: string credential: DataWarehouseCredential - external_data_source?: ExternalDataStripeSource + external_data_source?: ExternalDataSource external_schema?: SimpleExternalDataSourceSchema } @@ -3969,7 +3986,7 @@ export interface ExternalDataSourceCreatePayload { prefix: string payload: Record } -export interface ExternalDataStripeSource { +export interface ExternalDataSource { id: string source_id: string connection_id: string @@ -3979,6 +3996,7 @@ export interface ExternalDataStripeSource { last_run_at?: Dayjs schemas: ExternalDataSourceSchema[] sync_frequency: DataWarehouseSyncInterval + job_inputs: Record } export interface SimpleExternalDataSourceSchema { id: string @@ -4362,8 +4380,6 @@ export interface SourceConfig { caption: string | React.ReactNode fields: SourceFieldConfig[] disabledReason?: string | null - showPrefix?: (payload: Record) => boolean - showSourceForm?: (payload: Record) => boolean oauthPayload?: string[] } @@ -4593,3 +4609,8 @@ export type AppMetricsV2RequestParams = { interval?: 'hour' | 'day' | 'week' breakdown_by?: 'name' | 'kind' } + +export type SessionReplayUrlTriggerConfig = { + url: string + matching: 'regex' +} diff --git a/funnel-udf/src/trends.rs b/funnel-udf/src/trends.rs index 0b9cdd259e247..5f5da9a2ed953 100644 --- a/funnel-udf/src/trends.rs +++ b/funnel-udf/src/trends.rs @@ -3,6 +3,7 @@ use std::str::FromStr; use itertools::Itertools; use serde::{Deserialize, Serialize}; use serde_json::{json, Value}; +use uuid::Uuid; use crate::PropVal; fn deserialize_number_from_string<'de, D>(deserializer: D) -> Result @@ -15,8 +16,7 @@ where #[derive(Clone, Deserialize)] struct EnteredTimestamp { - timestamp: f64, - timings: Vec, + timestamp: f64 } #[derive(Clone, Deserialize)] @@ -24,6 +24,7 @@ struct Event { timestamp: f64, #[serde(deserialize_with = "deserialize_number_from_string")] interval_start: u64, + uuid: Uuid, breakdown: PropVal, steps: Vec, } @@ -40,10 +41,16 @@ struct Args { } #[derive(Serialize)] -struct ResultStruct(u64, i8, PropVal); +struct ResultStruct(u64, i8, PropVal, Uuid); + +struct IntervalData { + max_step: usize, + max_step_event_uuid: Uuid, + entered_timestamp: Vec, +} struct Vars { - interval_start_to_entered_timestamps: HashMap>, + interval_start_to_entered_timestamps: HashMap, } struct AggregateFunnelRow { @@ -53,7 +60,6 @@ struct AggregateFunnelRow { const DEFAULT_ENTERED_TIMESTAMP: EnteredTimestamp = EnteredTimestamp { timestamp: 0.0, - timings: vec![], }; pub fn process_line(line: &str) -> Value { @@ -114,9 +120,9 @@ impl AggregateFunnelRow { // At this point, everything left in entered_timestamps is a failure, if it has made it 
to from_step - for entered_timestamp in vars.interval_start_to_entered_timestamps.values() { - if !self.results.contains_key(&(entered_timestamp[0].timestamp as u64)) && entered_timestamp[0].timings.len() > 0 { - self.results.insert(entered_timestamp[0].timestamp as u64, ResultStruct(entered_timestamp[0].timestamp as u64, -1, prop_val.clone() )); + for interval_data in vars.interval_start_to_entered_timestamps.values() { + if !self.results.contains_key(&(interval_data.entered_timestamp[0].timestamp as u64)) && interval_data.max_step >= args.from_step + 1 { + self.results.insert(interval_data.entered_timestamp[0].timestamp as u64, ResultStruct(interval_data.entered_timestamp[0].timestamp as u64, -1, prop_val.clone(), interval_data.max_step_event_uuid)); } } } @@ -141,36 +147,32 @@ impl AggregateFunnelRow { if step == 1 { if !vars.interval_start_to_entered_timestamps.contains_key(&event.interval_start) && !self.results.contains_key(&event.interval_start) { let mut entered_timestamp = vec![DEFAULT_ENTERED_TIMESTAMP.clone(); args.num_steps + 1]; - entered_timestamp[0] = EnteredTimestamp { timestamp: event.interval_start as f64, timings: if args.from_step == 0 {vec![1.0]} else {vec![]} }; - entered_timestamp[1] = EnteredTimestamp { timestamp: event.timestamp, timings: vec![event.timestamp] }; - vars.interval_start_to_entered_timestamps.insert(event.interval_start, entered_timestamp); + entered_timestamp[0] = EnteredTimestamp { timestamp: event.interval_start as f64 }; + entered_timestamp[1] = EnteredTimestamp { timestamp: event.timestamp }; + vars.interval_start_to_entered_timestamps.insert(event.interval_start, IntervalData { max_step: 1, max_step_event_uuid: event.uuid, entered_timestamp: entered_timestamp }); } } else { - for entered_timestamp in vars.interval_start_to_entered_timestamps.values_mut() { - let in_match_window = (event.timestamp - entered_timestamp[step - 1].timestamp) <= args.conversion_window_limit as f64; - let already_reached_this_step = entered_timestamp[step].timestamp == entered_timestamp[step - 1].timestamp; + for interval_data in vars.interval_start_to_entered_timestamps.values_mut() { + let in_match_window = (event.timestamp - interval_data.entered_timestamp[step - 1].timestamp) <= args.conversion_window_limit as f64; + let already_reached_this_step = interval_data.entered_timestamp[step].timestamp == interval_data.entered_timestamp[step - 1].timestamp; if in_match_window && !already_reached_this_step { if exclusion { return false; } let is_unmatched_step_attribution = self.breakdown_step.map(|breakdown_step| step == breakdown_step - 1).unwrap_or(false) && *prop_val != event.breakdown; if !is_unmatched_step_attribution { - entered_timestamp[step] = EnteredTimestamp { - timestamp: entered_timestamp[step - 1].timestamp, - timings: { - let mut timings = entered_timestamp[step - 1].timings.clone(); - timings.push(event.timestamp); - timings - }, + interval_data.entered_timestamp[step] = EnteredTimestamp { + timestamp: interval_data.entered_timestamp[step - 1].timestamp }; // check if we have hit the goal. 
if we have, remove it from the list and add it to the successful_timestamps - if entered_timestamp[args.num_steps].timestamp != 0.0 { + if interval_data.entered_timestamp[args.num_steps].timestamp != 0.0 { self.results.insert( - entered_timestamp[0].timestamp as u64, - ResultStruct(entered_timestamp[0].timestamp as u64, 1, prop_val.clone()) + interval_data.entered_timestamp[0].timestamp as u64, + ResultStruct(interval_data.entered_timestamp[0].timestamp as u64, 1, prop_val.clone(), event.uuid) ); - } else if step == args.from_step + 1 { - entered_timestamp[0].timings.push(1.0) + } else if step > interval_data.max_step { + interval_data.max_step = step; + interval_data.max_step_event_uuid = event.uuid; } } } @@ -180,10 +182,10 @@ impl AggregateFunnelRow { // If a strict funnel, clear all of the steps that we didn't match to // If we are processing multiple events, skip this step, because ordering makes it complicated if args.funnel_order_type == "strict" { - for entered_timestamp in vars.interval_start_to_entered_timestamps.values_mut() { - for i in 1..entered_timestamp.len() { + for interval_data in vars.interval_start_to_entered_timestamps.values_mut() { + for i in 1..interval_data.entered_timestamp.len() { if !event.steps.contains(&(i as i8)) { - entered_timestamp[i] = DEFAULT_ENTERED_TIMESTAMP; + interval_data.entered_timestamp[i] = DEFAULT_ENTERED_TIMESTAMP; } } } diff --git a/latest_migrations.manifest b/latest_migrations.manifest index bd99d5cde082a..f8f13e49ea0ad 100644 --- a/latest_migrations.manifest +++ b/latest_migrations.manifest @@ -5,7 +5,7 @@ contenttypes: 0002_remove_content_type_name ee: 0016_rolemembership_organization_member otp_static: 0002_throttling otp_totp: 0002_auto_20190420_0723 -posthog: 0487_team_survey_config +posthog: 0494_team_project_non_null sessions: 0001_initial social_django: 0010_uid_db_index two_factor: 0007_auto_20201201_1019 diff --git a/mypy-baseline.txt b/mypy-baseline.txt index 4789622dfceb7..ae7bf250f3f0a 100644 --- a/mypy-baseline.txt +++ b/mypy-baseline.txt @@ -3,6 +3,54 @@ posthog/temporal/common/utils.py:0: note: This is likely because "from_activity" posthog/temporal/common/utils.py:0: error: Argument 2 to "__get__" of "classmethod" has incompatible type "type[HeartbeatType]"; expected "type[Never]" [arg-type] posthog/tasks/exports/ordered_csv_renderer.py:0: error: No return value expected [return-value] posthog/warehouse/models/ssh_tunnel.py:0: error: Incompatible types in assignment (expression has type "NoEncryption", variable has type "BestAvailableEncryption") [assignment] +posthog/temporal/data_imports/pipelines/sql_database/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Dict entry 2 has incompatible type "Literal['auto']": "None"; expected "Literal['json_response', 'header_link', 'auto', 'single_page', 'cursor', 'offset', 'page_number']": "type[BasePaginator]" [dict-item] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "AuthConfigBase") [assignment] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Argument 1 to "get_auth_class" has incompatible type "Literal['bearer', 'api_key', 'http_basic'] | None"; expected "Literal['bearer', 'api_key', 'http_basic']" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Need type annotation for "dependency_graph" 
[var-annotated] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "None", target has type "ResolvedParam") [assignment] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible return value type (got "tuple[TopologicalSorter[Any], dict[str, EndpointResource], dict[str, ResolvedParam]]", expected "tuple[Any, dict[str, EndpointResource], dict[str, ResolvedParam | None]]") [return-value] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("str | Endpoint | None") [operator] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type variable "StrOrLiteralStr" of "parse" of "Formatter" cannot be "str | None" [type-var] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None") [operator] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None") [operator] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" is not indexable [index] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" has no attribute "pop" [union-attr] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" is not indexable [index] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "str | None" has no attribute "format" [union-attr] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Argument 1 to "single_entity_path" has incompatible type "str | None"; expected "str" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" has no attribute "items" [union-attr] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str") [assignment] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str") [assignment] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Statement is unreachable [unreachable] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 0 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, Any]" [dict-item] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 1 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, Any]" [dict-item] +posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 0 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, ResolveParamConfig | IncrementalParamConfig | Any]" [dict-item] 
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 1 has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "SupportsKeysAndGetItem[str, ResolveParamConfig | IncrementalParamConfig | Any]" [dict-item] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Not all union combinations were tried because there are too many unions [misc] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 2 to "source" has incompatible type "str | None"; expected "str" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 3 to "source" has incompatible type "str | None"; expected "str" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 4 to "source" has incompatible type "int | None"; expected "int" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 6 to "source" has incompatible type "Schema | None"; expected "Schema" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 7 to "source" has incompatible type "Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | None"; expected "Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 8 to "source" has incompatible type "type[BaseConfiguration] | None"; expected "type[BaseConfiguration]" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "build_resource_dependency_graph" has incompatible type "EndpointResourceBase | None"; expected "EndpointResourceBase" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible types in assignment (expression has type "list[str] | None", variable has type "list[str]") [assignment] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "setup_incremental_object" has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "dict[str, Any]" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument "base_url" to "RESTClient" has incompatible type "str | None"; expected "str" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "exclude_keys" has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "Mapping[str, Any]" [arg-type] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible default for argument "resolved_param" (default has type "ResolvedParam | None", argument has type "ResolvedParam") [assignment] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument "module" to "SourceInfo" has incompatible type Module | None; expected Module [arg-type] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] 
+posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/utils.py:0: error: No overload variant of "asdict" matches argument type "type[DataclassInstance]" [call-overload] posthog/utils.py:0: note: Possible overload variants: posthog/utils.py:0: note: def asdict(obj: DataclassInstance) -> dict[str, Any] @@ -13,10 +61,6 @@ posthog/settings/data_stores.py:0: error: Name "DATABASE_URL" already defined on posthog/plugins/utils.py:0: error: Subclass of "str" and "bytes" cannot exist: would have incompatible method signatures [unreachable] posthog/plugins/utils.py:0: error: Statement is unreachable [unreachable] posthog/models/dashboard.py:0: error: Need type annotation for "insights" [var-annotated] -posthog/hogql/database/schema/numbers.py:0: error: Incompatible types in assignment (expression has type "dict[str, IntegerDatabaseField]", variable has type "dict[str, FieldOrTable]") [assignment] -posthog/hogql/database/schema/numbers.py:0: note: "Dict" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance -posthog/hogql/database/schema/numbers.py:0: note: Consider using "Mapping" instead, which is covariant in the value type -posthog/hogql/ast.py:0: error: Incompatible return value type (got "bool | None", expected "bool") [return-value] posthog/warehouse/data_load/service.py:0: error: Unsupported operand types for >= ("timedelta" and "None") [operator] posthog/warehouse/data_load/service.py:0: note: Left operand is of type "timedelta | None" posthog/warehouse/data_load/service.py:0: error: Incompatible return value type (got "tuple[timedelta | None, timedelta]", expected "tuple[timedelta, timedelta]") [return-value] @@ -29,6 +73,15 @@ posthog/models/subscription.py:0: error: Argument 2 to "SubscriptionResourceInfo posthog/models/exported_asset.py:0: error: Value of type variable "_StrOrPromiseT" of "slugify" cannot be "str | None" [type-var] posthog/models/action/action.py:0: error: Need type annotation for "events" [var-annotated] posthog/models/action/action.py:0: error: Argument 1 to "len" has incompatible type "str | None"; expected "Sized" [arg-type] +posthog/hogql/database/schema/numbers.py:0: error: Incompatible types in assignment (expression has type "dict[str, IntegerDatabaseField]", variable has type "dict[str, FieldOrTable]") [assignment] +posthog/hogql/database/schema/numbers.py:0: note: "Dict" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance +posthog/hogql/database/schema/numbers.py:0: note: Consider using "Mapping" instead, which is covariant in the value type +posthog/hogql/ast.py:0: error: Incompatible return value type (got "bool | None", expected "bool") [return-value] +ee/models/license.py:0: error: Incompatible return value type (got "_T", expected "License | None") [return-value] +ee/models/license.py:0: error: Cannot use a covariant type variable as a parameter 
[misc] +ee/models/license.py:0: error: "_T" has no attribute "plan" [attr-defined] +ee/models/license.py:0: error: Incompatible return value type (got "str | bool", expected "bool") [return-value] +ee/models/explicit_team_membership.py:0: error: Incompatible return value type (got "int", expected "Level") [return-value] posthog/hogql/visitor.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "CTE") [assignment] posthog/hogql/visitor.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "CTE") [assignment] posthog/hogql/visitor.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "CTE") [assignment] @@ -37,18 +90,13 @@ posthog/hogql/visitor.py:0: error: Incompatible types in assignment (expression posthog/hogql/visitor.py:0: error: Incompatible types in assignment (expression has type "WindowExpr", variable has type "CTE") [assignment] posthog/hogql/visitor.py:0: error: Incompatible types in assignment (expression has type "FieldAliasType", variable has type "BaseTableType | SelectUnionQueryType | SelectQueryType | SelectQueryAliasType | SelectViewType") [assignment] posthog/hogql/visitor.py:0: error: Incompatible types in assignment (expression has type "Type", variable has type "BaseTableType | SelectUnionQueryType | SelectQueryType | SelectQueryAliasType | SelectViewType") [assignment] -ee/models/license.py:0: error: Incompatible return value type (got "_T", expected "License | None") [return-value] -ee/models/license.py:0: error: Cannot use a covariant type variable as a parameter [misc] -ee/models/license.py:0: error: "_T" has no attribute "plan" [attr-defined] -ee/models/license.py:0: error: Incompatible return value type (got "str | bool", expected "bool") [return-value] -ee/models/explicit_team_membership.py:0: error: Incompatible return value type (got "int", expected "Level") [return-value] -posthog/hogql/resolver_utils.py:0: error: Argument 1 to "lookup_field_by_name" has incompatible type "SelectQueryType | SelectUnionQueryType"; expected "SelectQueryType" [arg-type] posthog/models/filters/mixins/simplify.py:0: error: Incompatible type for lookup 'pk': (got "str | int | list[str]", expected "str | int") [misc] +posthog/hogql/resolver_utils.py:0: error: Argument 1 to "lookup_field_by_name" has incompatible type "SelectQueryType | SelectUnionQueryType"; expected "SelectQueryType" [arg-type] +posthog/helpers/dashboard_templates.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str | Combinable") [assignment] posthog/hogql/parser.py:0: error: Item "None" of "list[Expr] | None" has no attribute "__iter__" (not iterable) [union-attr] posthog/hogql/parser.py:0: error: "None" has no attribute "text" [attr-defined] posthog/hogql/parser.py:0: error: "None" has no attribute "text" [attr-defined] posthog/hogql/parser.py:0: error: Statement is unreachable [unreachable] -posthog/helpers/dashboard_templates.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str | Combinable") [assignment] posthog/hogql/functions/cohort.py:0: error: Incompatible type for lookup 'team_id': (got "int | None", expected "str | int") [misc] posthog/hogql/functions/cohort.py:0: error: Incompatible type for lookup 'team_id': (got "int | None", expected "str | int") [misc] posthog/hogql/database/schema/persons_pdi.py:0: error: Incompatible types in assignment (expression has type "Organization | None", 
variable has type "Organization") [assignment] @@ -58,8 +106,8 @@ posthog/hogql/database/schema/groups.py:0: note: Consider using "Mapping" instea posthog/hogql/database/schema/groups.py:0: error: Incompatible types in assignment (expression has type "dict[str, DatabaseField]", variable has type "dict[str, FieldOrTable]") [assignment] posthog/hogql/database/schema/groups.py:0: note: "Dict" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance posthog/hogql/database/schema/groups.py:0: note: Consider using "Mapping" instead, which is covariant in the value type -posthog/hogql/database/schema/persons.py:0: error: Incompatible types in assignment (expression has type "Organization | None", variable has type "Organization") [assignment] posthog/batch_exports/service.py:0: error: Argument 4 to "backfill_export" has incompatible type "datetime | None"; expected "datetime" [arg-type] +posthog/hogql/database/schema/persons.py:0: error: Incompatible types in assignment (expression has type "Organization | None", variable has type "Organization") [assignment] posthog/models/team/team.py:0: error: Statement is unreachable [unreachable] posthog/models/team/team.py:0: error: Statement is unreachable [unreachable] posthog/models/hog_functions/hog_function.py:0: error: Argument 1 to "get" of "dict" has incompatible type "str | None"; expected "str" [arg-type] @@ -182,7 +230,6 @@ posthog/hogql/transforms/in_cohort.py:0: error: List item 0 has incompatible typ posthog/hogql/database/database.py:0: error: Argument "week_start_day" to "Database" has incompatible type "int | Any | None"; expected "WeekStartDay | None" [arg-type] posthog/hogql/database/database.py:0: error: "FieldOrTable" has no attribute "fields" [attr-defined] posthog/warehouse/models/datawarehouse_saved_query.py:0: error: Argument 1 to "create_hogql_database" has incompatible type "int | None"; expected "int" [arg-type] -posthog/warehouse/models/datawarehouse_saved_query.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "SelectQuery | SelectUnionQuery") [assignment] posthog/models/feature_flag/flag_matching.py:0: error: Statement is unreachable [unreachable] posthog/models/feature_flag/flag_matching.py:0: error: Value expression in dictionary comprehension has incompatible type "int"; expected type "Literal[0, 1, 2, 3, 4]" [misc] posthog/models/feature_flag/flag_matching.py:0: error: Value of type variable "_E" of "ExpressionWrapper" cannot be "object" [type-var] @@ -292,8 +339,8 @@ posthog/hogql/query.py:0: error: Incompatible types in assignment (expression ha posthog/hogql/query.py:0: error: Argument 1 to "get_default_limit_for_context" has incompatible type "LimitContext | None"; expected "LimitContext" [arg-type] posthog/hogql/query.py:0: error: "SelectQuery" has no attribute "select_queries" [attr-defined] posthog/hogql/query.py:0: error: Subclass of "SelectQuery" and "SelectUnionQuery" cannot exist: would have incompatible method signatures [unreachable] -posthog/api/action.py:0: error: Argument 1 to has incompatible type "*tuple[str, ...]"; expected "type[BaseRenderer]" [arg-type] posthog/queries/person_query.py:0: error: Incompatible type for lookup 'pk': (got "str | int | list[str]", expected "str | int") [misc] +posthog/api/action.py:0: error: Argument 1 to has incompatible type "*tuple[str, ...]"; expected "type[BaseRenderer]" [arg-type] posthog/queries/event_query/event_query.py:0: error: Incompatible type for lookup 'pk': (got "str | int | list[str]", 
expected "str | int") [misc] posthog/hogql_queries/sessions_timeline_query_runner.py:0: error: Statement is unreachable [unreachable] posthog/hogql_queries/hogql_query_runner.py:0: error: Statement is unreachable [unreachable] @@ -383,7 +430,23 @@ posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard | None" has no attribute "tiles" [union-attr] posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard | None" has no attribute "tiles" [union-attr] posthog/test/test_feature_flag_analytics.py:0: error: Item "None" of "Dashboard | None" has no attribute "delete" [union-attr] -posthog/temporal/data_imports/pipelines/sql_database/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/tasks/test/test_update_survey_iteration.py:0: error: Item "None" of "FeatureFlag | None" has no attribute "filters" [union-attr] posthog/tasks/test/test_stop_surveys_reached_target.py:0: error: No overload variant of "__sub__" of "datetime" matches argument type "None" [operator] posthog/tasks/test/test_stop_surveys_reached_target.py:0: note: Possible overload variants: @@ -410,12 +473,19 @@ posthog/tasks/exports/test/test_csv_exporter_renders.py:0: error: Item "memoryvi posthog/tasks/exports/test/test_csv_exporter_renders.py:0: error: Item "None" of "bytes | memoryview | None" has no attribute "decode" [union-attr] posthog/tasks/exports/test/test_csv_exporter_renders.py:0: error: Item "memoryview" of "bytes | memoryview | 
None" has no attribute "decode" [union-attr] posthog/tasks/exports/test/test_csv_exporter_renders.py:0: error: Item "None" of "bytes | memoryview | None" has no attribute "decode" [union-attr] +posthog/management/commands/sync_persons_to_clickhouse.py:0: error: Argument 4 to "create_person_override" has incompatible type "int | None"; expected "int" [arg-type] +posthog/management/commands/sync_persons_to_clickhouse.py:0: error: Argument "group_type_index" to "raw_create_group_ch" has incompatible type "int"; expected "Literal[0, 1, 2, 3, 4]" [arg-type] +posthog/management/commands/migrate_team.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "BatchExport") [assignment] +posthog/management/commands/migrate_team.py:0: error: "BatchExportDestination" has no attribute "exclude_events" [attr-defined] +posthog/management/commands/migrate_team.py:0: error: "BatchExportDestination" has no attribute "include_events" [attr-defined] +posthog/management/commands/fix_future_person_created_at.py:0: error: Argument "version" to "create_person" has incompatible type "int | None"; expected "int" [arg-type] posthog/hogql_queries/test/test_query_runner.py:0: error: Variable "TestQueryRunner" is not valid as a type [valid-type] posthog/hogql_queries/test/test_query_runner.py:0: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases posthog/hogql_queries/test/test_query_runner.py:0: error: Invalid base class "TestQueryRunner" [misc] posthog/hogql_queries/test/test_hogql_query_runner.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "SelectQuery") [assignment] posthog/hogql_queries/test/test_hogql_query_runner.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "SelectQuery") [assignment] posthog/hogql_queries/test/test_hogql_query_runner.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "SelectQuery") [assignment] +posthog/hogql_queries/test/test_actors_query_runner.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "SelectQuery") [assignment] posthog/hogql_queries/legacy_compatibility/test/test_filter_to_query.py:0: error: Need type annotation for "properties_0" (hint: "properties_0: list[] = ...") [var-annotated] posthog/hogql_queries/legacy_compatibility/test/test_filter_to_query.py:0: error: Need type annotation for "properties_3" (hint: "properties_3: dict[, ] = ...") [var-annotated] posthog/hogql_queries/legacy_compatibility/test/test_filter_to_query.py:0: error: Need type annotation for "filter" (hint: "filter: dict[, ] = ...") [var-annotated] @@ -428,6 +498,21 @@ posthog/hogql/test/test_timings.py:0: error: No overload variant of "__setitem__ posthog/hogql/test/test_timings.py:0: note: Possible overload variants: posthog/hogql/test/test_timings.py:0: note: def __setitem__(self, SupportsIndex, int, /) -> None posthog/hogql/test/test_timings.py:0: note: def __setitem__(self, slice, Iterable[int], /) -> None +posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | None" has no attribute "next_join" [union-attr] +posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | Any | None" has no attribute "constraint" [union-attr] +posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinConstraint | Any | None" has no attribute "constraint_type" [union-attr] +posthog/hogql/test/test_resolver.py:0: error: Item "None" of 
"JoinConstraint | Any | None" has no attribute "expr" [union-attr] +posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | None" has no attribute "next_join" [union-attr] +posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | Any | None" has no attribute "constraint" [union-attr] +posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinConstraint | Any | None" has no attribute "constraint_type" [union-attr] +posthog/hogql/test/test_resolver.py:0: error: Item "SelectUnionQueryType" of "SelectQueryType | SelectUnionQueryType | None" has no attribute "columns" [union-attr] +posthog/hogql/test/test_resolver.py:0: error: Item "None" of "SelectQueryType | SelectUnionQueryType | None" has no attribute "columns" [union-attr] +posthog/hogql/test/test_resolver.py:0: error: "FieldOrTable" has no attribute "fields" [attr-defined] +posthog/hogql/test/test_resolver.py:0: error: "FieldOrTable" has no attribute "fields" [attr-defined] +posthog/hogql/test/test_resolver.py:0: error: "FieldOrTable" has no attribute "fields" [attr-defined] +posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | None" has no attribute "table" [union-attr] +posthog/hogql/test/test_resolver.py:0: error: Argument 1 to "clone_expr" has incompatible type "SelectQuery | SelectUnionQuery | Field | Any | None"; expected "Expr" [arg-type] +posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | None" has no attribute "alias" [union-attr] posthog/hogql/test/test_property.py:0: error: Argument 1 to "_property_to_expr" of "TestProperty" has incompatible type "HogQLPropertyFilter"; expected "PropertyGroup | Property | dict[Any, Any] | list[Any]" [arg-type] posthog/hogql/test/test_printer.py:0: error: Argument 2 to "Database" has incompatible type "int"; expected "WeekStartDay | None" [arg-type] posthog/hogql/test/test_printer.py:0: error: Argument 2 to "Database" has incompatible type "int"; expected "WeekStartDay | None" [arg-type] @@ -456,11 +541,10 @@ posthog/hogql/test/_test_parser.py:0: error: Item "None" of "JoinExpr | None" ha posthog/hogql/test/_test_parser.py:0: error: Item "None" of "JoinExpr | None" has no attribute "table" [union-attr] posthog/hogql/test/_test_parser.py:0: error: Item "None" of "JoinExpr | None" has no attribute "alias" [union-attr] posthog/hogql/test/_test_parser.py:0: error: Item "None" of "JoinExpr | None" has no attribute "table" [union-attr] +posthog/hogql/functions/test/test_cohort.py:0: error: "TestCohort" has no attribute "snapshot" [attr-defined] +posthog/hogql/functions/test/test_cohort.py:0: error: "TestCohort" has no attribute "snapshot" [attr-defined] +posthog/hogql/functions/test/test_cohort.py:0: error: "TestCohort" has no attribute "snapshot" [attr-defined] posthog/hogql/database/schema/event_sessions.py:0: error: Statement is unreachable [unreachable] -posthog/hogql/ai.py:0: error: No overload variant of "__getitem__" of "tuple" matches argument type "str" [call-overload] -posthog/hogql/ai.py:0: note: Possible overload variants: -posthog/hogql/ai.py:0: note: def __getitem__(self, SupportsIndex, /) -> str | Any -posthog/hogql/ai.py:0: note: def __getitem__(self, slice, /) -> tuple[str | Any, ...] 
posthog/heatmaps/test/test_heatmaps_api.py:0: error: "HttpResponse" has no attribute "json" [attr-defined] posthog/heatmaps/test/test_heatmaps_api.py:0: error: "HttpResponse" has no attribute "json" [attr-defined] posthog/heatmaps/test/test_heatmaps_api.py:0: error: "HttpResponse" has no attribute "json" [attr-defined] @@ -507,10 +591,31 @@ posthog/api/organization_feature_flag.py:0: error: Invalid index type "str | Non posthog/api/organization_feature_flag.py:0: error: Invalid index type "str | None" for "dict[str, int]"; expected type "str" [index] posthog/api/organization_feature_flag.py:0: error: Invalid index type "str | None" for "dict[str, int]"; expected type "str" [index] posthog/api/notebook.py:0: error: Incompatible types in assignment (expression has type "int", variable has type "str | None") [assignment] +posthog/warehouse/external_data_source/source.py:0: error: Incompatible types in assignment (expression has type "int", target has type "str") [assignment] +posthog/warehouse/external_data_source/source.py:0: error: Incompatible types in assignment (expression has type "int", target has type "str") [assignment] +posthog/warehouse/external_data_source/source.py:0: error: Incompatible types in assignment (expression has type "dict[str, Collection[str]]", variable has type "StripeSourcePayload") [assignment] +posthog/warehouse/external_data_source/source.py:0: error: Argument 1 to "_create_source" has incompatible type "StripeSourcePayload"; expected "dict[Any, Any]" [arg-type] posthog/warehouse/data_load/validate_schema.py:0: error: Incompatible types in assignment (expression has type "object", variable has type "DataWarehouseCredential | Combinable | None") [assignment] posthog/warehouse/data_load/validate_schema.py:0: error: Incompatible types in assignment (expression has type "object", variable has type "str | int | Combinable") [assignment] posthog/warehouse/data_load/validate_schema.py:0: error: Incompatible types in assignment (expression has type "dict[str, dict[str, str | bool]] | dict[str, str]", variable has type "dict[str, dict[str, str]]") [assignment] posthog/warehouse/data_load/source_templates.py:0: error: Incompatible types in assignment (expression has type "str", variable has type "Type") [assignment] +posthog/warehouse/api/external_data_schema.py:0: error: Incompatible return value type (got "str | None", expected "SyncType | None") [return-value] +posthog/warehouse/api/external_data_schema.py:0: error: Argument 1 to "get_sql_schemas_for_source_type" has incompatible type "str"; expected "Type" [arg-type] +posthog/warehouse/api/external_data_schema.py:0: error: No overload variant of "get" of "dict" matches argument type "str" [call-overload] +posthog/warehouse/api/external_data_schema.py:0: note: Possible overload variants: +posthog/warehouse/api/external_data_schema.py:0: note: def get(self, Type, /) -> dict[str, list[IncrementalField]] | None +posthog/warehouse/api/external_data_schema.py:0: note: def get(self, Type, dict[str, list[IncrementalField]], /) -> dict[str, list[IncrementalField]] +posthog/warehouse/api/external_data_schema.py:0: note: def [_T] get(self, Type, _T, /) -> dict[str, list[IncrementalField]] | _T +posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: 
error: Argument 1 has incompatible type "str"; expected "Type" [arg-type] +posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: error: No overload variant of "get" of "dict" matches argument types "str", "tuple[()]" [call-overload] +posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: Possible overload variants: +posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: def get(self, Type, /) -> Sequence[str] | None +posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: def get(self, Type, Sequence[str], /) -> Sequence[str] +posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: def [_T] get(self, Type, _T, /) -> Sequence[str] | _T +posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: error: Argument "source_id" has incompatible type "str"; expected "UUID" [arg-type] posthog/tasks/exports/test/test_csv_exporter.py:0: error: Function is missing a return type annotation [no-untyped-def] posthog/tasks/exports/test/test_csv_exporter.py:0: error: Function is missing a type annotation [no-untyped-def] posthog/tasks/exports/test/test_csv_exporter.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def] @@ -553,28 +658,6 @@ posthog/queries/trends/test/test_person.py:0: error: Invalid index type "int" fo posthog/queries/trends/test/test_person.py:0: error: "str" has no attribute "get" [attr-defined] posthog/queries/trends/test/test_person.py:0: error: Invalid index type "int" for "_MonkeyPatchedResponse"; expected type "str" [index] posthog/models/test/test_organization_model.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "timedelta" [attr-defined] -posthog/management/commands/sync_persons_to_clickhouse.py:0: error: Argument 4 to "create_person_override" has incompatible type "int | None"; expected "int" [arg-type] -posthog/management/commands/sync_persons_to_clickhouse.py:0: error: Argument "group_type_index" to "raw_create_group_ch" has incompatible type "int"; expected "Literal[0, 1, 2, 3, 4]" [arg-type] -posthog/management/commands/migrate_team.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "BatchExport") [assignment] -posthog/management/commands/migrate_team.py:0: error: "BatchExportDestination" has no attribute "exclude_events" [attr-defined] -posthog/management/commands/migrate_team.py:0: error: "BatchExportDestination" has no attribute "include_events" [attr-defined] -posthog/management/commands/fix_future_person_created_at.py:0: error: Argument "version" to "create_person" has incompatible type "int | None"; expected "int" [arg-type] -posthog/hogql_queries/test/test_actors_query_runner.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "SelectQuery") [assignment] -posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | None" has no attribute "next_join" [union-attr] -posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | Any | None" has no attribute "constraint" [union-attr] -posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinConstraint | Any | None" has no attribute "constraint_type" [union-attr] -posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinConstraint | Any | None" has no attribute "expr" [union-attr] -posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | None" has no attribute "next_join" [union-attr] 
-posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | Any | None" has no attribute "constraint" [union-attr] -posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinConstraint | Any | None" has no attribute "constraint_type" [union-attr] -posthog/hogql/test/test_resolver.py:0: error: Item "SelectUnionQueryType" of "SelectQueryType | SelectUnionQueryType | None" has no attribute "columns" [union-attr] -posthog/hogql/test/test_resolver.py:0: error: Item "None" of "SelectQueryType | SelectUnionQueryType | None" has no attribute "columns" [union-attr] -posthog/hogql/test/test_resolver.py:0: error: "FieldOrTable" has no attribute "fields" [attr-defined] -posthog/hogql/test/test_resolver.py:0: error: "FieldOrTable" has no attribute "fields" [attr-defined] -posthog/hogql/test/test_resolver.py:0: error: "FieldOrTable" has no attribute "fields" [attr-defined] -posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | None" has no attribute "table" [union-attr] -posthog/hogql/test/test_resolver.py:0: error: Argument 1 to "clone_expr" has incompatible type "SelectQuery | SelectUnionQuery | Field | Any | None"; expected "Expr" [arg-type] -posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | None" has no attribute "alias" [union-attr] posthog/hogql/test/test_query.py:0: error: Value of type "list[QueryTiming] | None" is not indexable [index] posthog/hogql/test/test_query.py:0: error: Value of type "list[QueryTiming] | None" is not indexable [index] posthog/hogql/test/test_query.py:0: error: Module has no attribute "utc" [attr-defined] @@ -605,14 +688,12 @@ posthog/hogql/test/test_parser_python.py:0: error: Unsupported dynamic base clas posthog/hogql/test/test_parser_cpp.py:0: error: Unsupported dynamic base class "parser_test_factory" [misc] posthog/hogql/test/test_parse_string_python.py:0: error: Unsupported dynamic base class "parse_string_test_factory" [misc] posthog/hogql/test/test_parse_string_cpp.py:0: error: Unsupported dynamic base class "parse_string_test_factory" [misc] -posthog/hogql/functions/test/test_cohort.py:0: error: "TestCohort" has no attribute "snapshot" [attr-defined] -posthog/hogql/functions/test/test_cohort.py:0: error: "TestCohort" has no attribute "snapshot" [attr-defined] -posthog/hogql/functions/test/test_cohort.py:0: error: "TestCohort" has no attribute "snapshot" [attr-defined] posthog/hogql/database/test/test_view.py:0: error: Argument "dialect" to "print_ast" has incompatible type "str"; expected "Literal['hogql', 'clickhouse']" [arg-type] posthog/hogql/database/test/test_s3_table.py:0: error: Argument "dialect" to "print_ast" has incompatible type "str"; expected "Literal['hogql', 'clickhouse']" [arg-type] posthog/async_migrations/test/test_runner.py:0: error: Item "None" of "datetime | None" has no attribute "day" [union-attr] posthog/api/test/test_insight.py:0: error: Argument "data" to "get" of "APIClient" has incompatible type "dict[str, object]"; expected "Mapping[str, str | bytes | int | Iterable[str | bytes | int]] | Iterable[tuple[str, str | bytes | int | Iterable[str | bytes | int]]] | None" [arg-type] posthog/api/test/test_insight.py:0: error: Argument "data" to "get" of "APIClient" has incompatible type "dict[str, object]"; expected "Mapping[str, str | bytes | int | Iterable[str | bytes | int]] | Iterable[tuple[str, str | bytes | int | Iterable[str | bytes | int]]] | None" [arg-type] +posthog/api/test/test_insight.py:0: error: Argument "data" to "get" of "APIClient" has incompatible type 
"dict[str, object]"; expected "Mapping[str, str | bytes | int | Iterable[str | bytes | int]] | Iterable[tuple[str, str | bytes | int | Iterable[str | bytes | int]]] | None" [arg-type] posthog/api/test/test_feature_flag.py:0: error: Item "None" of "Dashboard | None" has no attribute "tiles" [union-attr] posthog/api/test/test_feature_flag.py:0: error: Item "None" of "Dashboard | None" has no attribute "name" [union-attr] posthog/api/test/test_feature_flag.py:0: error: Item "None" of "Dashboard | None" has no attribute "description" [union-attr] @@ -673,22 +754,17 @@ posthog/admin/inlines/plugin_attachment_inline.py:0: note: Superclass: posthog/admin/inlines/plugin_attachment_inline.py:0: note: def has_delete_permission(self, request: HttpRequest, obj: Any | None = ...) -> bool posthog/admin/inlines/plugin_attachment_inline.py:0: note: Subclass: posthog/admin/inlines/plugin_attachment_inline.py:0: note: def has_delete_permission(self, request: Any, obj: Any) -> Any -posthog/admin/admins/team_admin.py:0: error: Item "None" of "Project | None" has no attribute "pk" [union-attr] -posthog/admin/admins/team_admin.py:0: error: Item "None" of "Project | None" has no attribute "name" [union-attr] posthog/admin/admins/plugin_admin.py:0: error: Item "None" of "Organization | None" has no attribute "pk" [union-attr] posthog/admin/admins/plugin_admin.py:0: error: Item "None" of "Organization | None" has no attribute "name" [union-attr] ee/clickhouse/views/experiments.py:0: error: Argument 4 to "ClickhouseTrendExperimentResult" has incompatible type "datetime | None"; expected "datetime" [arg-type] ee/clickhouse/views/experiments.py:0: error: Argument 4 to "ClickhouseFunnelExperimentResult" has incompatible type "datetime | None"; expected "datetime" [arg-type] ee/clickhouse/views/experiments.py:0: error: Argument 4 to "ClickhouseSecondaryExperimentResult" has incompatible type "datetime | None"; expected "datetime" [arg-type] ee/clickhouse/views/experiments.py:0: error: Item "None" of "User | None" has no attribute "email" [union-attr] -posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: error: Argument 1 has incompatible type "str"; expected "Type" [arg-type] -posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: error: Incompatible types in assignment (expression has type "list[Any]", variable has type "dict[str, list[tuple[str, str]]]") [assignment] -posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: error: No overload variant of "get" of "dict" matches argument types "str", "tuple[()]" [call-overload] -posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: note: Possible overload variants: -posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: note: def get(self, Type, /) -> Sequence[str] | None -posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: note: def get(self, Type, Sequence[str], /) -> Sequence[str] -posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: note: def [_T] get(self, Type, _T, /) -> Sequence[str] | _T -posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: error: Argument 1 has incompatible type "dict[str, list[tuple[str, str]]]"; expected "list[Any]" [arg-type] +posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore] 
+posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/temporal/tests/batch_exports/test_batch_exports.py:0: error: TypedDict key must be a string literal; expected one of ("_timestamp", "created_at", "distinct_id", "elements", "elements_chain", ...) [literal-required] posthog/session_recordings/session_recording_api.py:0: error: Argument "team_id" to "get_realtime_snapshots" has incompatible type "int"; expected "str" [arg-type] posthog/session_recordings/session_recording_api.py:0: error: Value of type variable "SupportsRichComparisonT" of "sorted" cannot be "str | None" [type-var] posthog/session_recordings/session_recording_api.py:0: error: Argument 1 to "get" of "dict" has incompatible type "str | None"; expected "str" [arg-type] @@ -699,14 +775,6 @@ posthog/queries/app_metrics/historical_exports.py:0: error: Argument 1 to "loads posthog/api/test/test_decide.py:0: error: Item "None" of "User | None" has no attribute "toolbar_mode" [union-attr] posthog/api/test/test_decide.py:0: error: Item "None" of "User | None" has no attribute "save" [union-attr] posthog/api/test/test_authentication.py:0: error: Module has no attribute "utc" [attr-defined] -posthog/admin/admins/plugin_config_admin.py:0: error: Item "None" of "Team | None" has no attribute "name" [union-attr] -posthog/migrations/0237_remove_timezone_from_teams.py:0: error: Argument 2 to "RunPython" has incompatible type "Callable[[Migration, Any], None]"; expected "_CodeCallable | None" [arg-type] -posthog/migrations/0228_fix_tile_layouts.py:0: error: Argument 2 to "RunPython" has incompatible type "Callable[[Migration, Any], None]"; expected "_CodeCallable | None" [arg-type] -posthog/warehouse/external_data_source/source.py:0: error: Incompatible types in assignment (expression has type "int", target has type "str") [assignment] -posthog/warehouse/external_data_source/source.py:0: error: Incompatible types in assignment (expression has type "int", target has type "str") [assignment] -posthog/warehouse/external_data_source/source.py:0: error: Incompatible types in assignment (expression has type "dict[str, Collection[str]]", variable has type "StripeSourcePayload") [assignment] -posthog/warehouse/external_data_source/source.py:0: error: Argument 1 to "_create_source" has incompatible type "StripeSourcePayload"; expected "dict[Any, Any]" [arg-type] -posthog/api/sharing.py:0: error: Item "None" of "list[Any] | None" has no attribute "__iter__" (not iterable) [union-attr] posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "organization" [union-attr] posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "id" [union-attr] posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "organization" [union-attr] @@ -720,107 +788,51 @@ posthog/api/plugin.py:0: error: Incompatible type for "file_size" of "PluginAtta posthog/api/plugin.py:0: error: Item "None" of "IO[Any] | None" has no attribute "read" [union-attr] posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "organization" [union-attr] posthog/api/plugin.py:0: error: Item "None" of "Team | None" has no attribute "id" [union-attr] -posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: 
Unused "type: ignore" comment [unused-ignore] -posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/tests/batch_exports/test_run_updates.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/tests/batch_exports/test_batch_exports.py:0: error: TypedDict key must be a string literal; expected one of ("_timestamp", "created_at", "distinct_id", "elements", "elements_chain", ...) [literal-required] -posthog/api/plugin_log_entry.py:0: error: Name "timezone.datetime" is not defined [name-defined] -posthog/api/plugin_log_entry.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] -posthog/api/plugin_log_entry.py:0: error: Name "timezone.datetime" is not defined [name-defined] -posthog/api/plugin_log_entry.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Dict entry 2 has incompatible type "Literal['auto']": "None"; expected "Literal['json_response', 'header_link', 'auto', 'single_page', 'cursor', 'offset', 'page_number']": "type[BasePaginator]" [dict-item] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "AuthConfigBase") [assignment] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Argument 1 to "get_auth_class" has incompatible type "Literal['bearer', 'api_key', 'http_basic'] | None"; expected "Literal['bearer', 'api_key', 'http_basic']" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Need type annotation for "dependency_graph" [var-annotated] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "None", target has type "ResolvedParam") [assignment] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible return value type (got "tuple[TopologicalSorter[Any], dict[str, EndpointResource], dict[str, ResolvedParam]]", expected "tuple[Any, dict[str, EndpointResource], dict[str, ResolvedParam | None]]") [return-value] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("str | Endpoint | None") [operator] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type variable "StrOrLiteralStr" of "parse" of "Formatter" cannot be "str | None" [type-var] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None") [operator] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None") [operator] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" is not indexable [index] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" has no attribute "pop" [union-attr] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type "dict[str, 
ResolveParamConfig | IncrementalParamConfig | Any] | None" is not indexable [index] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "str | None" has no attribute "format" [union-attr] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Argument 1 to "single_entity_path" has incompatible type "str | None"; expected "str" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" has no attribute "items" [union-attr] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str") [assignment] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str") [assignment] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Statement is unreachable [unreachable] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 0 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, Any]" [dict-item] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 1 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, Any]" [dict-item] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 0 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, ResolveParamConfig | IncrementalParamConfig | Any]" [dict-item] -posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 1 has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "SupportsKeysAndGetItem[str, ResolveParamConfig | IncrementalParamConfig | Any]" [dict-item] +posthog/admin/admins/plugin_config_admin.py:0: error: Item "None" of "Team | None" has no attribute "name" [union-attr] posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py:0: error: Need type annotation for "_execute_calls" (hint: "_execute_calls: list[<type>] = ...") [var-annotated] posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py:0: error: Need type annotation for "_execute_async_calls" (hint: "_execute_async_calls: list[<type>] = ...") [var-annotated] posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py:0: error: Need type annotation for "_cursors" (hint: "_cursors: list[<type>] = ...") [var-annotated] posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py:0: error: List item 0 has incompatible type "tuple[str, str, int, int, int, int, str, int]"; expected "tuple[str, str, int, int, str, str, str, str]" [list-item] posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py:0: error: "tuple[Any, ...]" has no attribute "last_uploaded_part_timestamp" [attr-defined] posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py:0: error: "tuple[Any, ...]" has no attribute "upload_state" [attr-defined] +posthog/temporal/data_imports/workflow_activities/import_data.py:0: error: Argument "job_type" to "PipelineInputs" has incompatible type "str"; expected "Type" [arg-type] +posthog/temporal/data_imports/workflow_activities/import_data.py:0: error: Argument "source_type" to 
"sql_source_for_type" has incompatible type "str"; expected "Type" [arg-type] +posthog/temporal/data_imports/workflow_activities/import_data.py:0: error: Argument "source_type" to "sql_source_for_type" has incompatible type "str"; expected "Type" [arg-type] +posthog/migrations/0237_remove_timezone_from_teams.py:0: error: Argument 2 to "RunPython" has incompatible type "Callable[[Migration, Any], None]"; expected "_CodeCallable | None" [arg-type] +posthog/migrations/0228_fix_tile_layouts.py:0: error: Argument 2 to "RunPython" has incompatible type "Callable[[Migration, Any], None]"; expected "_CodeCallable | None" [arg-type] +posthog/api/query.py:0: error: Statement is unreachable [unreachable] +posthog/api/plugin_log_entry.py:0: error: Name "timezone.datetime" is not defined [name-defined] +posthog/api/plugin_log_entry.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] +posthog/api/plugin_log_entry.py:0: error: Name "timezone.datetime" is not defined [name-defined] +posthog/api/plugin_log_entry.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py:0: error: Incompatible types in assignment (expression has type "str | int", variable has type "int") [assignment] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Not all union combinations were tried because there are too many unions [misc] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 2 to "source" has incompatible type "str | None"; expected "str" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 3 to "source" has incompatible type "str | None"; expected "str" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 4 to "source" has incompatible type "int | None"; expected "int" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 6 to "source" has incompatible type "Schema | None"; expected "Schema" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 7 to "source" has incompatible type "Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | None"; expected "Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 8 to "source" has incompatible type "type[BaseConfiguration] | None"; expected "type[BaseConfiguration]" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "build_resource_dependency_graph" has incompatible type "EndpointResourceBase | None"; expected "EndpointResourceBase" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible types in assignment (expression has type "list[str] | None", variable has type "list[str]") [assignment] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "setup_incremental_object" has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "dict[str, Any]" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument "base_url" to "RESTClient" has incompatible type "str | None"; expected "str" [arg-type] 
-posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "exclude_keys" has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "Mapping[str, Any]" [arg-type] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible default for argument "resolved_param" (default has type "ResolvedParam | None", argument has type "ResolvedParam") [assignment] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument "module" to "SourceInfo" has incompatible type Module | None; expected Module [arg-type] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] 
-posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore] +posthog/api/sharing.py:0: error: Item "None" of "list[Any] | None" has no attribute "__iter__" (not iterable) [union-attr] +posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] +posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] +posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] +posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] +posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] posthog/api/test/batch_exports/conftest.py:0: error: Signature of "run" incompatible with supertype "Worker" [override] posthog/api/test/batch_exports/conftest.py:0: note: Superclass: posthog/api/test/batch_exports/conftest.py:0: note: def run(self) -> Coroutine[Any, Any, None] posthog/api/test/batch_exports/conftest.py:0: note: Subclass: posthog/api/test/batch_exports/conftest.py:0: note: def run(self, loop: Any) -> Any posthog/api/test/batch_exports/conftest.py:0: error: Argument "activities" to "ThreadedWorker" has incompatible type "list[function]"; expected "Sequence[Callable[..., Any]]" [arg-type] -posthog/warehouse/api/external_data_schema.py:0: error: Incompatible return value type (got "str | None", expected "SyncType | None") [return-value] -posthog/warehouse/api/external_data_schema.py:0: error: Argument 1 to "get_sql_schemas_for_source_type" has incompatible type "str"; expected "Type" [arg-type] -posthog/warehouse/api/external_data_schema.py:0: error: No overload variant of "get" of "dict" matches argument type "str" [call-overload] -posthog/warehouse/api/external_data_schema.py:0: note: Possible overload variants: -posthog/warehouse/api/external_data_schema.py:0: note: def get(self, Type, /) -> dict[str, list[IncrementalField]] | None -posthog/warehouse/api/external_data_schema.py:0: note: def get(self, Type, dict[str, list[IncrementalField]], /) -> dict[str, list[IncrementalField]] -posthog/warehouse/api/external_data_schema.py:0: note: def [_T] get(self, Type, _T, /) -> dict[str, list[IncrementalField]] | _T -posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore] -posthog/temporal/data_imports/workflow_activities/import_data.py:0: error: Argument "job_type" to "PipelineInputs" has incompatible type "str"; expected "Type" [arg-type] -posthog/temporal/data_imports/workflow_activities/import_data.py:0: error: Argument "source_type" to "sql_source_for_type" has incompatible type "str"; expected "Type" [arg-type] -posthog/temporal/data_imports/workflow_activities/import_data.py:0: error: Argument "source_type" to "sql_source_for_type" has incompatible type "str"; expected "Type" [arg-type] 
+posthog/temporal/tests/data_imports/test_end_to_end.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/api/test/test_team.py:0: error: "HttpResponse" has no attribute "json" [attr-defined] posthog/api/test/test_team.py:0: error: "HttpResponse" has no attribute "json" [attr-defined] +posthog/api/test/test_capture.py:0: error: Statement is unreachable [unreachable] +posthog/api/test/test_capture.py:0: error: Incompatible return value type (got "_MonkeyPatchedWSGIResponse", expected "HttpResponse") [return-value] +posthog/api/test/test_capture.py:0: error: Module has no attribute "utc" [attr-defined] +posthog/api/test/test_capture.py:0: error: Unpacked dict entry 0 has incompatible type "Collection[str]"; expected "SupportsKeysAndGetItem[str, dict[Never, Never]]" [dict-item] +posthog/api/test/test_capture.py:0: error: Unpacked dict entry 0 has incompatible type "Collection[str]"; expected "SupportsKeysAndGetItem[str, dict[Never, Never]]" [dict-item] +posthog/api/test/test_capture.py:0: error: Unpacked dict entry 0 has incompatible type "Collection[str]"; expected "SupportsKeysAndGetItem[str, dict[Never, Never]]" [dict-item] +posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] +posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] +posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] +posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] +posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] +posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] posthog/test/test_middleware.py:0: error: Incompatible types in assignment (expression has type "_MonkeyPatchedWSGIResponse", variable has type "_MonkeyPatchedResponse") [assignment] posthog/management/commands/test/test_create_batch_export_from_app.py:0: error: Incompatible return value type (got "dict[str, Collection[str]]", expected "dict[str, str]") [return-value] posthog/management/commands/test/test_create_batch_export_from_app.py:0: error: Incompatible types in assignment (expression has type "dict[str, Collection[str]]", variable has type "dict[str, str]") [assignment] @@ -863,21 +875,3 @@ posthog/api/test/batch_exports/test_update.py:0: error: Value of type "BatchExpo posthog/api/test/batch_exports/test_update.py:0: error: Value of type "BatchExport" is not indexable [index] posthog/api/test/batch_exports/test_update.py:0: error: Value of type "BatchExport" is not indexable [index] posthog/api/test/batch_exports/test_pause.py:0: error: "batch_export_delete_schedule" does not return a value (it only ever returns None) [func-returns-value] -posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] -posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] -posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] -posthog/temporal/tests/external_data/test_external_data_job.py:0: error: 
Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] -posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index] -posthog/api/test/test_capture.py:0: error: Statement is unreachable [unreachable] -posthog/api/test/test_capture.py:0: error: Incompatible return value type (got "_MonkeyPatchedWSGIResponse", expected "HttpResponse") [return-value] -posthog/api/test/test_capture.py:0: error: Module has no attribute "utc" [attr-defined] -posthog/api/test/test_capture.py:0: error: Unpacked dict entry 0 has incompatible type "Collection[str]"; expected "SupportsKeysAndGetItem[str, dict[Never, Never]]" [dict-item] -posthog/api/test/test_capture.py:0: error: Unpacked dict entry 0 has incompatible type "Collection[str]"; expected "SupportsKeysAndGetItem[str, dict[Never, Never]]" [dict-item] -posthog/api/test/test_capture.py:0: error: Unpacked dict entry 0 has incompatible type "Collection[str]"; expected "SupportsKeysAndGetItem[str, dict[Never, Never]]" [dict-item] -posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] -posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] -posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] -posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] -posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] -posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "str": "float"; expected "str": "int" [dict-item] -posthog/temporal/tests/data_imports/test_end_to_end.py:0: error: Unused "type: ignore" comment [unused-ignore] diff --git a/package.json b/package.json index 890721a2a9277..2c589fb56a0ce 100644 --- a/package.json +++ b/package.json @@ -77,7 +77,7 @@ "@microlink/react-json-view": "^1.21.3", "@monaco-editor/react": "4.6.0", "@posthog/hogvm": "^1.0.54", - "@posthog/icons": "0.8.4", + "@posthog/icons": "0.8.5", "@posthog/plugin-scaffold": "^1.4.4", "@react-hook/size": "^2.1.2", "@rrweb/types": "2.0.0-alpha.13", @@ -144,6 +144,7 @@ "kea-waitfor": "^0.2.1", "kea-window-values": "^3.0.0", "lodash.merge": "^4.6.2", + "lodash.uniqby": "^4.7.0", "maplibre-gl": "^3.5.1", "md5": "^2.3.0", "monaco-editor": "^0.49.0", @@ -152,7 +153,7 @@ "pmtiles": "^2.11.0", "postcss": "^8.4.31", "postcss-preset-env": "^9.3.0", - "posthog-js": "1.167.1", + "posthog-js": "1.174.2", "posthog-js-lite": "3.0.0", "prettier": "^2.8.8", "prop-types": "^15.7.2", @@ -233,6 +234,7 @@ "@types/jest": "^29.5.12", "@types/jest-image-snapshot": "^6.1.0", "@types/lodash.merge": "^4.6.9", + "@types/lodash.uniqby": "^4.7.9", "@types/md5": "^2.3.0", "@types/node": "^18.11.9", "@types/papaparse": "^5.3.8", diff --git a/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts b/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts index d5f1eda04d249..09db8ac9731b6 100644 --- a/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts +++ b/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts @@ -21,6 +21,13 @@ const HIGH_WATERMARK_KEY = 
'session_replay_events_ingester' const replayEventsCounter = new Counter({ name: 'replay_events_ingested', help: 'Number of Replay events successfully ingested', + labelNames: ['snapshot_source'], +}) + +const dataIngestedCounter = new Counter({ + name: 'replay_data_ingested', + help: 'Amount of data being ingested', + labelNames: ['snapshot_source'], }) export class ReplayEventsIngester { @@ -177,7 +184,8 @@ export class ReplayEventsIngester { return drop('session_replay_summarizer_error') } - replayEventsCounter.inc() + replayEventsCounter.inc({ snapshot_source: replayRecord.snapshot_source ?? undefined }) + dataIngestedCounter.inc({ snapshot_source: replayRecord.snapshot_source ?? undefined }, replayRecord.size) return [ produce({ diff --git a/plugin-server/src/main/services/http-server.ts b/plugin-server/src/main/services/http-server.ts index d4bb5ee5151a8..f64bdde00cf36 100644 --- a/plugin-server/src/main/services/http-server.ts +++ b/plugin-server/src/main/services/http-server.ts @@ -121,11 +121,13 @@ async function getProfileByType(req: Request, res: Response) { v8Profiler.setSamplingInterval(interval ?? 1000) // in microseconds v8Profiler.startProfiling('cpu', true, mode) finishProfile = () => v8Profiler.stopProfiling('cpu') + break case 'heap': // See https://v8docs.nodesource.com/node-18.16/d7/d76/classv8_1_1_heap_profiler.html const depth = typeof req.query.depth === 'string' ? parseInt(req.query.depth) : 16 v8Profiler.startSamplingHeapProfiling(interval ?? 512 * 1024, depth) finishProfile = () => v8Profiler.stopSamplingHeapProfiling() + break } if (finishProfile) { diff --git a/plugin-server/src/worker/ingestion/event-pipeline/enrichExceptionEventStep.ts b/plugin-server/src/worker/ingestion/event-pipeline/enrichExceptionEventStep.ts index 6d11eaaaad7f9..909e3f291bd4e 100644 --- a/plugin-server/src/worker/ingestion/event-pipeline/enrichExceptionEventStep.ts +++ b/plugin-server/src/worker/ingestion/event-pipeline/enrichExceptionEventStep.ts @@ -35,16 +35,21 @@ export function enrichExceptionEventStep( let type: string | null = null let message: string | null = null let firstFunction: string | null = null - let exceptionStack: string | null = null let exceptionList: any[] | null = null try { - exceptionStack = event.properties['$exception_stack_trace_raw'] exceptionList = event.properties['$exception_list'] const fingerPrint = event.properties['$exception_fingerprint'] type = event.properties['$exception_type'] message = event.properties['$exception_message'] + if (!type && exceptionList && exceptionList.length > 0) { + type = exceptionList[0].type + } + if (!message && exceptionList && exceptionList.length > 0) { + message = exceptionList[0].value + } + if (fingerPrint) { EXTERNAL_FINGERPRINT_COUNTER.inc() return Promise.resolve(event) @@ -55,12 +60,7 @@ export function enrichExceptionEventStep( } try { - if (exceptionStack) { - const parsedStack = JSON.parse(exceptionStack) - if (parsedStack.length > 0) { - firstFunction = parsedStack[0].function - } - } else if (exceptionList && exceptionList.length > 0) { + if (exceptionList && exceptionList.length > 0) { const firstException = exceptionList[0] if (firstException.stacktrace) { // TODO: Should this be the last function instead?, or first in app function? 
diff --git a/plugin-server/src/worker/ingestion/timestamps.ts b/plugin-server/src/worker/ingestion/timestamps.ts index 3cfb3097aa256..bf1e82f4dffdf 100644 --- a/plugin-server/src/worker/ingestion/timestamps.ts +++ b/plugin-server/src/worker/ingestion/timestamps.ts @@ -45,13 +45,21 @@ export function parseEventTimestamp(data: PluginEvent, callback?: IngestionWarni parsedTs = now } - if (!parsedTs.isValid) { - callback?.('ignored_invalid_timestamp', { + const parsedTsOutOfBounds = parsedTs.year < 0 || parsedTs.year > 9999 + if (!parsedTs.isValid || parsedTsOutOfBounds) { + const details: Record<string, any> = { eventUuid: data['uuid'] ?? '', field: 'timestamp', value: data['timestamp'] ?? '', - reason: parsedTs.invalidExplanation || 'unknown error', - }) + reason: parsedTs.invalidExplanation || (parsedTsOutOfBounds ? 'out of bounds' : 'unknown error'), + } + + if (parsedTsOutOfBounds) { + details['offset'] = data['offset'] + details['parsed_year'] = parsedTs.year + } + + callback?.('ignored_invalid_timestamp', details) return DateTime.utc() } diff --git a/plugin-server/tests/worker/ingestion/event-pipeline/enrichExceptionEventStep.test.ts b/plugin-server/tests/worker/ingestion/event-pipeline/enrichExceptionEventStep.test.ts index c9e4ea17127f7..7330865f51bce 100644 --- a/plugin-server/tests/worker/ingestion/event-pipeline/enrichExceptionEventStep.test.ts +++ b/plugin-server/tests/worker/ingestion/event-pipeline/enrichExceptionEventStep.test.ts @@ -4,8 +4,28 @@ import { enrichExceptionEventStep } from '../../../../src/worker/ingestion/event jest.mock('../../../../src/worker/plugins/run') -const aStackTrace = - '[{"filename":"http://localhost:8234/static/chunk-VDD5ZZ2W.js","function":"dependenciesChecker","in_app":true,"lineno":721,"colno":42},{"filename":"http://localhost:8234/static/chunk-VDD5ZZ2W.js","function":"?","in_app":true,"lineno":2474,"colno":40},{"filename":"http://localhost:8234/static/chunk-VDD5ZZ2W.js","function":"Object.memoized [as tiles]","in_app":true,"lineno":632,"colno":24},{"filename":"http://localhost:8234/static/chunk-VDD5ZZ2W.js","function":"dependenciesChecker","in_app":true,"lineno":721,"colno":42},{"filename":"http://localhost:8234/static/chunk-VDD5ZZ2W.js","function":"memoized","in_app":true,"lineno":632,"colno":24},{"filename":"http://localhost:8234/static/chunk-VDD5ZZ2W.js","function":"dependenciesChecker","in_app":true,"lineno":721,"colno":42},{"filename":"http://localhost:8234/static/chunk-VDD5ZZ2W.js","function":"logic.selector","in_app":true,"lineno":2517,"colno":18},{"filename":"http://localhost:8234/static/chunk-VDD5ZZ2W.js","function":"pathSelector","in_app":true,"lineno":2622,"colno":37},{"filename":"","function":"Array.reduce","in_app":true},{"filename":"http://localhost:8234/static/chunk-VDD5ZZ2W.js","function":"?","in_app":true,"lineno":2626,"colno":15}]' +const DEFAULT_EXCEPTION_LIST = [ + { + mechanism: { + handled: true, + type: 'generic', + synthetic: false, + }, + stacktrace: { + frames: [ + { + colno: 220, + filename: 'https://app-static-prod.posthog.com/static/chunk-UFQKIDIH.js', + function: 'submitZendeskTicket', + in_app: true, + lineno: 25, + }, + ], + }, + type: 'Error', + value: 'There was an error creating the support ticket with zendesk.', + }, +] const preIngestionEvent: PreIngestionEvent = { eventUuid: '018eebf3-cb48-750b-bfad-36409ea6f2b2', @@ -42,7 +62,7 @@ describe('enrichExceptionEvent()', () => { it('ignores non-exception events - even if they have a stack trace', async () => { event.event = 'not_exception' - 
event.properties['$exception_stack_trace_raw'] = '[{"some": "data"}]' + event.properties['$exception_list'] = DEFAULT_EXCEPTION_LIST expect(event.properties['$exception_fingerprint']).toBeUndefined() const response = await enrichExceptionEventStep(runner, event) @@ -51,7 +71,8 @@ describe('enrichExceptionEvent()', () => { it('use a fingerprint if it is present', async () => { event.event = '$exception' - event.properties['$exception_stack_trace_raw'] = '[{"some": "data"}]' + event.properties['$exception_list'] = DEFAULT_EXCEPTION_LIST + event.properties['$exception_fingerprint'] = 'some-fingerprint' const response = await enrichExceptionEventStep(runner, event) @@ -62,17 +83,21 @@ describe('enrichExceptionEvent()', () => { it('uses the message and stack trace as the simplest grouping', async () => { event.event = '$exception' event.properties['$exception_message'] = 'some-message' - event.properties['$exception_stack_trace_raw'] = aStackTrace + event.properties['$exception_list'] = DEFAULT_EXCEPTION_LIST const response = await enrichExceptionEventStep(runner, event) - expect(response.properties['$exception_fingerprint']).toStrictEqual(['some-message', 'dependenciesChecker']) + expect(response.properties['$exception_fingerprint']).toStrictEqual([ + 'Error', + 'some-message', + 'submitZendeskTicket', + ]) }) it('includes type in stack grouping when present', async () => { event.event = '$exception' event.properties['$exception_message'] = 'some-message' - event.properties['$exception_stack_trace_raw'] = aStackTrace + event.properties['$exception_list'] = DEFAULT_EXCEPTION_LIST event.properties['$exception_type'] = 'UnhandledRejection' const response = await enrichExceptionEventStep(runner, event) @@ -80,14 +105,14 @@ describe('enrichExceptionEvent()', () => { expect(response.properties['$exception_fingerprint']).toStrictEqual([ 'UnhandledRejection', 'some-message', - 'dependenciesChecker', + 'submitZendeskTicket', ]) }) it('falls back to message and type when no stack trace', async () => { event.event = '$exception' event.properties['$exception_message'] = 'some-message' - event.properties['$exception_stack_trace_raw'] = null + event.properties['$exception_list'] = null event.properties['$exception_type'] = 'UnhandledRejection' const response = await enrichExceptionEventStep(runner, event) @@ -98,11 +123,38 @@ describe('enrichExceptionEvent()', () => { it('adds no fingerprint if no qualifying properties', async () => { event.event = '$exception' event.properties['$exception_message'] = null - event.properties['$exception_stack_trace_raw'] = null + event.properties['$exception_list'] = null event.properties['$exception_type'] = null const response = await enrichExceptionEventStep(runner, event) expect(response.properties['$exception_fingerprint']).toBeUndefined() }) + + it('uses exception_list to generate message, type, and fingerprint when not present', async () => { + event.event = '$exception' + event.properties['$exception_list'] = DEFAULT_EXCEPTION_LIST + + const response = await enrichExceptionEventStep(runner, event) + + expect(response.properties['$exception_fingerprint']).toStrictEqual([ + 'Error', + 'There was an error creating the support ticket with zendesk.', + 'submitZendeskTicket', + ]) + }) + + it('exception_type overrides exception_list to generate fingerprint when present', async () => { + event.event = '$exception' + event.properties['$exception_list'] = DEFAULT_EXCEPTION_LIST + event.properties['$exception_type'] = 'UnhandledRejection' + + const response = await 
enrichExceptionEventStep(runner, event) + + expect(response.properties['$exception_fingerprint']).toStrictEqual([ + 'UnhandledRejection', + 'There was an error creating the support ticket with zendesk.', + 'submitZendeskTicket', + ]) + }) }) diff --git a/plugin-server/tests/worker/ingestion/timestamps.test.ts b/plugin-server/tests/worker/ingestion/timestamps.test.ts index fae7847df01da..a70844a349ae9 100644 --- a/plugin-server/tests/worker/ingestion/timestamps.test.ts +++ b/plugin-server/tests/worker/ingestion/timestamps.test.ts @@ -145,6 +145,34 @@ describe('parseEventTimestamp()', () => { expect(timestamp.toUTC().toISO()).toEqual('2021-10-29T01:43:54.000Z') }) + it('timestamps adjusted way out of bounds are ignored', () => { + const event = { + offset: 600000000000000, + timestamp: '2021-10-28T01:00:00.000Z', + sent_at: '2021-10-28T01:05:00.000Z', + now: '2021-10-28T01:10:00.000Z', + uuid: new UUIDT(), + } as any as PluginEvent + + const callbackMock = jest.fn() + const timestamp = parseEventTimestamp(event, callbackMock) + expect(callbackMock.mock.calls).toEqual([ + [ + 'ignored_invalid_timestamp', + { + field: 'timestamp', + eventUuid: event.uuid, + offset: 600000000000000, + parsed_year: -16992, + reason: 'out of bounds', + value: '2021-10-28T01:00:00.000Z', + }, + ], + ]) + + expect(timestamp.toUTC().toISO()).toEqual('2020-08-12T01:02:00.000Z') + }) + it('reports timestamp parsing error and fallbacks to DateTime.utc', () => { const event = { team_id: 123, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 065550d04cb5c..cd099b2fb4230 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -53,8 +53,8 @@ dependencies: specifier: ^1.0.54 version: 1.0.54(luxon@3.5.0) '@posthog/icons': - specifier: 0.8.4 - version: 0.8.4(react-dom@18.2.0)(react@18.2.0) + specifier: 0.8.5 + version: 0.8.5(react-dom@18.2.0)(react@18.2.0) '@posthog/plugin-scaffold': specifier: ^1.4.4 version: 1.4.4 @@ -253,6 +253,9 @@ dependencies: lodash.merge: specifier: ^4.6.2 version: 4.6.2 + lodash.uniqby: + specifier: ^4.7.0 + version: 4.7.0 maplibre-gl: specifier: ^3.5.1 version: 3.5.1 @@ -278,8 +281,8 @@ dependencies: specifier: ^9.3.0 version: 9.3.0(postcss@8.4.31) posthog-js: - specifier: 1.167.1 - version: 1.167.1 + specifier: 1.174.2 + version: 1.174.2 posthog-js-lite: specifier: 3.0.0 version: 3.0.0 @@ -518,6 +521,9 @@ devDependencies: '@types/lodash.merge': specifier: ^4.6.9 version: 4.6.9 + '@types/lodash.uniqby': + specifier: ^4.7.9 + version: 4.7.9 '@types/node': specifier: ^18.11.9 version: 18.11.9 @@ -5414,8 +5420,8 @@ packages: luxon: 3.5.0 dev: false - /@posthog/icons@0.8.4(react-dom@18.2.0)(react@18.2.0): - resolution: {integrity: sha512-AMCLQ0SOcLytsjavs8FlLc52oeUdkyeoatPSPONZ9QK//Xqx967Xf3ZvFxHbOxYiKFCShd996AGqBn+5nrhxOw==} + /@posthog/icons@0.8.5(react-dom@18.2.0)(react@18.2.0): + resolution: {integrity: sha512-bFPMgnR3ZaNnMQ81OznYFQRd7KaCqXcI8xS3qS49UBkSZpKeJgH86JbWXBXI2q2GZWX00gc+gZxEo5EBkY7KcQ==} peerDependencies: react: '>=16.14.0' react-dom: '>=16.14.0' @@ -8397,6 +8403,12 @@ packages: '@types/lodash': 4.14.188 dev: true + /@types/lodash.uniqby@4.7.9: + resolution: {integrity: sha512-rjrXji/seS6BZJRgXrU2h6FqxRVufsbq/HE0Tx0SdgbtlWr2YmD/M64BlYEYYlaMcpZwy32IYVkMfUMYlPuv0w==} + dependencies: + '@types/lodash': 4.14.188 + dev: true + /@types/lodash@4.14.188: resolution: {integrity: sha512-zmEmF5OIM3rb7SbLCFYoQhO4dGt2FRM9AMkxvA3LaADOF1n8in/zGJlWji9fmafLoNyz+FoL6FE0SLtGIArD7w==} dev: true @@ -10627,6 +10639,11 @@ packages: requiresBuild: true dev: false + /core-js@3.38.1: + resolution: {integrity: 
sha512-OP35aUorbU3Zvlx7pjsFdu1rGNnD4pgw/CWoYzRY3t2EzoVT7shKHY1dlAy3f41cGIO7ZDPQimhGFTlEYkG/Hw==} + requiresBuild: true + dev: false + /core-util-is@1.0.2: resolution: {integrity: sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ==} @@ -15687,6 +15704,10 @@ packages: resolution: {integrity: sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==} dev: false + /lodash.uniqby@4.7.0: + resolution: {integrity: sha512-e/zcLx6CSbmaEgFHCA7BnoQKyCtKMxnuWrJygbwPs/AIn+IMKl66L8/s+wBUn5LRw2pZx3bUHibiV1b6aTWIww==} + dev: false + /lodash@4.17.21: resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==} @@ -17752,9 +17773,10 @@ packages: resolution: {integrity: sha512-dyajjnfzZD1tht4N7p7iwf7nBnR1MjVaVu+MKr+7gBgA39bn28wizCIJZztZPtHy4PY0YwtSGgwfBCuG/hnHgA==} dev: false - /posthog-js@1.167.1: - resolution: {integrity: sha512-xt+HWcB1vi6tcHxQEf1FgctWqolQli899Yb/6j581U9uZy6ow16qXPrKJghHSuoJ18l4s0WvFB997J2EghzfbQ==} + /posthog-js@1.174.2: + resolution: {integrity: sha512-UgS7eRcDVvVz2XSJ09NMX8zBcdpFnPayfiWDNF3xEbJTsIu1GipkkYNrVlsWlq8U1PIrviNm6i0Dyq8daaxssw==} dependencies: + core-js: 3.38.1 fflate: 0.4.8 preact: 10.24.3 web-vitals: 4.2.3 diff --git a/posthog/api/__init__.py b/posthog/api/__init__.py index c79582ed8d726..173909d404df6 100644 --- a/posthog/api/__init__.py +++ b/posthog/api/__init__.py @@ -171,8 +171,8 @@ def register_grandfathered_environment_nested_viewset( "project_dashboard_templates", ["project_id"], ) -project_dashboards_router = projects_router.register( - r"dashboards", dashboard.DashboardsViewSet, "project_dashboards", ["project_id"] +environment_dashboards_router, legacy_project_dashboards_router = register_grandfathered_environment_nested_viewset( + r"dashboards", dashboard.DashboardsViewSet, "environment_dashboards", ["team_id"] ) register_grandfathered_environment_nested_viewset( @@ -418,34 +418,54 @@ def register_grandfathered_environment_nested_viewset( projects_router.register(r"experiments", EnterpriseExperimentsViewSet, "project_experiments", ["project_id"]) register_grandfathered_environment_nested_viewset(r"groups", GroupsViewSet, "environment_groups", ["team_id"]) projects_router.register(r"groups_types", GroupsTypesViewSet, "project_groups_types", ["project_id"]) - project_insights_router = projects_router.register( - r"insights", EnterpriseInsightsViewSet, "project_insights", ["project_id"] + environment_insights_router, legacy_project_insights_router = register_grandfathered_environment_nested_viewset( + r"insights", EnterpriseInsightsViewSet, "environment_insights", ["team_id"] ) register_grandfathered_environment_nested_viewset( r"persons", EnterprisePersonViewSet, "environment_persons", ["team_id"] ) router.register(r"person", LegacyEnterprisePersonViewSet, "persons") else: - project_insights_router = projects_router.register(r"insights", InsightViewSet, "project_insights", ["project_id"]) + environment_insights_router, legacy_project_insights_router = register_grandfathered_environment_nested_viewset( + r"insights", InsightViewSet, "environment_insights", ["team_id"] + ) register_grandfathered_environment_nested_viewset(r"persons", PersonViewSet, "environment_persons", ["team_id"]) router.register(r"person", LegacyPersonViewSet, "persons") -project_dashboards_router.register( +environment_dashboards_router.register( r"sharing", sharing.SharingConfigurationViewSet, "environment_dashboard_sharing", ["team_id", "dashboard_id"], 
) +legacy_project_dashboards_router.register( + r"sharing", + sharing.SharingConfigurationViewSet, + "project_dashboard_sharing", + ["team_id", "dashboard_id"], +) -project_insights_router.register( +environment_insights_router.register( r"sharing", sharing.SharingConfigurationViewSet, "environment_insight_sharing", ["team_id", "insight_id"], ) +legacy_project_insights_router.register( + r"sharing", + sharing.SharingConfigurationViewSet, + "project_insight_sharing", + ["team_id", "insight_id"], +) -project_insights_router.register( +environment_insights_router.register( + "thresholds", + alert.ThresholdViewSet, + "environment_insight_thresholds", + ["team_id", "insight_id"], +) +legacy_project_insights_router.register( "thresholds", alert.ThresholdViewSet, "project_insight_thresholds", diff --git a/posthog/api/activity_log.py b/posthog/api/activity_log.py index bbdfa1807da71..4730dde145ffb 100644 --- a/posthog/api/activity_log.py +++ b/posthog/api/activity_log.py @@ -3,7 +3,9 @@ from django.db.models import Q, QuerySet -from rest_framework import serializers, status, viewsets, pagination, mixins +from rest_framework import serializers, status, viewsets, mixins +from rest_framework.pagination import PageNumberPagination, CursorPagination, BasePagination + from posthog.api.utils import action from rest_framework.exceptions import ValidationError from rest_framework.request import Request @@ -11,7 +13,7 @@ from posthog.api.routing import TeamAndOrgViewSetMixin from posthog.api.shared import UserBasicSerializer -from posthog.models import ActivityLog, FeatureFlag, Insight, NotificationViewed, User, Cohort +from posthog.models import ActivityLog, FeatureFlag, Insight, NotificationViewed, User, Cohort, HogFunction from posthog.models.comment import Comment from posthog.models.notebook.notebook import Notebook @@ -41,9 +43,28 @@ def get_unread(self, obj: ActivityLog) -> bool: return bookmark_date < obj.created_at.replace(microsecond=obj.created_at.microsecond // 1000 * 1000) -class ActivityLogPagination(pagination.CursorPagination): - ordering = "-created_at" - page_size = 100 +class ActivityLogPagination(BasePagination): + def __init__(self): + self.page_number_pagination = PageNumberPagination() + self.cursor_pagination = CursorPagination() + self.page_number_pagination.page_size = 100 + self.page_number_pagination.page_size_query_param = "page_size" + self.page_number_pagination.max_page_size = 1000 + self.cursor_pagination.page_size = 100 + self.cursor_pagination.ordering = "-created_at" + + def paginate_queryset(self, queryset, request, view=None): + self.request = request + if request.query_params.get("page"): + return self.page_number_pagination.paginate_queryset(queryset, request, view) + else: + return self.cursor_pagination.paginate_queryset(queryset, request, view) + + def get_paginated_response(self, data): + if self.request and self.request.query_params.get("page"): + return self.page_number_pagination.get_paginated_response(data) + else: + return self.cursor_pagination.get_paginated_response(data) # context manager for gathering a sequence of server timings @@ -85,9 +106,15 @@ def safely_get_queryset(self, queryset) -> QuerySet: queryset = queryset.filter(user=params.get("user")) if params.get("scope"): queryset = queryset.filter(scope=params.get("scope")) + if params.get("scopes", None): + scopes = str(params.get("scopes", "")).split(",") + queryset = queryset.filter(scope__in=scopes) if params.get("item_id"): queryset = queryset.filter(item_id=params.get("item_id")) + if 
params.get("page"): + queryset = queryset.order_by("-created_at") + return queryset @action(methods=["GET"], detail=False) @@ -116,6 +143,9 @@ def important_changes(self, request: Request, *args: Any, **kwargs: Any) -> Resp Comment.objects.filter(created_by=user, team_id=self.team.pk).values_list("id", flat=True) ) my_cohorts = list(Cohort.objects.filter(created_by=user, team_id=self.team.pk).values_list("id", flat=True)) + my_hog_functions = list( + HogFunction.objects.filter(created_by=user, team_id=self.team.pk).values_list("id", flat=True) + ) # then things they edited interesting_changes = [ @@ -181,6 +211,17 @@ def important_changes(self, request: Request, *args: Any, **kwargs: Any) -> Resp .values_list("item_id", flat=True) ) + my_changed_hog_functions = list( + ActivityLog.objects.filter( + team_id=self.team.id, + activity__in=interesting_changes, + user_id=user.pk, + scope="HogFunction", + ) + .exclude(item_id__in=my_hog_functions) + .values_list("item_id", flat=True) + ) + last_read_date = ( NotificationViewed.objects.filter(user=user).values_list("last_viewed_activity_date", flat=True).first() ) @@ -234,6 +275,7 @@ def important_changes(self, request: Request, *args: Any, **kwargs: Any) -> Resp ) | Q(Q(scope="Comment") & Q(item_id__in=my_comments)) | Q(Q(scope="Cohort") & Q(item_id__in=my_cohorts)) + | Q(Q(scope="HogFunction") & Q(item_id__in=my_hog_functions)) ) | Q( # don't want to see creation of these things since that was before the user edited these things @@ -248,6 +290,7 @@ def important_changes(self, request: Request, *args: Any, **kwargs: Any) -> Resp ) | Q(Q(scope="Comment") & Q(item_id__in=my_changed_comments)) | Q(Q(scope="Cohort") & Q(item_id__in=my_changed_cohorts)) + | Q(Q(scope="HogFunction") & Q(item_id__in=my_changed_hog_functions)) ) ) ) diff --git a/posthog/api/alert.py b/posthog/api/alert.py index 19611889c6662..a177c61b0322d 100644 --- a/posthog/api/alert.py +++ b/posthog/api/alert.py @@ -16,6 +16,9 @@ from posthog.schema import AlertState from posthog.api.insight import InsightBasicSerializer +from posthog.utils import relative_date_parse +from zoneinfo import ZoneInfo + class ThresholdSerializer(serializers.ModelSerializer): class Meta: @@ -73,6 +76,11 @@ def validate(self, data): return data +class RelativeDateTimeField(serializers.DateTimeField): + def to_internal_value(self, data): + return data + + class AlertSerializer(serializers.ModelSerializer): created_by = UserBasicSerializer(read_only=True) checks = AlertCheckSerializer(many=True, read_only=True) @@ -84,6 +92,7 @@ class AlertSerializer(serializers.ModelSerializer): write_only=True, allow_empty=False, ) + snoozed_until = RelativeDateTimeField(allow_null=True, required=False) class Meta: model = AlertConfiguration @@ -104,6 +113,7 @@ class Meta: "checks", "config", "calculation_interval", + "snoozed_until", ] read_only_fields = [ "id", @@ -149,6 +159,30 @@ def create(self, validated_data: dict) -> AlertConfiguration: return instance def update(self, instance, validated_data): + if "snoozed_until" in validated_data: + snoozed_until_param = validated_data.pop("snoozed_until") + + if snoozed_until_param is None: + instance.state = AlertState.NOT_FIRING + instance.snoozed_until = None + else: + # always store snoozed_until as UTC time + # as we look at current UTC time to check when to run alerts + snoozed_until = relative_date_parse( + snoozed_until_param, ZoneInfo("UTC"), increase=True, always_truncate=True + ) + instance.state = AlertState.SNOOZED + instance.snoozed_until = snoozed_until + + 
AlertCheck.objects.create( + alert_configuration=instance, + calculated_value=None, + condition=instance.condition, + targets_notified={}, + state=instance.state, + error=None, + ) + conditions_or_threshold_changed = False threshold_data = validated_data.pop("threshold", None) @@ -183,6 +217,12 @@ def update(self, instance, validated_data): return super().update(instance, validated_data) + def validate_snoozed_until(self, value): + if value is not None and not isinstance(value, str): + raise ValidationError("snoozed_until has to be passed in string format") + + return value + def validate_insight(self, value): if value and not are_alerts_supported_for_insight(value): raise ValidationError("Alerts are not supported for this insight.") diff --git a/posthog/api/dashboards/dashboard.py b/posthog/api/dashboards/dashboard.py index 86c2c568e9340..ca626c0d1a8c2 100644 --- a/posthog/api/dashboards/dashboard.py +++ b/posthog/api/dashboards/dashboard.py @@ -2,7 +2,7 @@ from typing import Any, Optional, cast import structlog -from django.db.models import Prefetch, QuerySet +from django.db.models import Prefetch from django.shortcuts import get_object_or_404 from django.utils.timezone import now from rest_framework import exceptions, serializers, viewsets @@ -30,7 +30,7 @@ from posthog.models.tagged_item import TaggedItem from posthog.models.user import User from posthog.user_permissions import UserPermissionsSerializerMixin -from posthog.utils import filters_override_requested_by_client +from posthog.utils import filters_override_requested_by_client, variables_override_requested_by_client logger = structlog.get_logger(__name__) @@ -126,6 +126,7 @@ def get_effective_privilege_level(self, dashboard: Dashboard) -> Dashboard.Privi class DashboardSerializer(DashboardBasicSerializer): tiles = serializers.SerializerMethodField() filters = serializers.SerializerMethodField() + variables = serializers.SerializerMethodField() created_by = UserBasicSerializer(read_only=True) use_template = serializers.CharField(write_only=True, allow_blank=True, required=False) use_dashboard = serializers.IntegerField(write_only=True, allow_null=True, required=False) @@ -150,6 +151,7 @@ class Meta: "use_dashboard", "delete_insights", "filters", + "variables", "tags", "tiles", "restriction_level", @@ -164,6 +166,12 @@ def validate_filters(self, value) -> dict: return value + def validate_variables(self, value) -> dict: + if not isinstance(value, dict): + raise serializers.ValidationError("Variables must be a dictionary") + + return value + @monitor(feature=Feature.DASHBOARD, endpoint="dashboard", method="POST") def create(self, validated_data: dict, *args: Any, **kwargs: Any) -> Dashboard: request = self.context["request"] @@ -301,6 +309,12 @@ def update(self, instance: Dashboard, validated_data: dict, *args: Any, **kwargs raise serializers.ValidationError("Filters must be a dictionary") instance.filters = request_filters + request_variables = initial_data.get("variables") + if request_variables: + if not isinstance(request_variables, dict): + raise serializers.ValidationError("Variables must be a dictionary") + instance.variables = request_variables + instance = super().update(instance, validated_data) user = cast(User, self.context["request"].user) @@ -410,6 +424,16 @@ def get_filters(self, dashboard: Dashboard) -> dict: return dashboard.filters + def get_variables(self, dashboard: Dashboard) -> dict: + request = self.context.get("request") + if request: + variables_override = variables_override_requested_by_client(request) + +
if variables_override is not None: + return variables_override + + return dashboard.variables + def validate(self, data): if data.get("use_dashboard", None) and data.get("use_template", None): raise serializers.ValidationError("`use_dashboard` and `use_template` cannot be used together") @@ -437,7 +461,12 @@ class DashboardsViewSet( def get_serializer_class(self) -> type[BaseSerializer]: return DashboardBasicSerializer if self.action == "list" else DashboardSerializer - def safely_get_queryset(self, queryset) -> QuerySet: + def dangerously_get_queryset(self): + # Dashboards are retrieved under /environments/ because they include team-specific query results, + # but they are in fact project-level, rather than environment-level + assert self.team.project_id is not None + queryset = self.queryset.filter(team__project_id=self.team.project_id) + include_deleted = ( self.action == "partial_update" and "deleted" in self.request.data @@ -488,7 +517,7 @@ def retrieve(self, request: Request, *args: Any, **kwargs: Any) -> Response: dashboard = get_object_or_404(queryset, pk=pk) dashboard.last_accessed_at = now() dashboard.save(update_fields=["last_accessed_at"]) - serializer = DashboardSerializer(dashboard, context={"view": self, "request": request}) + serializer = DashboardSerializer(dashboard, context=self.get_serializer_context()) return Response(serializer.data) @action(methods=["PATCH"], detail=True) @@ -504,7 +533,7 @@ def move_tile(self, request: Request, *args: Any, **kwargs: Any) -> Response: serializer = DashboardSerializer( Dashboard.objects.get(id=from_dashboard), - context={"view": self, "request": request}, + context=self.get_serializer_context(), ) return Response(serializer.data) @@ -544,7 +573,7 @@ def create_from_template_json(self, request: Request, *args: Any, **kwargs: Any) dashboard.delete() raise - return Response(DashboardSerializer(dashboard, context={"view": self, "request": request}).data) + return Response(DashboardSerializer(dashboard, context=self.get_serializer_context()).data) class LegacyDashboardsViewSet(DashboardsViewSet): diff --git a/posthog/api/decide.py b/posthog/api/decide.py index 0569e2d5772fe..bcc597474a3ca 100644 --- a/posthog/api/decide.py +++ b/posthog/api/decide.py @@ -10,15 +10,20 @@ from sentry_sdk import capture_exception from statshog.defaults.django import statsd -from posthog.geoip import get_geoip_properties from posthog.api.survey import SURVEY_TARGETING_FLAG_PREFIX -from posthog.api.utils import get_project_id, get_token, hostname_in_allowed_url_list, parse_domain +from posthog.api.utils import ( + get_project_id, + get_token, + hostname_in_allowed_url_list, + parse_domain, +) from posthog.database_healthcheck import DATABASE_FOR_FLAG_MATCHING from posthog.exceptions import ( - UnspecifiedCompressionFallbackParsingError, RequestParsingError, + UnspecifiedCompressionFallbackParsingError, generate_exception_response, ) +from posthog.geoip import get_geoip_properties from posthog.logging.timing import timed from posthog.metrics import LABEL_TEAM_ID from posthog.models import Team, User @@ -265,7 +270,11 @@ def get_decide(request: HttpRequest): response["sessionRecording"] = _session_recording_config_response(request, team, token) if settings.DECIDE_SESSION_REPLAY_QUOTA_CHECK: - from ee.billing.quota_limiting import QuotaLimitingCaches, QuotaResource, list_limited_team_attributes + from ee.billing.quota_limiting import ( + QuotaLimitingCaches, + QuotaResource, + list_limited_team_attributes, + ) limited_tokens_recordings = 
list_limited_team_attributes( QuotaResource.RECORDINGS, QuotaLimitingCaches.QUOTA_LIMITER_CACHE_KEY @@ -277,6 +286,8 @@ def get_decide(request: HttpRequest): response["surveys"] = True if team.surveys_opt_in else False response["heatmaps"] = True if team.heatmaps_opt_in else False + default_identified_only = team.pk >= settings.DEFAULT_IDENTIFIED_ONLY_TEAM_ID_MIN + response["defaultIdentifiedOnly"] = bool(default_identified_only) site_apps = [] # errors mean the database is unavailable, bail in this case @@ -351,6 +362,7 @@ def _session_recording_config_response(request: HttpRequest, team: Team, token: "minimumDurationMilliseconds": minimum_duration, "linkedFlag": linked_flag, "networkPayloadCapture": team.session_recording_network_payload_capture_config or None, + "urlTriggers": team.session_recording_url_trigger_config, } if isinstance(team.session_replay_config, dict): diff --git a/posthog/api/event_definition.py b/posthog/api/event_definition.py index d8d1584c9c61e..4156d10f82793 100644 --- a/posthog/api/event_definition.py +++ b/posthog/api/event_definition.py @@ -88,7 +88,7 @@ def dangerously_get_queryset(self): search = self.request.GET.get("search", None) search_query, search_kwargs = term_search_filter_sql(self.search_fields, search) - params = {"team_id": self.team.project_id, "is_posthog_event": "$%", **search_kwargs} + params = {"project_id": self.project_id, "is_posthog_event": "$%", **search_kwargs} order_expressions = [self._ordering_params_from_request()] ingestion_taxonomy_is_available = self.organization.is_feature_available(AvailableFeature.INGESTION_TAXONOMY) @@ -136,11 +136,11 @@ def dangerously_get_object(self): ): from ee.models.event_definition import EnterpriseEventDefinition - enterprise_event = EnterpriseEventDefinition.objects.filter(id=id, team_id=self.team_id).first() + enterprise_event = EnterpriseEventDefinition.objects.filter(id=id, team__project_id=self.project_id).first() if enterprise_event: return enterprise_event - non_enterprise_event = EventDefinition.objects.get(id=id, team_id=self.team_id) + non_enterprise_event = EventDefinition.objects.get(id=id, team__project_id=self.project_id) new_enterprise_event = EnterpriseEventDefinition( eventdefinition_ptr_id=non_enterprise_event.id, description="" ) @@ -148,7 +148,7 @@ def dangerously_get_object(self): new_enterprise_event.save() return new_enterprise_event - return EventDefinition.objects.get(id=id, team_id=self.team_id) + return EventDefinition.objects.get(id=id, team__project_id=self.project_id) def get_serializer_class(self) -> type[serializers.ModelSerializer]: serializer_class = self.serializer_class diff --git a/posthog/api/feature_flag.py b/posthog/api/feature_flag.py index aa9aa8222b9b2..324eb87765441 100644 --- a/posthog/api/feature_flag.py +++ b/posthog/api/feature_flag.py @@ -175,7 +175,7 @@ def validate_key(self, value): exclude_kwargs = {"pk": cast(FeatureFlag, self.instance).pk} if ( - FeatureFlag.objects.filter(key=value, team_id=self.context["team_id"], deleted=False) + FeatureFlag.objects.filter(key=value, team__project_id=self.context["project_id"], deleted=False) .exclude(**exclude_kwargs) .exists() ): diff --git a/posthog/api/hog_function.py b/posthog/api/hog_function.py index 5f926acb49bc9..d1dc2aca65860 100644 --- a/posthog/api/hog_function.py +++ b/posthog/api/hog_function.py @@ -3,6 +3,7 @@ import structlog from django_filters.rest_framework import DjangoFilterBackend from django.db.models import QuerySet +from loginas.utils import is_impersonated_session from rest_framework 
import serializers, viewsets, exceptions from rest_framework.serializers import BaseSerializer @@ -22,6 +23,7 @@ from posthog.cdp.templates import HOG_FUNCTION_TEMPLATES_BY_ID from posthog.cdp.validation import compile_hog, generate_template_bytecode, validate_inputs, validate_inputs_schema from posthog.constants import AvailableFeature +from posthog.models.activity_logging.activity_log import log_activity, changes_between, Detail from posthog.models.hog_functions.hog_function import HogFunction, HogFunctionState from posthog.plugins.plugin_server_api import create_hog_invocation_test @@ -306,3 +308,39 @@ def invocations(self, request: Request, *args, **kwargs): return Response({"status": "error"}, status=res.status_code) return Response(res.json()) + + def perform_create(self, serializer): + serializer.save() + log_activity( + organization_id=self.organization.id, + team_id=self.team_id, + user=serializer.context["request"].user, + was_impersonated=is_impersonated_session(serializer.context["request"]), + item_id=serializer.instance.id, + scope="HogFunction", + activity="created", + detail=Detail(name=serializer.instance.name, type=serializer.instance.type), + ) + + def perform_update(self, serializer): + instance_id = serializer.instance.id + + try: + before_update = HogFunction.objects.get(pk=instance_id) + except HogFunction.DoesNotExist: + before_update = None + + serializer.save() + + changes = changes_between("HogFunction", previous=before_update, current=serializer.instance) + + log_activity( + organization_id=self.organization.id, + team_id=self.team_id, + user=serializer.context["request"].user, + was_impersonated=is_impersonated_session(serializer.context["request"]), + item_id=instance_id, + scope="HogFunction", + activity="updated", + detail=Detail(changes=changes, name=serializer.instance.name, type=serializer.instance.type), + ) diff --git a/posthog/api/insight.py b/posthog/api/insight.py index f27a1f41e559f..d1aa643a400a0 100644 --- a/posthog/api/insight.py +++ b/posthog/api/insight.py @@ -60,6 +60,7 @@ from posthog.hogql_queries.apply_dashboard_filters import ( WRAPPER_NODE_KINDS, apply_dashboard_filters_to_dict, + apply_dashboard_variables_to_dict, ) from posthog.hogql_queries.legacy_compatibility.feature_flag import ( hogql_insights_replace_filters, @@ -109,10 +110,11 @@ from posthog.settings import CAPTURE_TIME_TO_SEE_DATA, SITE_URL from posthog.user_permissions import UserPermissionsSerializerMixin from posthog.utils import ( - filters_override_requested_by_client, refresh_requested_by_client, relative_date_parse, str_to_bool, + filters_override_requested_by_client, + variables_override_requested_by_client, ) logger = structlog.get_logger(__name__) @@ -594,12 +596,17 @@ def to_representation(self, instance: Insight): dashboard: Optional[Dashboard] = self.context.get("dashboard") request: Optional[Request] = self.context.get("request") dashboard_filters_override = filters_override_requested_by_client(request) if request else None + dashboard_variables_override = variables_override_requested_by_client(request) if request else None if hogql_insights_replace_filters(instance.team) and ( instance.query is not None or instance.query_from_filters is not None ): query = instance.query or instance.query_from_filters - if dashboard is not None or dashboard_filters_override is not None: + if ( + dashboard is not None + or dashboard_filters_override is not None + or dashboard_variables_override is not None + ): query = apply_dashboard_filters_to_dict( query, ( @@ -611,6 +618,12 @@ 
def to_representation(self, instance: Insight): ), instance.team, ) + + query = apply_dashboard_variables_to_dict( + query, + dashboard_variables_override or {}, + instance.team, + ) representation["filters"] = {} representation["query"] = query else: @@ -618,7 +631,9 @@ def to_representation(self, instance: Insight): dashboard=dashboard, dashboard_filters_override=dashboard_filters_override ) representation["query"] = instance.get_effective_query( - dashboard=dashboard, dashboard_filters_override=dashboard_filters_override + dashboard=dashboard, + dashboard_filters_override=dashboard_filters_override, + dashboard_variables_override=dashboard_variables_override, ) if "insight" not in representation["filters"] and not representation["query"]: @@ -639,16 +654,19 @@ def insight_result(self, insight: Insight) -> InsightResult: refresh_requested = refresh_requested_by_client(self.context["request"]) execution_mode = execution_mode_from_refresh(refresh_requested) filters_override = filters_override_requested_by_client(self.context["request"]) + variables_override = variables_override_requested_by_client(self.context["request"]) if self.context.get("is_shared", False): execution_mode = shared_insights_execution_mode(execution_mode) return calculate_for_query_based_insight( insight, + team=self.context["get_team"](), dashboard=dashboard, execution_mode=execution_mode, user=None if self.context["request"].user.is_anonymous else self.context["request"].user, filters_override=filters_override, + variables_override=variables_override, ) except ExposedHogQLError as e: raise ValidationError(str(e)) @@ -726,7 +744,12 @@ def get_serializer_context(self) -> dict[str, Any]: context["is_shared"] = isinstance(self.request.successful_authenticator, SharingAccessTokenAuthentication) return context - def safely_get_queryset(self, queryset) -> QuerySet: + def dangerously_get_queryset(self): + # Insights are retrieved under /environments/ because they include team-specific query results, + # but they are in fact project-level, rather than environment-level + assert self.team.project_id is not None + queryset = self.queryset.filter(team__project_id=self.team.project_id) + include_deleted = False if isinstance(self.request.successful_authenticator, SharingAccessTokenAuthentication): diff --git a/posthog/api/insight_variable.py b/posthog/api/insight_variable.py index 85303b4e58c84..f1a135bc69e0c 100644 --- a/posthog/api/insight_variable.py +++ b/posthog/api/insight_variable.py @@ -1,5 +1,6 @@ from django_filters.rest_framework import DjangoFilterBackend from rest_framework import serializers, viewsets +from rest_framework.exceptions import ValidationError from posthog.api.routing import TeamAndOrgViewSetMixin from posthog.models.insight_variable import InsightVariable @@ -9,7 +10,7 @@ class InsightVariableSerializer(serializers.ModelSerializer): class Meta: model = InsightVariable - fields = ["id", "name", "type", "default_value", "created_by", "created_at", "code_name"] + fields = ["id", "name", "type", "default_value", "created_by", "created_at", "code_name", "values"] read_only_fields = ["id", "code_name", "created_by", "created_at"] @@ -22,6 +23,13 @@ def create(self, validated_data): "".join(n for n in validated_data["name"] if n.isalnum() or n == " ").replace(" ", "_").lower() ) + count = InsightVariable.objects.filter( + team_id=self.context["team_id"], code_name=validated_data["code_name"] + ).count() + + if count > 0: + raise ValidationError("Variable with name already exists") + return 
InsightVariable.objects.create(**validated_data) diff --git a/posthog/api/organization_feature_flag.py b/posthog/api/organization_feature_flag.py index d91ec15ba1c41..3585f5f149849 100644 --- a/posthog/api/organization_feature_flag.py +++ b/posthog/api/organization_feature_flag.py @@ -179,6 +179,7 @@ def copy_flags(self, request, *args, **kwargs): context = { "request": request, "team_id": target_project_id, + "project_id": target_project_id, } existing_flag = FeatureFlag.objects.filter( diff --git a/posthog/api/project.py b/posthog/api/project.py index af933e440b5a3..6fbd4e42891f3 100644 --- a/posthog/api/project.py +++ b/posthog/api/project.py @@ -297,7 +297,7 @@ def update(self, instance: Project, validated_data: dict[str, Any]) -> Project: item_id="#", activity="updated", detail=Detail( - name="Survey Config", + name="global survey appearance", changes=survey_config_changes_between, ), ) diff --git a/posthog/api/query.py b/posthog/api/query.py index 7896e102ca204..1d3bf3f67edda 100644 --- a/posthog/api/query.py +++ b/posthog/api/query.py @@ -28,7 +28,10 @@ from posthog.event_usage import report_user_action from posthog.hogql.ai import PromptUnclear, write_sql_from_prompt from posthog.hogql.errors import ExposedHogQLError -from posthog.hogql_queries.apply_dashboard_filters import apply_dashboard_filters_to_dict +from posthog.hogql_queries.apply_dashboard_filters import ( + apply_dashboard_filters_to_dict, + apply_dashboard_variables_to_dict, +) from posthog.hogql_queries.query_runner import ExecutionMode, execution_mode_from_refresh from posthog.models.user import User from posthog.rate_limit import ( @@ -79,6 +82,14 @@ def create(self, request, *args, **kwargs) -> Response: data.query.model_dump(), data.filters_override.model_dump(), self.team ) # type: ignore + if data.variables_override is not None: + if isinstance(data.query, BaseModel): + query_as_dict = data.query.model_dump() + else: + query_as_dict = data.query + + data.query = apply_dashboard_variables_to_dict(query_as_dict, data.variables_override, self.team) # type: ignore + client_query_id = data.client_query_id or uuid.uuid4().hex execution_mode = execution_mode_from_refresh(data.refresh) response_status: int = status.HTTP_200_OK diff --git a/posthog/api/routing.py b/posthog/api/routing.py index 300f579156bfb..9ff2fede76449 100644 --- a/posthog/api/routing.py +++ b/posthog/api/routing.py @@ -242,14 +242,12 @@ def team(self) -> Team: @cached_property def project_id(self) -> int: if team_from_token := self._get_team_from_request(): - assert team_from_token.project_id is not None return team_from_token.project_id if self.param_derived_from_user_current_team == "project_id": user = cast(User, self.request.user) team = user.team assert team is not None - assert team.project_id is not None return team.project_id return self.parents_query_dict["project_id"] diff --git a/posthog/api/services/query.py b/posthog/api/services/query.py index be9f6d56bea34..c3ca91cb88ab5 100644 --- a/posthog/api/services/query.py +++ b/posthog/api/services/query.py @@ -18,6 +18,7 @@ from posthog.models import Team, User from posthog.schema import ( DatabaseSchemaQueryResponse, + HogQLVariable, HogQuery, DashboardFilter, HogQLAutocomplete, @@ -35,6 +36,7 @@ def process_query_dict( query_json: dict, *, dashboard_filters_json: Optional[dict] = None, + variables_override_json: Optional[dict] = None, limit_context: Optional[LimitContext] = None, execution_mode: ExecutionMode = ExecutionMode.RECENT_CACHE_CALCULATE_BLOCKING_IF_STALE, user: Optional[User] = 
None, @@ -44,11 +46,17 @@ def process_query_dict( ) -> dict | BaseModel: model = QuerySchemaRoot.model_validate(query_json) tag_queries(query=query_json) + dashboard_filters = DashboardFilter.model_validate(dashboard_filters_json) if dashboard_filters_json else None + variables_override = ( + [HogQLVariable.model_validate(n) for n in variables_override_json.values()] if variables_override_json else None + ) + return process_query_model( team, model.root, dashboard_filters=dashboard_filters, + variables_override=variables_override, limit_context=limit_context, execution_mode=execution_mode, user=user, @@ -63,6 +71,7 @@ def process_query_model( query: BaseModel, # mypy has problems with unions and isinstance *, dashboard_filters: Optional[DashboardFilter] = None, + variables_override: Optional[list[HogQLVariable]] = None, limit_context: Optional[LimitContext] = None, execution_mode: ExecutionMode = ExecutionMode.RECENT_CACHE_CALCULATE_BLOCKING_IF_STALE, user: Optional[User] = None, @@ -80,6 +89,7 @@ def process_query_model( team, query.source, dashboard_filters=dashboard_filters, + variables_override=variables_override, limit_context=limit_context, execution_mode=execution_mode, user=user, @@ -119,6 +129,8 @@ def process_query_model( else: # Query runner available - it will handle execution as well as caching if dashboard_filters: query_runner.apply_dashboard_filters(dashboard_filters) + if variables_override: + query_runner.apply_variable_overrides(variables_override) result = query_runner.run( execution_mode=execution_mode, user=user, diff --git a/posthog/api/sharing.py b/posthog/api/sharing.py index ef85e143152c2..d0cf5af56bafd 100644 --- a/posthog/api/sharing.py +++ b/posthog/api/sharing.py @@ -246,6 +246,7 @@ def retrieve(self, request: Request, *args: Any, **kwargs: Any) -> Any: "request": request, "user_permissions": UserPermissions(cast(User, request.user), resource.team), "is_shared": True, + "get_team": lambda: resource.team, } exported_data: dict[str, Any] = {"type": "embed" if embedded else "scene"} diff --git a/posthog/api/survey.py b/posthog/api/survey.py index b2faf7a419186..ea894a7dd30c0 100644 --- a/posthog/api/survey.py +++ b/posthog/api/survey.py @@ -386,7 +386,7 @@ def update(self, instance: Survey, validated_data): instance.targeting_flag.active = False instance.targeting_flag.save() - iteration_count = validated_data.get("iteration_count") + iteration_count = validated_data.get("iteration_count", None) if ( instance.current_iteration is not None and iteration_count is not None @@ -396,8 +396,9 @@ def update(self, instance: Survey, validated_data): f"Cannot change survey recurrence to {iteration_count}, should be at least {instance.current_iteration}" ) - instance.iteration_count = iteration_count - instance.iteration_frequency_days = validated_data.get("iteration_frequency_days") + if iteration_count is not None: + instance.iteration_count = iteration_count + instance.iteration_frequency_days = validated_data.get("iteration_frequency_days") instance = super().update(instance, validated_data) diff --git a/posthog/api/team.py b/posthog/api/team.py index c37054c3bedc5..566ec7fad57ed 100644 --- a/posthog/api/team.py +++ b/posthog/api/team.py @@ -103,6 +103,7 @@ class Meta: "session_recording_minimum_duration_milliseconds", "session_recording_linked_flag", "session_recording_network_payload_capture_config", + "session_recording_url_trigger_config", "session_replay_config", "survey_config", "recording_domains", @@ -156,6 +157,7 @@ class Meta: 
"session_recording_minimum_duration_milliseconds", "session_recording_linked_flag", "session_recording_network_payload_capture_config", + "session_recording_url_trigger_config", "session_replay_config", "survey_config", "effective_membership_level", @@ -357,7 +359,7 @@ def update(self, instance: Team, validated_data: dict[str, Any]) -> Team: item_id="", activity="updated", detail=Detail( - name="Team Survey Config", + name="global survey appearance", changes=survey_config_changes_between, ), ) diff --git a/posthog/api/test/__snapshots__/test_action.ambr b/posthog/api/test/__snapshots__/test_action.ambr index 22b2c810b3c44..a55b9fe7296a2 100644 --- a/posthog/api/test/__snapshots__/test_action.ambr +++ b/posthog/api/test/__snapshots__/test_action.ambr @@ -7,7 +7,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -19,6 +18,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -57,6 +57,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -97,7 +98,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -109,6 +109,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -147,6 +148,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -262,7 +264,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -275,6 +276,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -378,7 +380,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -391,6 +392,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -419,7 +421,6 @@ 
"posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -431,6 +432,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -469,6 +471,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -584,7 +587,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -597,6 +599,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", diff --git a/posthog/api/test/__snapshots__/test_annotation.ambr b/posthog/api/test/__snapshots__/test_annotation.ambr index 4180dd11bd55c..6f4850c626049 100644 --- a/posthog/api/test/__snapshots__/test_annotation.ambr +++ b/posthog/api/test/__snapshots__/test_annotation.ambr @@ -7,7 +7,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -19,6 +18,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -57,6 +57,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -115,6 +116,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -239,7 +241,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -252,6 +253,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -320,7 +322,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", 
"posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -332,6 +333,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -370,6 +372,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -494,7 +497,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -507,6 +509,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -533,7 +536,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -545,6 +547,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", diff --git a/posthog/api/test/__snapshots__/test_api_docs.ambr b/posthog/api/test/__snapshots__/test_api_docs.ambr index 59745fc27236d..5f9fbda86651a 100644 --- a/posthog/api/test/__snapshots__/test_api_docs.ambr +++ b/posthog/api/test/__snapshots__/test_api_docs.ambr @@ -11,6 +11,9 @@ '/home/runner/work/posthog/posthog/posthog/batch_exports/http.py: Warning [BatchExportViewSet]: could not derive type of path parameter "project_id" because model "posthog.batch_exports.models.BatchExport" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/batch_exports/http.py: Warning [BatchExportViewSet > BatchExportSerializer]: could not resolve serializer field "HogQLSelectQueryField(required=False)". Defaulting to "string"', '/home/runner/work/posthog/posthog/posthog/batch_exports/http.py: Warning [BatchExportRunViewSet]: could not derive type of path parameter "project_id" because model "posthog.batch_exports.models.BatchExportRun" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', + '/home/runner/work/posthog/posthog/posthog/api/dashboards/dashboard.py: Warning [DashboardsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.dashboard.Dashboard" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', + '/home/runner/work/posthog/posthog/ee/api/dashboard_collaborator.py: Warning [DashboardCollaboratorViewSet]: could not derive type of path parameter "project_id" because model "ee.models.dashboard_privilege.DashboardPrivilege" contained no such field. Consider annotating parameter with @extend_schema. 
Defaulting to "string".', + '/home/runner/work/posthog/posthog/posthog/api/sharing.py: Warning [SharingConfigurationViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.sharing_configuration.SharingConfiguration" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/event.py: Warning [EventViewSet]: could not derive type of path parameter "project_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/models/event/util.py: Warning [EventViewSet > ClickhouseEventSerializer]: unable to resolve type hint for function "get_id". Consider using a type hint or @extend_schema_field. Defaulting to string.', '/home/runner/work/posthog/posthog/posthog/models/event/util.py: Warning [EventViewSet > ClickhouseEventSerializer]: unable to resolve type hint for function "get_distinct_id". Consider using a type hint or @extend_schema_field. Defaulting to string.', @@ -26,6 +29,18 @@ '/home/runner/work/posthog/posthog/posthog/api/exports.py: Warning [ExportedAssetViewSet > ExportedAssetSerializer]: unable to resolve type hint for function "has_content". Consider using a type hint or @extend_schema_field. Defaulting to string.', '/home/runner/work/posthog/posthog/posthog/api/exports.py: Warning [ExportedAssetViewSet > ExportedAssetSerializer]: unable to resolve type hint for function "filename". Consider using a type hint or @extend_schema_field. Defaulting to string.', '/home/runner/work/posthog/posthog/ee/clickhouse/views/groups.py: Warning [GroupsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.group.group.Group" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', + '/home/runner/work/posthog/posthog/ee/clickhouse/views/insights.py: Warning [EnterpriseInsightsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.insight.Insight" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', + '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_last_refresh". Consider using a type hint or @extend_schema_field. Defaulting to string.', + '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_cache_target_age". Consider using a type hint or @extend_schema_field. Defaulting to string.', + '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_next_allowed_client_refresh". Consider using a type hint or @extend_schema_field. Defaulting to string.', + '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_result". Consider using a type hint or @extend_schema_field. Defaulting to string.', + '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_hasMore". 
Consider using a type hint or @extend_schema_field. Defaulting to string.', + '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_columns". Consider using a type hint or @extend_schema_field. Defaulting to string.', + '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_timezone". Consider using a type hint or @extend_schema_field. Defaulting to string.', + '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_is_cached". Consider using a type hint or @extend_schema_field. Defaulting to string.', + '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_query_status". Consider using a type hint or @extend_schema_field. Defaulting to string.', + '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_hogql". Consider using a type hint or @extend_schema_field. Defaulting to string.', + '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_types". Consider using a type hint or @extend_schema_field. Defaulting to string.', '/home/runner/work/posthog/posthog/ee/clickhouse/views/person.py: Warning [EnterprisePersonViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.person.person.Person" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/plugin.py: Warning [PipelineDestinationsConfigsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.plugin.PluginConfig" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/plugin.py: Warning [PipelineDestinationsConfigsViewSet > PluginConfigSerializer]: unable to resolve type hint for function "get_config". Consider using a type hint or @extend_schema_field. Defaulting to string.', @@ -43,7 +58,6 @@ '/home/runner/work/posthog/posthog/posthog/session_recordings/session_recording_api.py: Warning [SessionRecordingViewSet]: could not derive type of path parameter "project_id" because model "posthog.session_recordings.models.session_recording.SessionRecording" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/person.py: Warning [SessionRecordingViewSet > SessionRecordingSerializer > MinimalPersonSerializer]: unable to resolve type hint for function "get_distinct_ids". Consider using a type hint or @extend_schema_field. Defaulting to string.', '/home/runner/work/posthog/posthog/posthog/session_recordings/session_recording_api.py: Warning [SessionRecordingViewSet > SessionRecordingSerializer]: unable to resolve type hint for function "storage". Consider using a type hint or @extend_schema_field. 
Defaulting to string.', - '/home/runner/work/posthog/posthog/posthog/api/sharing.py: Warning [SharingConfigurationViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.sharing_configuration.SharingConfiguration" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/session.py: Warning [SessionViewSet]: could not derive type of path parameter "project_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/ee/api/subscription.py: Warning [SubscriptionViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.subscription.Subscription" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/ee/api/subscription.py: Warning [SubscriptionViewSet > SubscriptionSerializer]: unable to resolve type hint for function "summary". Consider using a type hint or @extend_schema_field. Defaulting to string.', @@ -65,8 +79,6 @@ '/home/runner/work/posthog/posthog/posthog/api/annotation.py: Warning [AnnotationsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.annotation.Annotation" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/cohort.py: Warning [CohortViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.cohort.cohort.Cohort" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/dashboards/dashboard_templates.py: Warning [DashboardTemplateViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.dashboard_templates.DashboardTemplate" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', - '/home/runner/work/posthog/posthog/posthog/api/dashboards/dashboard.py: Warning [DashboardsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.dashboard.Dashboard" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', - '/home/runner/work/posthog/posthog/ee/api/dashboard_collaborator.py: Warning [DashboardCollaboratorViewSet]: could not derive type of path parameter "project_id" because model "ee.models.dashboard_privilege.DashboardPrivilege" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/early_access_feature.py: Warning [EarlyAccessFeatureViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.early_access_feature.EarlyAccessFeature" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/posthog/api/team.py: Warning [TeamViewSet > TeamSerializer]: unable to resolve type hint for function "get_product_intents". Consider using a type hint or @extend_schema_field. Defaulting to string.', "/home/runner/work/posthog/posthog/posthog/api/event_definition.py: Error [EventDefinitionViewSet]: exception raised while getting serializer. 
Hint: Is get_serializer_class() returning None or is get_queryset() not working without a request? Ignoring the view for now. (Exception: 'AnonymousUser' object has no attribute 'organization')", @@ -75,18 +87,6 @@ '/home/runner/work/posthog/posthog/posthog/api/feature_flag.py: Warning [FeatureFlagViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.feature_flag.feature_flag.FeatureFlag" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/ee/api/feature_flag_role_access.py: Warning [FeatureFlagRoleAccessViewSet]: could not derive type of path parameter "project_id" because model "ee.models.feature_flag_role_access.FeatureFlagRoleAccess" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', '/home/runner/work/posthog/posthog/ee/clickhouse/views/groups.py: Warning [GroupsTypesViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.group_type_mapping.GroupTypeMapping" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', - '/home/runner/work/posthog/posthog/ee/clickhouse/views/insights.py: Warning [EnterpriseInsightsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.insight.Insight" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', - '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_last_refresh". Consider using a type hint or @extend_schema_field. Defaulting to string.', - '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_cache_target_age". Consider using a type hint or @extend_schema_field. Defaulting to string.', - '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_next_allowed_client_refresh". Consider using a type hint or @extend_schema_field. Defaulting to string.', - '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_result". Consider using a type hint or @extend_schema_field. Defaulting to string.', - '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_hasMore". Consider using a type hint or @extend_schema_field. Defaulting to string.', - '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_columns". Consider using a type hint or @extend_schema_field. Defaulting to string.', - '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_timezone". Consider using a type hint or @extend_schema_field. Defaulting to string.', - '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_is_cached". Consider using a type hint or @extend_schema_field. 
Defaulting to string.', - '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_query_status". Consider using a type hint or @extend_schema_field. Defaulting to string.', - '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_hogql". Consider using a type hint or @extend_schema_field. Defaulting to string.', - '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_types". Consider using a type hint or @extend_schema_field. Defaulting to string.', '/home/runner/work/posthog/posthog/posthog/api/notebook.py: Warning [NotebookViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.notebook.notebook.Notebook" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', "/home/runner/work/posthog/posthog/posthog/api/property_definition.py: Error [PropertyDefinitionViewSet]: exception raised while getting serializer. Hint: Is get_serializer_class() returning None or is get_queryset() not working without a request? Ignoring the view for now. (Exception: 'AnonymousUser' object has no attribute 'organization')", '/home/runner/work/posthog/posthog/posthog/api/property_definition.py: Warning [PropertyDefinitionViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.property_definition.PropertyDefinition" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".', @@ -99,6 +99,9 @@ 'Warning: encountered multiple names for the same choice set (EffectivePrivilegeLevelEnum). This may be unwanted even though the generated schema is technically correct. Add an entry to ENUM_NAME_OVERRIDES to fix the naming.', 'Warning: encountered multiple names for the same choice set (MembershipLevelEnum). This may be unwanted even though the generated schema is technically correct. Add an entry to ENUM_NAME_OVERRIDES to fix the naming.', 'Warning: operationId "environments_app_metrics_historical_exports_retrieve" has collisions [(\'/api/environments/{project_id}/app_metrics/{plugin_config_id}/historical_exports/\', \'get\'), (\'/api/environments/{project_id}/app_metrics/{plugin_config_id}/historical_exports/{id}/\', \'get\')]. resolving with numeral suffixes.', + 'Warning: operationId "environments_insights_activity_retrieve" has collisions [(\'/api/environments/{project_id}/insights/{id}/activity/\', \'get\'), (\'/api/environments/{project_id}/insights/activity/\', \'get\')]. resolving with numeral suffixes.', + 'Warning: operationId "Funnels" has collisions [(\'/api/environments/{project_id}/insights/funnel/\', \'post\'), (\'/api/projects/{project_id}/insights/funnel/\', \'post\')]. resolving with numeral suffixes.', + 'Warning: operationId "Trends" has collisions [(\'/api/environments/{project_id}/insights/trend/\', \'post\'), (\'/api/projects/{project_id}/insights/trend/\', \'post\')]. resolving with numeral suffixes.', 'Warning: operationId "environments_persons_activity_retrieve" has collisions [(\'/api/environments/{project_id}/persons/{id}/activity/\', \'get\'), (\'/api/environments/{project_id}/persons/activity/\', \'get\')]. 
resolving with numeral suffixes.', 'Warning: operationId "list" has collisions [(\'/api/organizations/\', \'get\'), (\'/api/organizations/{organization_id}/projects/\', \'get\')]. resolving with numeral suffixes.', 'Warning: operationId "create" has collisions [(\'/api/organizations/\', \'post\'), (\'/api/organizations/{organization_id}/projects/\', \'post\')]. resolving with numeral suffixes.', diff --git a/posthog/api/test/__snapshots__/test_cohort.ambr b/posthog/api/test/__snapshots__/test_cohort.ambr index 071e7fff34d8f..d471209df9f64 100644 --- a/posthog/api/test/__snapshots__/test_cohort.ambr +++ b/posthog/api/test/__snapshots__/test_cohort.ambr @@ -81,7 +81,7 @@ cohort_id FROM cohortpeople WHERE (team_id = 2 - AND cohort_id = '1' + AND cohort_id = '2' AND version < '2') ''' # --- @@ -177,7 +177,7 @@ cohort_id FROM cohortpeople WHERE (team_id = 2 - AND cohort_id = '1' + AND cohort_id = '2' AND version < '2') ''' # --- @@ -187,7 +187,7 @@ DELETE FROM cohortpeople WHERE (team_id = 2 - AND cohort_id = '1' + AND cohort_id = '2' AND version < '2') ''' # --- diff --git a/posthog/api/test/__snapshots__/test_decide.ambr b/posthog/api/test/__snapshots__/test_decide.ambr index 6c941c07a93cb..7e266f47d2009 100644 --- a/posthog/api/test/__snapshots__/test_decide.ambr +++ b/posthog/api/test/__snapshots__/test_decide.ambr @@ -7,7 +7,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -19,6 +18,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -65,153 +65,73 @@ # --- # name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.10 ''' - SELECT "posthog_hogfunction"."id", - "posthog_hogfunction"."team_id", - "posthog_hogfunction"."name", - "posthog_hogfunction"."description", - "posthog_hogfunction"."created_at", - "posthog_hogfunction"."created_by_id", - "posthog_hogfunction"."deleted", - "posthog_hogfunction"."updated_at", - "posthog_hogfunction"."enabled", - "posthog_hogfunction"."icon_url", - "posthog_hogfunction"."hog", - "posthog_hogfunction"."bytecode", - "posthog_hogfunction"."inputs_schema", - "posthog_hogfunction"."inputs", - "posthog_hogfunction"."encrypted_inputs", - "posthog_hogfunction"."filters", - "posthog_hogfunction"."masking", - "posthog_hogfunction"."template_id", - "posthog_team"."id", - "posthog_team"."uuid", - "posthog_team"."organization_id", - "posthog_team"."project_id", - "posthog_team"."api_token", - "posthog_team"."app_urls", - "posthog_team"."name", - "posthog_team"."slack_incoming_webhook", - "posthog_team"."created_at", - "posthog_team"."updated_at", - "posthog_team"."anonymize_ips", - "posthog_team"."completed_snippet_onboarding", - "posthog_team"."has_completed_onboarding_for", - "posthog_team"."ingested_event", - "posthog_team"."autocapture_opt_out", - "posthog_team"."autocapture_web_vitals_opt_in", - "posthog_team"."autocapture_web_vitals_allowed_metrics", - "posthog_team"."autocapture_exceptions_opt_in", - "posthog_team"."autocapture_exceptions_errors_to_ignore", - "posthog_team"."session_recording_opt_in", - "posthog_team"."session_recording_sample_rate", - "posthog_team"."session_recording_minimum_duration_milliseconds", - "posthog_team"."session_recording_linked_flag", - 
"posthog_team"."session_recording_network_payload_capture_config", - "posthog_team"."session_replay_config", - "posthog_team"."survey_config", - "posthog_team"."capture_console_log_opt_in", - "posthog_team"."capture_performance_opt_in", - "posthog_team"."surveys_opt_in", - "posthog_team"."heatmaps_opt_in", - "posthog_team"."session_recording_version", - "posthog_team"."signup_token", - "posthog_team"."is_demo", - "posthog_team"."access_control", - "posthog_team"."week_start_day", - "posthog_team"."inject_web_apps", - "posthog_team"."test_account_filters", - "posthog_team"."test_account_filters_default_checked", - "posthog_team"."path_cleaning_filters", - "posthog_team"."timezone", - "posthog_team"."data_attributes", - "posthog_team"."person_display_name_properties", - "posthog_team"."live_events_columns", - "posthog_team"."recording_domains", - "posthog_team"."primary_dashboard_id", - "posthog_team"."extra_settings", - "posthog_team"."modifiers", - "posthog_team"."correlation_config", - "posthog_team"."session_recording_retention_period_days", - "posthog_team"."plugins_opt_in", - "posthog_team"."opt_out_capture", - "posthog_team"."event_names", - "posthog_team"."event_names_with_usage", - "posthog_team"."event_properties", - "posthog_team"."event_properties_with_usage", - "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id", - "posthog_team"."external_data_workspace_last_synced_at" - FROM "posthog_hogfunction" - INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") - WHERE ("posthog_hogfunction"."team_id" = 2 - AND "posthog_hogfunction"."filters" @> '{"filter_test_accounts": true}'::jsonb) + SELECT "posthog_user"."id", + "posthog_user"."password", + "posthog_user"."last_login", + "posthog_user"."first_name", + "posthog_user"."last_name", + "posthog_user"."is_staff", + "posthog_user"."date_joined", + "posthog_user"."uuid", + "posthog_user"."current_organization_id", + "posthog_user"."current_team_id", + "posthog_user"."email", + "posthog_user"."pending_email", + "posthog_user"."temporary_token", + "posthog_user"."distinct_id", + "posthog_user"."is_email_verified", + "posthog_user"."has_seen_product_intro_for", + "posthog_user"."strapi_id", + "posthog_user"."is_active", + "posthog_user"."theme_mode", + "posthog_user"."partial_notification_settings", + "posthog_user"."anonymize_data", + "posthog_user"."toolbar_mode", + "posthog_user"."hedgehog_config", + "posthog_user"."events_column_config", + "posthog_user"."email_opt_in" + FROM "posthog_user" + WHERE "posthog_user"."id" = 2 + LIMIT 21 ''' # --- # name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.11 ''' - SELECT "posthog_team"."id", - "posthog_team"."uuid", - "posthog_team"."organization_id", - "posthog_team"."project_id", - "posthog_team"."api_token", - "posthog_team"."app_urls", - "posthog_team"."name", - "posthog_team"."slack_incoming_webhook", - "posthog_team"."created_at", - "posthog_team"."updated_at", - "posthog_team"."anonymize_ips", - "posthog_team"."completed_snippet_onboarding", - "posthog_team"."has_completed_onboarding_for", - "posthog_team"."ingested_event", - "posthog_team"."autocapture_opt_out", - "posthog_team"."autocapture_web_vitals_opt_in", - "posthog_team"."autocapture_web_vitals_allowed_metrics", - "posthog_team"."autocapture_exceptions_opt_in", - "posthog_team"."autocapture_exceptions_errors_to_ignore", - "posthog_team"."session_recording_opt_in", - "posthog_team"."session_recording_sample_rate", - 
"posthog_team"."session_recording_minimum_duration_milliseconds", - "posthog_team"."session_recording_linked_flag", - "posthog_team"."session_recording_network_payload_capture_config", - "posthog_team"."session_replay_config", - "posthog_team"."survey_config", - "posthog_team"."capture_console_log_opt_in", - "posthog_team"."capture_performance_opt_in", - "posthog_team"."surveys_opt_in", - "posthog_team"."heatmaps_opt_in", - "posthog_team"."session_recording_version", - "posthog_team"."signup_token", - "posthog_team"."is_demo", - "posthog_team"."access_control", - "posthog_team"."week_start_day", - "posthog_team"."inject_web_apps", - "posthog_team"."test_account_filters", - "posthog_team"."test_account_filters_default_checked", - "posthog_team"."path_cleaning_filters", - "posthog_team"."timezone", - "posthog_team"."data_attributes", - "posthog_team"."person_display_name_properties", - "posthog_team"."live_events_columns", - "posthog_team"."recording_domains", - "posthog_team"."primary_dashboard_id", - "posthog_team"."extra_settings", - "posthog_team"."modifiers", - "posthog_team"."correlation_config", - "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id", - "posthog_team"."external_data_workspace_last_synced_at" - FROM "posthog_team" - WHERE ("posthog_team"."project_id" = 2 - AND "posthog_team"."id" = 2) - LIMIT 21 + SELECT "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics" + FROM "posthog_featureflag" + WHERE ("posthog_featureflag"."active" + AND NOT "posthog_featureflag"."deleted" + AND "posthog_featureflag"."team_id" = 2) ''' # --- # name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.12 ''' - SELECT 1 AS "a" - FROM "posthog_grouptypemapping" - WHERE "posthog_grouptypemapping"."team_id" = 2 - LIMIT 1 + SELECT "posthog_pluginconfig"."id", + "posthog_pluginconfig"."web_token", + "posthog_pluginsourcefile"."updated_at", + "posthog_plugin"."updated_at", + "posthog_pluginconfig"."updated_at" + FROM "posthog_pluginconfig" + INNER JOIN "posthog_plugin" ON ("posthog_pluginconfig"."plugin_id" = "posthog_plugin"."id") + INNER JOIN "posthog_pluginsourcefile" ON ("posthog_plugin"."id" = "posthog_pluginsourcefile"."plugin_id") + WHERE ("posthog_pluginconfig"."enabled" + AND "posthog_pluginsourcefile"."filename" = 'site.ts' + AND "posthog_pluginsourcefile"."status" = 'TRANSPILED' + AND "posthog_pluginconfig"."team_id" = 2) ''' # --- # name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.13 @@ -290,7 +210,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -302,6 +221,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ 
-388,6 +308,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -428,76 +349,6 @@ ''' # --- # name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.4 - ''' - SELECT "posthog_project"."id", - "posthog_project"."organization_id", - "posthog_project"."name", - "posthog_project"."created_at" - FROM "posthog_project" - WHERE "posthog_project"."id" = 2 - LIMIT 21 - ''' -# --- -# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.5 - ''' - SELECT "posthog_team"."id", - "posthog_team"."uuid", - "posthog_team"."organization_id", - "posthog_team"."project_id", - "posthog_team"."api_token", - "posthog_team"."app_urls", - "posthog_team"."name", - "posthog_team"."slack_incoming_webhook", - "posthog_team"."created_at", - "posthog_team"."updated_at", - "posthog_team"."anonymize_ips", - "posthog_team"."completed_snippet_onboarding", - "posthog_team"."has_completed_onboarding_for", - "posthog_team"."ingested_event", - "posthog_team"."autocapture_opt_out", - "posthog_team"."autocapture_web_vitals_opt_in", - "posthog_team"."autocapture_web_vitals_allowed_metrics", - "posthog_team"."autocapture_exceptions_opt_in", - "posthog_team"."autocapture_exceptions_errors_to_ignore", - "posthog_team"."session_recording_opt_in", - "posthog_team"."session_recording_sample_rate", - "posthog_team"."session_recording_minimum_duration_milliseconds", - "posthog_team"."session_recording_linked_flag", - "posthog_team"."session_recording_network_payload_capture_config", - "posthog_team"."session_replay_config", - "posthog_team"."survey_config", - "posthog_team"."capture_console_log_opt_in", - "posthog_team"."capture_performance_opt_in", - "posthog_team"."surveys_opt_in", - "posthog_team"."heatmaps_opt_in", - "posthog_team"."session_recording_version", - "posthog_team"."signup_token", - "posthog_team"."is_demo", - "posthog_team"."access_control", - "posthog_team"."week_start_day", - "posthog_team"."inject_web_apps", - "posthog_team"."test_account_filters", - "posthog_team"."test_account_filters_default_checked", - "posthog_team"."path_cleaning_filters", - "posthog_team"."timezone", - "posthog_team"."data_attributes", - "posthog_team"."person_display_name_properties", - "posthog_team"."live_events_columns", - "posthog_team"."recording_domains", - "posthog_team"."primary_dashboard_id", - "posthog_team"."extra_settings", - "posthog_team"."modifiers", - "posthog_team"."correlation_config", - "posthog_team"."session_recording_retention_period_days", - "posthog_team"."external_data_workspace_id", - "posthog_team"."external_data_workspace_last_synced_at" - FROM "posthog_team" - WHERE ("posthog_team"."project_id" = 2 - AND "posthog_team"."id" = 2) - LIMIT 21 - ''' -# --- -# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.6 ''' SELECT "posthog_organizationmembership"."id", "posthog_organizationmembership"."organization_id", @@ -529,7 +380,7 @@ WHERE "posthog_organizationmembership"."user_id" = 2 ''' # --- -# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.7 +# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.5 ''' SELECT "posthog_organizationmembership"."id", "posthog_organizationmembership"."organization_id", @@ -561,7 +412,7 @@ WHERE 
"posthog_organizationmembership"."user_id" = 2 ''' # --- -# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.8 +# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.6 ''' SELECT "posthog_team"."id", "posthog_team"."organization_id", @@ -570,9 +421,27 @@ WHERE "posthog_team"."organization_id" IN ('00000000-0000-0000-0000-000000000000'::uuid) ''' # --- -# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.9 +# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.7 ''' - SELECT "posthog_team"."id", + SELECT "posthog_hogfunction"."id", + "posthog_hogfunction"."team_id", + "posthog_hogfunction"."name", + "posthog_hogfunction"."description", + "posthog_hogfunction"."created_at", + "posthog_hogfunction"."created_by_id", + "posthog_hogfunction"."deleted", + "posthog_hogfunction"."updated_at", + "posthog_hogfunction"."enabled", + "posthog_hogfunction"."icon_url", + "posthog_hogfunction"."hog", + "posthog_hogfunction"."bytecode", + "posthog_hogfunction"."inputs_schema", + "posthog_hogfunction"."inputs", + "posthog_hogfunction"."encrypted_inputs", + "posthog_hogfunction"."filters", + "posthog_hogfunction"."masking", + "posthog_hogfunction"."template_id", + "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", "posthog_team"."project_id", @@ -596,6 +465,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -621,12 +491,36 @@ "posthog_team"."modifiers", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", + "posthog_team"."plugins_opt_in", + "posthog_team"."opt_out_capture", + "posthog_team"."event_names", + "posthog_team"."event_names_with_usage", + "posthog_team"."event_properties", + "posthog_team"."event_properties_with_usage", + "posthog_team"."event_properties_numerical", "posthog_team"."external_data_workspace_id", "posthog_team"."external_data_workspace_last_synced_at" - FROM "posthog_team" - WHERE ("posthog_team"."project_id" = 2 - AND "posthog_team"."id" = 2) - LIMIT 21 + FROM "posthog_hogfunction" + INNER JOIN "posthog_team" ON ("posthog_hogfunction"."team_id" = "posthog_team"."id") + WHERE ("posthog_hogfunction"."team_id" = 2 + AND "posthog_hogfunction"."filters" @> '{"filter_test_accounts": true}'::jsonb) + ''' +# --- +# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.8 + ''' + SELECT 1 AS "a" + FROM "posthog_grouptypemapping" + WHERE "posthog_grouptypemapping"."team_id" = 2 + LIMIT 1 + ''' +# --- +# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.9 + ''' + SELECT "posthog_productintent"."product_type", + "posthog_productintent"."created_at", + "posthog_productintent"."onboarding_completed_at" + FROM "posthog_productintent" + WHERE "posthog_productintent"."team_id" = 2 ''' # --- # name: TestDecide.test_flag_with_behavioural_cohorts @@ -673,6 +567,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -721,7 +616,6 
@@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -733,6 +627,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -771,6 +666,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -927,6 +823,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -975,7 +872,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -987,6 +883,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -1025,6 +922,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1187,6 +1085,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1302,6 +1201,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", diff --git a/posthog/api/test/__snapshots__/test_early_access_feature.ambr b/posthog/api/test/__snapshots__/test_early_access_feature.ambr index d56fba9679c00..ad603b62d145a 100644 --- a/posthog/api/test/__snapshots__/test_early_access_feature.ambr +++ b/posthog/api/test/__snapshots__/test_early_access_feature.ambr @@ -25,6 +25,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ 
-118,7 +119,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -130,6 +130,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -168,6 +169,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", diff --git a/posthog/api/test/__snapshots__/test_element.ambr b/posthog/api/test/__snapshots__/test_element.ambr index 34878bd420b62..59cbbe8bf1497 100644 --- a/posthog/api/test/__snapshots__/test_element.ambr +++ b/posthog/api/test/__snapshots__/test_element.ambr @@ -7,7 +7,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -19,6 +18,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -57,6 +57,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", diff --git a/posthog/api/test/__snapshots__/test_feature_flag.ambr b/posthog/api/test/__snapshots__/test_feature_flag.ambr index db27b28fd8ee2..0c17549699ba7 100644 --- a/posthog/api/test/__snapshots__/test_feature_flag.ambr +++ b/posthog/api/test/__snapshots__/test_feature_flag.ambr @@ -466,6 +466,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -675,6 +676,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1044,6 +1046,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1186,6 +1189,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", 
"posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1487,6 +1491,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1555,7 +1560,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -1567,6 +1571,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -1605,6 +1610,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1677,6 +1683,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1742,6 +1749,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1915,7 +1923,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -1928,6 +1935,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", diff --git a/posthog/api/test/__snapshots__/test_insight.ambr b/posthog/api/test/__snapshots__/test_insight.ambr index 768244df660f2..98cf5c1119b21 100644 --- a/posthog/api/test/__snapshots__/test_insight.ambr +++ b/posthog/api/test/__snapshots__/test_insight.ambr @@ -646,7 +646,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -658,6 +657,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -696,6 +696,7 @@ 
"posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -754,6 +755,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -851,6 +853,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -888,6 +891,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -965,6 +969,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -1021,6 +1026,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -1083,7 +1089,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -1095,6 +1100,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -1133,6 +1139,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1228,8 +1235,8 @@ SELECT COUNT(*) AS "__count" FROM "posthog_dashboarditem" INNER JOIN "posthog_team" ON ("posthog_dashboarditem"."team_id" = "posthog_team"."id") - WHERE (NOT ("posthog_dashboarditem"."deleted") - AND "posthog_team"."project_id" = 2) + WHERE ("posthog_team"."project_id" = 2 + AND NOT ("posthog_dashboarditem"."deleted")) ''' # --- # name: TestInsight.test_listing_insights_does_not_nplus1.26 @@ -1286,6 +1293,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", 
"posthog_team"."capture_console_log_opt_in", @@ -1326,7 +1334,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -1339,6 +1346,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -1352,7 +1360,6 @@ T5."first_name", T5."last_name", T5."is_staff", - T5."is_active", T5."date_joined", T5."uuid", T5."current_organization_id", @@ -1365,6 +1372,7 @@ T5."requested_password_reset_at", T5."has_seen_product_intro_for", T5."strapi_id", + T5."is_active", T5."theme_mode", T5."partial_notification_settings", T5."anonymize_data", @@ -1376,8 +1384,8 @@ INNER JOIN "posthog_team" ON ("posthog_dashboarditem"."team_id" = "posthog_team"."id") LEFT OUTER JOIN "posthog_user" ON ("posthog_dashboarditem"."created_by_id" = "posthog_user"."id") LEFT OUTER JOIN "posthog_user" T5 ON ("posthog_dashboarditem"."last_modified_by_id" = T5."id") - WHERE (NOT ("posthog_dashboarditem"."deleted") - AND "posthog_team"."project_id" = 2) + WHERE ("posthog_team"."project_id" = 2 + AND NOT ("posthog_dashboarditem"."deleted")) ORDER BY "posthog_dashboarditem"."order" ASC LIMIT 100 ''' @@ -1395,6 +1403,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -1425,6 +1434,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1513,6 +1523,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -1543,6 +1554,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1640,6 +1652,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -1678,6 +1691,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1771,6 +1785,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", 
"posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1822,6 +1837,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -1863,6 +1879,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1928,6 +1945,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", diff --git a/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr b/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr index 184b02b22cca2..42e7977297142 100644 --- a/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr +++ b/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr @@ -7,7 +7,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -19,6 +18,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -106,6 +106,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -219,6 +220,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -312,6 +314,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -377,6 +380,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", 
"posthog_team"."capture_console_log_opt_in", @@ -483,6 +487,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -520,6 +525,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -585,6 +591,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -678,6 +685,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -743,6 +751,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -840,6 +849,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -900,6 +910,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -988,6 +999,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1106,6 +1118,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -1170,6 +1183,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1231,9 +1245,10 @@ ''' SELECT 1 AS "a" FROM 
"posthog_featureflag" + INNER JOIN "posthog_team" ON ("posthog_featureflag"."team_id" = "posthog_team"."id") WHERE (NOT "posthog_featureflag"."deleted" AND "posthog_featureflag"."key" = 'copied-flag-key' - AND "posthog_featureflag"."team_id" = 2) + AND "posthog_team"."project_id" = 2) LIMIT 1 ''' # --- @@ -1276,7 +1291,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -1288,6 +1302,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -1361,6 +1376,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1427,7 +1443,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -1440,6 +1455,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -1460,7 +1476,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -1473,6 +1488,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", diff --git a/posthog/api/test/__snapshots__/test_plugin.ambr b/posthog/api/test/__snapshots__/test_plugin.ambr index 7e35461d29c27..7231828279938 100644 --- a/posthog/api/test/__snapshots__/test_plugin.ambr +++ b/posthog/api/test/__snapshots__/test_plugin.ambr @@ -7,7 +7,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -19,6 +18,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -164,7 +164,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -176,6 +175,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -382,7 +382,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", 
"posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -394,6 +393,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -623,7 +623,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -635,6 +634,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", diff --git a/posthog/api/test/__snapshots__/test_preflight.ambr b/posthog/api/test/__snapshots__/test_preflight.ambr index bb2d9023a2401..78d50ce74a9fa 100644 --- a/posthog/api/test/__snapshots__/test_preflight.ambr +++ b/posthog/api/test/__snapshots__/test_preflight.ambr @@ -7,7 +7,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -19,6 +18,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -57,6 +57,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", diff --git a/posthog/api/test/__snapshots__/test_survey.ambr b/posthog/api/test/__snapshots__/test_survey.ambr index f20862f313aac..d19ead09d4cfd 100644 --- a/posthog/api/test/__snapshots__/test_survey.ambr +++ b/posthog/api/test/__snapshots__/test_survey.ambr @@ -122,7 +122,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -134,6 +133,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -172,6 +172,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", diff --git a/posthog/api/test/dashboards/__init__.py b/posthog/api/test/dashboards/__init__.py index ad6505b5a61a7..dff375dcae21d 100644 --- a/posthog/api/test/dashboards/__init__.py +++ b/posthog/api/test/dashboards/__init__.py @@ -83,6 +83,8 @@ def list_dashboards( team_id: Optional[int] = None, expected_status: int = status.HTTP_200_OK, query_params: 
Optional[dict] = None, + *, + parent: Literal["project", "environment"] = "project", ) -> dict: if team_id is None: team_id = self.team.id @@ -90,7 +92,7 @@ def list_dashboards( if query_params is None: query_params = {} - response = self.client.get(f"/api/projects/{team_id}/dashboards/", query_params) + response = self.client.get(f"/api/{parent}s/{team_id}/dashboards/", query_params) self.assertEqual(response.status_code, expected_status) response_json = response.json() diff --git a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr index 911d8d728be53..2820e9ffea6b9 100644 --- a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr +++ b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr @@ -7,7 +7,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -19,6 +18,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -57,6 +57,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -112,6 +113,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -143,7 +145,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -155,6 +156,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -193,6 +195,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -269,6 +272,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -281,7 +285,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -294,6 +297,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", 
"posthog_user"."anonymize_data", @@ -304,8 +308,8 @@ FROM "posthog_dashboard" INNER JOIN "posthog_team" ON ("posthog_dashboard"."team_id" = "posthog_team"."id") LEFT OUTER JOIN "posthog_user" ON ("posthog_dashboard"."created_by_id" = "posthog_user"."id") - WHERE (NOT ("posthog_dashboard"."deleted") - AND "posthog_team"."project_id" = 2 + WHERE ("posthog_team"."project_id" = 2 + AND NOT ("posthog_dashboard"."deleted") AND "posthog_dashboard"."id" = 2) LIMIT 21 ''' @@ -393,6 +397,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -433,7 +438,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -446,6 +450,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -459,7 +464,6 @@ T6."first_name", T6."last_name", T6."is_staff", - T6."is_active", T6."date_joined", T6."uuid", T6."current_organization_id", @@ -472,6 +476,7 @@ T6."requested_password_reset_at", T6."has_seen_product_intro_for", T6."strapi_id", + T6."is_active", T6."theme_mode", T6."partial_notification_settings", T6."anonymize_data", @@ -575,6 +580,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -635,6 +641,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -714,6 +721,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -754,7 +762,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -767,6 +774,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -780,7 +788,6 @@ T6."first_name", T6."last_name", T6."is_staff", - T6."is_active", T6."date_joined", T6."uuid", T6."current_organization_id", @@ -793,6 +800,7 @@ T6."requested_password_reset_at", T6."has_seen_product_intro_for", T6."strapi_id", + T6."is_active", T6."theme_mode", T6."partial_notification_settings", T6."anonymize_data", @@ -865,6 +873,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", 
"posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -925,6 +934,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -973,6 +983,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -1000,6 +1011,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -1058,6 +1070,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -1099,6 +1112,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1164,6 +1178,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1241,6 +1256,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -1282,7 +1298,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -1294,6 +1309,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -1332,6 +1348,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1390,6 +1407,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", 
"posthog_team"."capture_console_log_opt_in", @@ -1487,6 +1505,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -1524,6 +1543,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1641,6 +1661,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1685,6 +1706,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -1773,6 +1795,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -1796,7 +1819,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -1808,6 +1830,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -1846,6 +1869,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1941,8 +1965,8 @@ SELECT COUNT(*) AS "__count" FROM "posthog_dashboard" INNER JOIN "posthog_team" ON ("posthog_dashboard"."team_id" = "posthog_team"."id") - WHERE (NOT ("posthog_dashboard"."deleted") - AND "posthog_team"."project_id" = 2) + WHERE ("posthog_team"."project_id" = 2 + AND NOT ("posthog_dashboard"."deleted")) ''' # --- # name: TestDashboard.test_listing_dashboards_is_not_nplus1.27 @@ -1957,6 +1981,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -1969,7 +1994,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -1982,6 +2006,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", 
+ "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -1992,8 +2017,8 @@ FROM "posthog_dashboard" INNER JOIN "posthog_team" ON ("posthog_dashboard"."team_id" = "posthog_team"."id") LEFT OUTER JOIN "posthog_user" ON ("posthog_dashboard"."created_by_id" = "posthog_user"."id") - WHERE (NOT ("posthog_dashboard"."deleted") - AND "posthog_team"."project_id" = 2) + WHERE ("posthog_team"."project_id" = 2 + AND NOT ("posthog_dashboard"."deleted")) ORDER BY "posthog_dashboard"."pinned" DESC, "posthog_dashboard"."name" ASC LIMIT 300 @@ -2029,6 +2054,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -2067,6 +2093,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -2160,6 +2187,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -2211,6 +2239,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -2252,6 +2281,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -2317,6 +2347,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -2364,7 +2395,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -2376,6 +2406,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -2414,6 +2445,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", 
"posthog_team"."capture_console_log_opt_in", @@ -2511,6 +2543,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -2551,7 +2584,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -2564,6 +2596,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -2577,7 +2610,6 @@ T6."first_name", T6."last_name", T6."is_staff", - T6."is_active", T6."date_joined", T6."uuid", T6."current_organization_id", @@ -2590,6 +2622,7 @@ T6."requested_password_reset_at", T6."has_seen_product_intro_for", T6."strapi_id", + T6."is_active", T6."theme_mode", T6."partial_notification_settings", T6."anonymize_data", @@ -2630,7 +2663,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -2642,6 +2674,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -2680,6 +2713,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -2756,6 +2790,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -2794,6 +2829,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -2887,6 +2923,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -2938,6 +2975,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -2979,6 +3017,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", 
"posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -3044,6 +3083,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -3117,6 +3157,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -3214,6 +3255,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -3251,6 +3293,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -3368,6 +3411,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -3412,6 +3456,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -3453,7 +3498,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -3465,6 +3509,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -3500,6 +3545,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -3531,7 +3577,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -3543,6 +3588,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", 
"posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -3581,6 +3627,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -3657,6 +3704,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -3669,7 +3717,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -3682,6 +3729,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -3692,8 +3740,8 @@ FROM "posthog_dashboard" INNER JOIN "posthog_team" ON ("posthog_dashboard"."team_id" = "posthog_team"."id") LEFT OUTER JOIN "posthog_user" ON ("posthog_dashboard"."created_by_id" = "posthog_user"."id") - WHERE (NOT ("posthog_dashboard"."deleted") - AND "posthog_team"."project_id" = 2 + WHERE ("posthog_team"."project_id" = 2 + AND NOT ("posthog_dashboard"."deleted") AND "posthog_dashboard"."id" = 2) LIMIT 21 ''' @@ -3781,6 +3829,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -3821,7 +3870,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -3834,6 +3882,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -3847,7 +3896,6 @@ T6."first_name", T6."last_name", T6."is_staff", - T6."is_active", T6."date_joined", T6."uuid", T6."current_organization_id", @@ -3860,6 +3908,7 @@ T6."requested_password_reset_at", T6."has_seen_product_intro_for", T6."strapi_id", + T6."is_active", T6."theme_mode", T6."partial_notification_settings", T6."anonymize_data", @@ -3944,6 +3993,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -3989,6 +4039,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", 
"posthog_dashboard"."deprecated_tags", @@ -4049,6 +4100,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -4128,6 +4180,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -4168,7 +4221,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -4181,6 +4233,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -4194,7 +4247,6 @@ T6."first_name", T6."last_name", T6."is_staff", - T6."is_active", T6."date_joined", T6."uuid", T6."current_organization_id", @@ -4207,6 +4259,7 @@ T6."requested_password_reset_at", T6."has_seen_product_intro_for", T6."strapi_id", + T6."is_active", T6."theme_mode", T6."partial_notification_settings", T6."anonymize_data", @@ -4279,6 +4332,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -4339,6 +4393,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -4387,6 +4442,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -4596,6 +4652,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -4714,6 +4771,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -4754,7 +4812,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -4767,6 +4824,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", 
"posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -4780,7 +4838,6 @@ T6."first_name", T6."last_name", T6."is_staff", - T6."is_active", T6."date_joined", T6."uuid", T6."current_organization_id", @@ -4793,6 +4850,7 @@ T6."requested_password_reset_at", T6."has_seen_product_intro_for", T6."strapi_id", + T6."is_active", T6."theme_mode", T6."partial_notification_settings", T6."anonymize_data", @@ -4841,7 +4899,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -4853,6 +4910,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -4891,6 +4949,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -4967,6 +5026,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -4991,6 +5051,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -5029,6 +5090,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -5133,6 +5195,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -5184,6 +5247,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -5225,6 +5289,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -5290,6 +5355,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", 
"posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -5355,6 +5421,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -5452,6 +5519,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -5489,6 +5557,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -5580,6 +5649,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -5645,6 +5715,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -5742,6 +5813,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -5779,6 +5851,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -5896,6 +5969,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -5940,6 +6014,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -6008,6 +6083,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -6039,7 +6115,6 @@ "posthog_user"."first_name", 
"posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -6051,6 +6126,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -6089,6 +6165,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -6165,6 +6242,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -6203,6 +6281,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -6310,6 +6389,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -6361,6 +6441,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -6402,6 +6483,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -6467,6 +6549,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -6532,6 +6615,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -6629,6 +6713,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -6666,6 
+6751,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -6790,6 +6876,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -6834,6 +6921,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -6890,6 +6978,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -6921,7 +7010,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -6933,6 +7021,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -6971,6 +7060,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -7061,6 +7151,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -7112,6 +7203,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -7150,6 +7242,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -7243,6 +7336,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", 
"posthog_team"."capture_console_log_opt_in", @@ -7294,6 +7388,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -7335,6 +7430,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -7400,6 +7496,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -7465,6 +7562,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -7576,6 +7674,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -7613,6 +7712,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -7730,6 +7830,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -7774,6 +7875,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -7830,6 +7932,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -7872,7 +7975,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -7884,6 +7986,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -7922,6 +8025,7 
@@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -7998,6 +8102,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -8010,7 +8115,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -8023,6 +8127,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -8033,8 +8138,8 @@ FROM "posthog_dashboard" INNER JOIN "posthog_team" ON ("posthog_dashboard"."team_id" = "posthog_team"."id") LEFT OUTER JOIN "posthog_user" ON ("posthog_dashboard"."created_by_id" = "posthog_user"."id") - WHERE (NOT ("posthog_dashboard"."deleted") - AND "posthog_team"."project_id" = 2 + WHERE ("posthog_team"."project_id" = 2 + AND NOT ("posthog_dashboard"."deleted") AND "posthog_dashboard"."id" = 2) LIMIT 21 ''' @@ -8122,6 +8227,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -8162,7 +8268,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -8175,6 +8280,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -8188,7 +8294,6 @@ T6."first_name", T6."last_name", T6."is_staff", - T6."is_active", T6."date_joined", T6."uuid", T6."current_organization_id", @@ -8201,6 +8306,7 @@ T6."requested_password_reset_at", T6."has_seen_product_intro_for", T6."strapi_id", + T6."is_active", T6."theme_mode", T6."partial_notification_settings", T6."anonymize_data", @@ -8303,6 +8409,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -8343,7 +8450,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -8356,6 +8462,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", 
"posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -8369,7 +8476,6 @@ T6."first_name", T6."last_name", T6."is_staff", - T6."is_active", T6."date_joined", T6."uuid", T6."current_organization_id", @@ -8382,6 +8488,7 @@ T6."requested_password_reset_at", T6."has_seen_product_intro_for", T6."strapi_id", + T6."is_active", T6."theme_mode", T6."partial_notification_settings", T6."anonymize_data", @@ -8462,7 +8569,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -8474,6 +8580,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -8512,6 +8619,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -8609,6 +8717,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -8649,7 +8758,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -8662,6 +8770,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -8675,7 +8784,6 @@ T6."first_name", T6."last_name", T6."is_staff", - T6."is_active", T6."date_joined", T6."uuid", T6."current_organization_id", @@ -8688,6 +8796,7 @@ T6."requested_password_reset_at", T6."has_seen_product_intro_for", T6."strapi_id", + T6."is_active", T6."theme_mode", T6."partial_notification_settings", T6."anonymize_data", @@ -8736,7 +8845,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -8748,6 +8856,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -8786,6 +8895,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -8978,6 +9088,7 @@ 
"posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -9096,6 +9207,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -9136,7 +9248,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -9149,6 +9260,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -9162,7 +9274,6 @@ T6."first_name", T6."last_name", T6."is_staff", - T6."is_active", T6."date_joined", T6."uuid", T6."current_organization_id", @@ -9175,6 +9286,7 @@ T6."requested_password_reset_at", T6."has_seen_product_intro_for", T6."strapi_id", + T6."is_active", T6."theme_mode", T6."partial_notification_settings", T6."anonymize_data", @@ -9223,7 +9335,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -9235,6 +9346,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -9273,6 +9385,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -9368,8 +9481,8 @@ SELECT COUNT(*) AS "__count" FROM "posthog_dashboard" INNER JOIN "posthog_team" ON ("posthog_dashboard"."team_id" = "posthog_team"."id") - WHERE (NOT ("posthog_dashboard"."deleted") - AND "posthog_team"."project_id" = 2) + WHERE ("posthog_team"."project_id" = 2 + AND NOT ("posthog_dashboard"."deleted")) ''' # --- # name: TestDashboard.test_retrieve_dashboard_list.29 @@ -9384,6 +9497,7 @@ "posthog_dashboard"."deleted", "posthog_dashboard"."last_accessed_at", "posthog_dashboard"."filters", + "posthog_dashboard"."variables", "posthog_dashboard"."creation_mode", "posthog_dashboard"."restriction_level", "posthog_dashboard"."deprecated_tags", @@ -9396,7 +9510,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -9409,6 +9522,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", 
"posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -9419,8 +9533,8 @@ FROM "posthog_dashboard" INNER JOIN "posthog_team" ON ("posthog_dashboard"."team_id" = "posthog_team"."id") LEFT OUTER JOIN "posthog_user" ON ("posthog_dashboard"."created_by_id" = "posthog_user"."id") - WHERE (NOT ("posthog_dashboard"."deleted") - AND "posthog_team"."project_id" = 2) + WHERE ("posthog_team"."project_id" = 2 + AND NOT ("posthog_dashboard"."deleted")) ORDER BY "posthog_dashboard"."pinned" DESC, "posthog_dashboard"."name" ASC LIMIT 100 @@ -9562,6 +9676,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", diff --git a/posthog/api/test/dashboards/test_dashboard.py b/posthog/api/test/dashboards/test_dashboard.py index d3e7e43d7f200..ef97a1e6bd64b 100644 --- a/posthog/api/test/dashboards/test_dashboard.py +++ b/posthog/api/test/dashboards/test_dashboard.py @@ -13,7 +13,9 @@ from posthog.constants import AvailableFeature from posthog.hogql_queries.legacy_compatibility.filter_to_query import filter_to_query from posthog.models import Dashboard, DashboardTile, Filter, Insight, Team, User +from posthog.models.insight_variable import InsightVariable from posthog.models.organization import Organization +from posthog.models.project import Project from posthog.models.sharing_configuration import SharingConfiguration from posthog.models.signals import mute_selected_signals from posthog.test.base import ( @@ -82,6 +84,33 @@ def test_retrieve_dashboard_list(self): dashboard_names, ) + def test_retrieve_dashboard_list_includes_other_environments(self): + other_team_in_project = Team.objects.create(organization=self.organization, project=self.project) + _, team_in_other_project = Project.objects.create_with_team( + organization=self.organization, initiating_user=self.user + ) + + dashboard_a_id, _ = self.dashboard_api.create_dashboard({"name": "A"}, team_id=self.team.id) + dashboard_b_id, _ = self.dashboard_api.create_dashboard({"name": "B"}, team_id=other_team_in_project.id) + self.dashboard_api.create_dashboard({"name": "C"}, team_id=team_in_other_project.id) + + response_project_data = self.dashboard_api.list_dashboards(self.project.id) + response_env_current_data = self.dashboard_api.list_dashboards(self.team.id, parent="environment") + response_env_other_data = self.dashboard_api.list_dashboards(other_team_in_project.id, parent="environment") + + self.assertEqual( + {dashboard["id"] for dashboard in response_project_data["results"]}, + {dashboard_a_id, dashboard_b_id}, + ) + self.assertEqual( + {dashboard["id"] for dashboard in response_env_current_data["results"]}, + {dashboard_a_id, dashboard_b_id}, + ) + self.assertEqual( + {dashboard["id"] for dashboard in response_env_other_data["results"]}, + {dashboard_a_id, dashboard_b_id}, + ) + @snapshot_postgres_queries def test_retrieve_dashboard(self): dashboard = Dashboard.objects.create(team=self.team, name="private dashboard", created_by=self.user) @@ -555,7 +584,9 @@ def test_dashboard_insights_out_of_synch_with_tiles_are_not_shown(self): mock_view.action = "retrieve" mock_request = MagicMock() mock_request.query_params.get.return_value = None - dashboard_data = DashboardSerializer(dashboard, 
context={"view": mock_view, "request": mock_request}).data + dashboard_data = DashboardSerializer( + dashboard, context={"view": mock_view, "request": mock_request, "get_team": lambda: self.team} + ).data assert len(dashboard_data["tiles"]) == 1 def test_dashboard_insight_tiles_can_be_loaded_correct_context(self): @@ -1361,3 +1392,52 @@ def test_dashboard_duplication_breakdown_histogram_bin_count_none(self): for item in response["tiles"]: self.assertNotEqual(item.get("dashboard", None), existing_dashboard.pk) + + def test_dashboard_variables(self): + variable = InsightVariable.objects.create( + team=self.team, name="Test 1", code_name="test_1", default_value="some_default_value", type="String" + ) + dashboard = Dashboard.objects.create( + team=self.team, + name="dashboard 1", + created_by=self.user, + variables={ + str(variable.id): { + "code_name": variable.code_name, + "variableId": str(variable.id), + "value": "some override value", + } + }, + ) + insight = Insight.objects.create( + filters={}, + query={ + "kind": "DataVisualizationNode", + "source": { + "kind": "HogQLQuery", + "query": "select {variables.test_1}", + "variables": { + str(variable.id): { + "code_name": variable.code_name, + "variableId": str(variable.id), + } + }, + }, + "chartSettings": {}, + "tableSettings": {}, + }, + team=self.team, + last_refresh=now(), + ) + DashboardTile.objects.create(dashboard=dashboard, insight=insight) + + response_data = self.dashboard_api.get_dashboard(dashboard.pk) + + assert response_data["variables"] is not None + assert isinstance(response_data["variables"], dict) + assert len(response_data["variables"].keys()) == 1 + for key, value in response_data["variables"].items(): + assert key == str(variable.id) + assert value["code_name"] == variable.code_name + assert value["variableId"] == str(variable.id) + assert value["value"] == "some override value" diff --git a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr index 8861241214a3b..aa93daa8f662b 100644 --- a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr +++ b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr @@ -7,7 +7,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -19,6 +18,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -57,6 +57,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -97,7 +98,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -109,6 +109,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", 
@@ -147,6 +148,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -238,7 +240,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -251,6 +252,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -306,7 +308,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -318,6 +319,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -356,6 +358,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -458,6 +461,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -498,7 +502,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -511,6 +514,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -524,7 +528,6 @@ T5."first_name", T5."last_name", T5."is_staff", - T5."is_active", T5."date_joined", T5."uuid", T5."current_organization_id", @@ -537,6 +540,7 @@ T5."requested_password_reset_at", T5."has_seen_product_intro_for", T5."strapi_id", + T5."is_active", T5."theme_mode", T5."partial_notification_settings", T5."anonymize_data", @@ -601,7 +605,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -614,6 +617,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", diff --git a/posthog/api/test/test_activity_log.py 
b/posthog/api/test/test_activity_log.py index 18f0c9079df45..2edd7219b71a3 100644 --- a/posthog/api/test/test_activity_log.py +++ b/posthog/api/test/test_activity_log.py @@ -298,7 +298,7 @@ def test_notifications_viewed_n_plus_1(self) -> None: user=user, defaults={"last_viewed_activity_date": f"2023-0{i}-17T04:36:50Z"} ) - with self.assertNumQueries(FuzzyInt(37, 37)): + with self.assertNumQueries(FuzzyInt(39, 39)): self.client.get(f"/api/projects/{self.team.id}/activity_log/important_changes") def test_can_list_all_activity(self) -> None: diff --git a/posthog/api/test/test_alert.py b/posthog/api/test/test_alert.py index e1a1fcaccd836..4c56520f15027 100644 --- a/posthog/api/test/test_alert.py +++ b/posthog/api/test/test_alert.py @@ -6,6 +6,10 @@ from posthog.test.base import APIBaseTest, QueryMatchingTest from posthog.models.team import Team +from posthog.schema import InsightThresholdType, AlertState +from posthog.models import AlertConfiguration +from posthog.models.alert import AlertCheck +from datetime import datetime class TestAlert(APIBaseTest, QueryMatchingTest): @@ -33,7 +37,7 @@ def test_create_and_delete_alert(self) -> None: ], "config": {"type": "TrendsAlertConfig", "series_index": 0}, "name": "alert name", - "threshold": {"configuration": {}}, + "threshold": {"configuration": {"type": InsightThresholdType.ABSOLUTE, "bounds": {}}}, "calculation_interval": "daily", } response = self.client.post(f"/api/projects/{self.team.id}/alerts", creation_request) @@ -52,13 +56,14 @@ def test_create_and_delete_alert(self) -> None: "state": "Not firing", "config": {"type": "TrendsAlertConfig", "series_index": 0}, "threshold": { - "configuration": {}, + "configuration": {"type": InsightThresholdType.ABSOLUTE, "bounds": {}}, "created_at": mock.ANY, "id": mock.ANY, "name": "", }, "last_checked_at": None, "next_check_at": None, + "snoozed_until": None, } assert response.status_code == status.HTTP_201_CREATED, response.content assert response.json() == expected_alert_json @@ -107,7 +112,7 @@ def test_create_and_list_alert(self) -> None: "subscribed_users": [ self.user.id, ], - "threshold": {"configuration": {}}, + "threshold": {"configuration": {"type": InsightThresholdType.ABSOLUTE, "bounds": {}}}, "name": "alert name", } alert = self.client.post(f"/api/projects/{self.team.id}/alerts", creation_request).json() @@ -133,7 +138,7 @@ def test_alert_limit(self) -> None: "subscribed_users": [ self.user.id, ], - "threshold": {"configuration": {}}, + "threshold": {"configuration": {"type": InsightThresholdType.ABSOLUTE, "bounds": {}}}, "name": "alert name", } self.client.post(f"/api/projects/{self.team.id}/alerts", creation_request) @@ -151,7 +156,7 @@ def test_alert_is_deleted_on_insight_update(self) -> None: "subscribed_users": [ self.user.id, ], - "threshold": {"configuration": {}}, + "threshold": {"configuration": {"type": InsightThresholdType.ABSOLUTE, "bounds": {}}}, "name": "alert name", } alert = self.client.post(f"/api/projects/{self.team.id}/alerts", creation_request).json() @@ -176,3 +181,33 @@ def test_alert_is_deleted_on_insight_update(self) -> None: response = self.client.get(f"/api/projects/{self.team.id}/alerts/{alert['id']}") assert response.status_code == status.HTTP_404_NOT_FOUND + + def test_snooze_alert(self) -> None: + creation_request = { + "insight": self.insight["id"], + "subscribed_users": [ + self.user.id, + ], + "threshold": {"configuration": {"type": InsightThresholdType.ABSOLUTE, "bounds": {}}}, + "name": "alert name", + "state": AlertState.FIRING, + } + + alert = 
self.client.post(f"/api/projects/{self.team.id}/alerts", creation_request).json() + assert alert["state"] == AlertState.NOT_FIRING + + alert = AlertConfiguration.objects.get(pk=alert["id"]) + alert.state = AlertState.FIRING + alert.save() + + firing_alert = AlertConfiguration.objects.get(pk=alert.id) + assert firing_alert.state == AlertState.FIRING + + resolved_alert = self.client.patch( + f"/api/projects/{self.team.id}/alerts/{firing_alert.id}", {"snoozed_until": datetime.now()} + ).json() + assert resolved_alert["state"] == AlertState.SNOOZED + + # should also create a new alert check with resolution + check = AlertCheck.objects.filter(alert_configuration=firing_alert.id).latest("created_at") + assert check.state == AlertState.SNOOZED diff --git a/posthog/api/test/test_cohort.py b/posthog/api/test/test_cohort.py index 043d786e25241..52dea5f41a9e0 100644 --- a/posthog/api/test/test_cohort.py +++ b/posthog/api/test/test_cohort.py @@ -1,4 +1,5 @@ import json +from ee.clickhouse.materialized_columns.analyze import materialize from datetime import datetime, timedelta from typing import Optional, Any from unittest import mock @@ -12,11 +13,11 @@ from posthog.api.test.test_exports import TestExportMixin from posthog.clickhouse.client.execute import sync_execute -from posthog.models import FeatureFlag, Person +from posthog.models import FeatureFlag, Person, Action from posthog.models.async_deletion.async_deletion import AsyncDeletion, DeletionType from posthog.models.cohort import Cohort from posthog.models.team.team import Team -from posthog.schema import PropertyOperator +from posthog.schema import PropertyOperator, PersonsOnEventsMode from posthog.tasks.calculate_cohort import calculate_cohort_ch, calculate_cohort_from_list from posthog.tasks.tasks import clickhouse_clear_removed_data from posthog.test.base import ( @@ -143,6 +144,89 @@ def test_creating_update_and_calculating(self, patch_sync_execute, patch_calcula }, ) + @patch("posthog.api.cohort.report_user_action") + @patch("posthog.tasks.calculate_cohort.calculate_cohort_ch.delay", side_effect=calculate_cohort_ch) + @patch("posthog.models.cohort.util.sync_execute", side_effect=sync_execute) + def test_action_persons_on_events(self, patch_sync_execute, patch_calculate_cohort, patch_capture): + materialize("events", "team_id", table_column="person_properties") + self.team.modifiers = {"personsOnEventsMode": PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS} + self.team.save() + _create_person( + team=self.team, + distinct_ids=[f"person_1"], + properties={"team_id": 5}, + ) + _create_person( + team=self.team, + distinct_ids=[f"person_2"], + properties={"team_id": 6}, + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id="person_1", + timestamp=datetime.now() - timedelta(hours=12), + ) + action = Action.objects.create( + team=self.team, + steps_json=[ + { + "event": "$pageview", + "properties": [{"key": "team_id", "type": "person", "value": 5}], + } + ], + ) + + # Make sure the endpoint works with and without the trailing slash + response = self.client.post( + f"/api/projects/{self.team.id}/cohorts", + data={ + "name": "whatever", + "filters": { + "properties": { + "type": "OR", + "values": [ + { + "type": "AND", + "values": [ + { + "key": action.pk, + "type": "behavioral", + "value": "performed_event", + "negation": False, + "event_type": "actions", + "time_value": 30, + "time_interval": "day", + "explicit_datetime": "-30d", + } + ], + } + ], + } + }, + }, + ) + self.assertEqual(response.status_code, 201, 
response.content) + self.assertEqual(response.json()["created_by"]["id"], self.user.pk) + self.assertEqual(patch_calculate_cohort.call_count, 1) + self.assertEqual(patch_capture.call_count, 1) + + with self.capture_queries_startswith("INSERT INTO cohortpeople") as insert_statements: + response = self.client.patch( + f"/api/projects/{self.team.id}/cohorts/{response.json()['id']}", + data={ + "name": "whatever2", + "description": "A great cohort!", + "groups": [{"properties": {"team_id": 6}}], + "created_by": "something something", + "last_calculation": "some random date", + "errors_calculating": 100, + "deleted": False, + }, + ) + + self.assertIn(f"mat_pp_team_id", insert_statements[0]) + @patch("posthog.api.cohort.report_user_action") @patch("posthog.tasks.calculate_cohort.calculate_cohort_ch.delay") def test_list_cohorts_is_not_nplus1(self, patch_calculate_cohort, patch_capture): diff --git a/posthog/api/test/test_decide.py b/posthog/api/test/test_decide.py index 2bd47011c93c8..3fa1fd394d102 100644 --- a/posthog/api/test/test_decide.py +++ b/posthog/api/test/test_decide.py @@ -108,7 +108,7 @@ def _update_team(self, data, expected_status_code: int = status.HTTP_200_OK): client = Client() client.force_login(self.user) - response = client.patch("/api/projects/@current/", data, content_type="application/json") + response = client.patch("/api/environments/@current/", data, content_type="application/json") self.assertEqual(response.status_code, expected_status_code) client.logout() @@ -166,6 +166,7 @@ def test_user_session_recording_opt_in(self, *args): "linkedFlag": None, "minimumDurationMilliseconds": None, "networkPayloadCapture": None, + "urlTriggers": [], } self.assertEqual(response["supportedCompression"], ["gzip", "gzip-js"]) @@ -185,6 +186,7 @@ def test_user_console_log_opt_in(self, *args): "linkedFlag": None, "minimumDurationMilliseconds": None, "networkPayloadCapture": None, + "urlTriggers": [], } def test_user_performance_opt_in(self, *args): @@ -302,6 +304,26 @@ def test_session_recording_linked_flag_variant(self, *args): response = self._post_decide().json() self.assertEqual(response["sessionRecording"]["linkedFlag"], {"flag": "my-flag", "variant": "test"}) + def test_session_recording_url_trigger_patterns(self, *args): + self._update_team( + { + "session_recording_url_trigger_config": [{"url": "/replay-examples/", "matching": "regex"}], + "session_recording_opt_in": True, + } + ) + + response = self._post_decide(origin="capacitor://localhost:8000/home").json() + assert response["sessionRecording"] == { + "endpoint": "/s/", + "recorderVersion": "v2", + "consoleLogRecordingEnabled": True, + "sampleRate": None, + "linkedFlag": None, + "minimumDurationMilliseconds": None, + "networkPayloadCapture": None, + "urlTriggers": [{"url": "/replay-examples/", "matching": "regex"}], + } + def test_session_recording_network_payload_capture_config(self, *args): # :TRICKY: Test for regression around caching @@ -430,6 +452,7 @@ def test_user_session_recording_opt_in_wildcard_domain(self, *args): "linkedFlag": None, "minimumDurationMilliseconds": None, "networkPayloadCapture": None, + "urlTriggers": [], } self.assertEqual(response["supportedCompression"], ["gzip", "gzip-js"]) @@ -457,6 +480,7 @@ def test_user_session_recording_evil_site(self, *args): "linkedFlag": None, "minimumDurationMilliseconds": None, "networkPayloadCapture": None, + "urlTriggers": [], } def test_user_autocapture_opt_out(self, *args): @@ -491,6 +515,7 @@ def 
test_user_session_recording_allowed_when_no_permitted_domains_are_set(self, "linkedFlag": None, "minimumDurationMilliseconds": None, "networkPayloadCapture": None, + "urlTriggers": [], } def test_user_session_recording_allowed_for_android(self, *args) -> None: @@ -505,6 +530,7 @@ def test_user_session_recording_allowed_for_android(self, *args) -> None: "linkedFlag": None, "minimumDurationMilliseconds": None, "networkPayloadCapture": None, + "urlTriggers": [], } def test_user_session_recording_allowed_for_ios(self, *args) -> None: @@ -519,6 +545,7 @@ def test_user_session_recording_allowed_for_ios(self, *args) -> None: "linkedFlag": None, "minimumDurationMilliseconds": None, "networkPayloadCapture": None, + "urlTriggers": [], } def test_user_session_recording_allowed_when_permitted_domains_are_not_http_based(self, *args): @@ -538,6 +565,7 @@ def test_user_session_recording_allowed_when_permitted_domains_are_not_http_base "linkedFlag": None, "minimumDurationMilliseconds": None, "networkPayloadCapture": None, + "urlTriggers": [], } @snapshot_postgres_queries @@ -2901,6 +2929,7 @@ def test_decide_doesnt_error_out_when_database_is_down(self, *args): "linkedFlag": None, "minimumDurationMilliseconds": None, "networkPayloadCapture": None, + "urlTriggers": [], }, ) self.assertEqual(response["supportedCompression"], ["gzip", "gzip-js"]) @@ -2929,6 +2958,7 @@ def test_decide_doesnt_error_out_when_database_is_down(self, *args): "linkedFlag": None, "minimumDurationMilliseconds": None, "networkPayloadCapture": None, + "urlTriggers": [], }, ) self.assertEqual(response["supportedCompression"], ["gzip", "gzip-js"]) @@ -3614,7 +3644,7 @@ def _update_team(self, data): client = Client() client.force_login(self.user) - response = client.patch("/api/projects/@current/", data, content_type="application/json") + response = client.patch("/api/environments/@current/", data, content_type="application/json") self.assertEqual(response.status_code, status.HTTP_200_OK) client.logout() @@ -3724,6 +3754,7 @@ def test_decide_doesnt_error_out_when_database_is_down_and_database_check_isnt_c "linkedFlag": None, "minimumDurationMilliseconds": None, "networkPayloadCapture": None, + "urlTriggers": [], }, ) self.assertEqual(response["supportedCompression"], ["gzip", "gzip-js"]) diff --git a/posthog/api/test/test_feature_flag.py b/posthog/api/test/test_feature_flag.py index bdb41ae129493..f43e49aaee91c 100644 --- a/posthog/api/test/test_feature_flag.py +++ b/posthog/api/test/test_feature_flag.py @@ -739,9 +739,9 @@ def test_updating_feature_flag(self, mock_capture): }, { "type": "FeatureFlag", - "action": "changed", + "action": "created", "field": "filters", - "before": {}, + "before": None, "after": { "groups": [ { @@ -843,9 +843,9 @@ def test_get_feature_flag_activity(self): "changes": [ { "type": "FeatureFlag", - "action": "changed", + "action": "created", "field": "filters", - "before": {}, + "before": None, "after": {"groups": [{"properties": [], "rollout_percentage": 74}]}, } ], @@ -948,9 +948,9 @@ def test_get_feature_flag_activity_for_all_flags(self): "changes": [ { "type": "FeatureFlag", - "action": "changed", + "action": "created", "field": "filters", - "before": {}, + "before": None, "after": {"groups": [{"properties": [], "rollout_percentage": 74}]}, } ], @@ -5151,6 +5151,7 @@ def test_feature_flags_v3_with_group_properties(self, *args): self.user = User.objects.create_and_join(self.organization, "random@test.com", "password", "first_name") team_id = self.team.pk + project_id = self.team.project_id rf = 
RequestFactory() create_request = rf.post(f"api/projects/{self.team.pk}/feature_flags/", {"name": "xyz"}) create_request.user = self.user @@ -5185,7 +5186,7 @@ def test_feature_flags_v3_with_group_properties(self, *args): ], }, }, - context={"team_id": team_id, "request": create_request}, + context={"team_id": team_id, "project_id": project_id, "request": create_request}, ) self.assertTrue(serialized_data.is_valid()) serialized_data.save() @@ -5200,7 +5201,7 @@ def test_feature_flags_v3_with_group_properties(self, *args): "groups": [{"properties": [], "rollout_percentage": None}], }, }, - context={"team_id": team_id, "request": create_request}, + context={"team_id": team_id, "project_id": project_id, "request": create_request}, ) self.assertTrue(serialized_data.is_valid()) serialized_data.save() @@ -5256,6 +5257,7 @@ def test_feature_flags_v3_with_person_properties(self, mock_counter, *args): self.user = User.objects.create_and_join(self.organization, "random@test.com", "password", "first_name") team_id = self.team.pk + project_id = self.team.project_id rf = RequestFactory() create_request = rf.post(f"api/projects/{self.team.pk}/feature_flags/", {"name": "xyz"}) create_request.user = self.user @@ -5286,7 +5288,7 @@ def test_feature_flags_v3_with_person_properties(self, mock_counter, *args): ] }, }, - context={"team_id": team_id, "request": create_request}, + context={"team_id": team_id, "project_id": project_id, "request": create_request}, ) self.assertTrue(serialized_data.is_valid()) serialized_data.save() @@ -5298,7 +5300,7 @@ def test_feature_flags_v3_with_person_properties(self, mock_counter, *args): "key": "default-flag", "filters": {"groups": [{"properties": [], "rollout_percentage": None}]}, }, - context={"team_id": team_id, "request": create_request}, + context={"team_id": team_id, "project_id": project_id, "request": create_request}, ) self.assertTrue(serialized_data.is_valid()) serialized_data.save() @@ -5354,6 +5356,7 @@ def test_feature_flags_v3_with_a_working_slow_db(self, mock_postgres_check): self.user = User.objects.create_and_join(self.organization, "random@test.com", "password", "first_name") team_id = self.team.pk + project_id = self.team.project_id rf = RequestFactory() create_request = rf.post(f"api/projects/{self.team.pk}/feature_flags/", {"name": "xyz"}) create_request.user = self.user @@ -5384,7 +5387,7 @@ def test_feature_flags_v3_with_a_working_slow_db(self, mock_postgres_check): ] }, }, - context={"team_id": team_id, "request": create_request}, + context={"team_id": team_id, "project_id": project_id, "request": create_request}, ) self.assertTrue(serialized_data.is_valid()) serialized_data.save() @@ -5396,7 +5399,7 @@ def test_feature_flags_v3_with_a_working_slow_db(self, mock_postgres_check): "key": "default-flag", "filters": {"groups": [{"properties": [], "rollout_percentage": None}]}, }, - context={"team_id": team_id, "request": create_request}, + context={"team_id": team_id, "project_id": project_id, "request": create_request}, ) self.assertTrue(serialized_data.is_valid()) serialized_data.save() @@ -5454,6 +5457,7 @@ def test_feature_flags_v3_with_skip_database_setting(self, mock_postgres_check): self.user = User.objects.create_and_join(self.organization, "random@test.com", "password", "first_name") team_id = self.team.pk + project_id = self.team.project_id rf = RequestFactory() create_request = rf.post(f"api/projects/{self.team.pk}/feature_flags/", {"name": "xyz"}) create_request.user = self.user @@ -5484,7 +5488,7 @@ def 
test_feature_flags_v3_with_skip_database_setting(self, mock_postgres_check): ] }, }, - context={"team_id": team_id, "request": create_request}, + context={"team_id": team_id, "project_id": project_id, "request": create_request}, ) self.assertTrue(serialized_data.is_valid()) serialized_data.save() @@ -5496,7 +5500,7 @@ def test_feature_flags_v3_with_skip_database_setting(self, mock_postgres_check): "key": "default-flag", "filters": {"groups": [{"properties": [], "rollout_percentage": None}]}, }, - context={"team_id": team_id, "request": create_request}, + context={"team_id": team_id, "project_id": project_id, "request": create_request}, ) self.assertTrue(serialized_data.is_valid()) serialized_data.save() @@ -5661,6 +5665,7 @@ def test_feature_flags_v3_with_group_properties_and_slow_db(self, mock_counter, self.user = User.objects.create_and_join(self.organization, "randomXYZ@test.com", "password", "first_name") team_id = self.team.pk + project_id = self.team.project_id rf = RequestFactory() create_request = rf.post(f"api/projects/{self.team.pk}/feature_flags/", {"name": "xyz"}) create_request.user = self.user @@ -5695,7 +5700,7 @@ def test_feature_flags_v3_with_group_properties_and_slow_db(self, mock_counter, ], }, }, - context={"team_id": team_id, "request": create_request}, + context={"team_id": team_id, "project_id": project_id, "request": create_request}, ) self.assertTrue(serialized_data.is_valid()) serialized_data.save() @@ -5710,7 +5715,7 @@ def test_feature_flags_v3_with_group_properties_and_slow_db(self, mock_counter, "groups": [{"properties": [], "rollout_percentage": None}], }, }, - context={"team_id": team_id, "request": create_request}, + context={"team_id": team_id, "project_id": project_id, "request": create_request}, ) self.assertTrue(serialized_data.is_valid()) serialized_data.save() @@ -5782,6 +5787,7 @@ def test_feature_flags_v3_with_experience_continuity_working_slow_db(self, mock_ self.user = User.objects.create_and_join(self.organization, "random12@test.com", "password", "first_name") team_id = self.team.pk + project_id = self.team.project_id rf = RequestFactory() create_request = rf.post(f"api/projects/{self.team.pk}/feature_flags/", {"name": "xyz"}) create_request.user = self.user @@ -5813,7 +5819,7 @@ def test_feature_flags_v3_with_experience_continuity_working_slow_db(self, mock_ }, "ensure_experience_continuity": True, }, - context={"team_id": team_id, "request": create_request}, + context={"team_id": team_id, "project_id": project_id, "request": create_request}, ) self.assertTrue(serialized_data.is_valid()) serialized_data.save() @@ -5825,7 +5831,7 @@ def test_feature_flags_v3_with_experience_continuity_working_slow_db(self, mock_ "key": "default-flag", "filters": {"groups": [{"properties": [], "rollout_percentage": None}]}, }, - context={"team_id": team_id, "request": create_request}, + context={"team_id": team_id, "project_id": project_id, "request": create_request}, ) self.assertTrue(serialized_data.is_valid()) serialized_data.save() @@ -5878,6 +5884,7 @@ def test_feature_flags_v3_with_experience_continuity_and_incident_mode(self, moc self.user = User.objects.create_and_join(self.organization, "random12@test.com", "password", "first_name") team_id = self.team.pk + project_id = self.team.project_id rf = RequestFactory() create_request = rf.post(f"api/projects/{self.team.pk}/feature_flags/", {"name": "xyz"}) create_request.user = self.user @@ -5909,7 +5916,7 @@ def test_feature_flags_v3_with_experience_continuity_and_incident_mode(self, moc }, 
"ensure_experience_continuity": True, }, - context={"team_id": team_id, "request": create_request}, + context={"team_id": team_id, "project_id": project_id, "request": create_request}, ) self.assertTrue(serialized_data.is_valid()) serialized_data.save() @@ -5921,7 +5928,7 @@ def test_feature_flags_v3_with_experience_continuity_and_incident_mode(self, moc "key": "default-flag", "filters": {"groups": [{"properties": [], "rollout_percentage": None}]}, }, - context={"team_id": team_id, "request": create_request}, + context={"team_id": team_id, "project_id": project_id, "request": create_request}, ) self.assertTrue(serialized_data.is_valid()) serialized_data.save() diff --git a/posthog/api/test/test_hog_function.py b/posthog/api/test/test_hog_function.py index 803366b9ad7da..b848117c81ca5 100644 --- a/posthog/api/test/test_hog_function.py +++ b/posthog/api/test/test_hog_function.py @@ -69,7 +69,7 @@ def get_db_field_value(field, model_id): class TestHogFunctionAPIWithoutAvailableFeature(ClickhouseTestMixin, APIBaseTest, QueryMatchingTest): - def create_slack_function(self, data: Optional[dict] = None): + def _create_slack_function(self, data: Optional[dict] = None): payload = { "name": "Slack", "template_id": template_slack.id, @@ -87,15 +87,14 @@ def create_slack_function(self, data: Optional[dict] = None): ) def test_create_hog_function_works_for_free_template(self): - response = self.create_slack_function() - + response = self._create_slack_function() assert response.status_code == status.HTTP_201_CREATED, response.json() assert response.json()["created_by"]["id"] == self.user.id assert response.json()["hog"] == template_slack.hog assert response.json()["inputs_schema"] == template_slack.inputs_schema def test_free_users_cannot_override_hog_or_schema(self): - response = self.create_slack_function( + response = self._create_slack_function( { "hog": "fetch(inputs.url);", "inputs_schema": [ @@ -108,13 +107,13 @@ def test_free_users_cannot_override_hog_or_schema(self): assert response.json()["detail"] == "The Data Pipelines addon is required to create custom functions." def test_free_users_cannot_use_without_template(self): - response = self.create_slack_function({"template_id": None}) + response = self._create_slack_function({"template_id": None}) assert response.status_code == status.HTTP_400_BAD_REQUEST, response.json() assert response.json()["detail"] == "The Data Pipelines addon is required to create custom functions." 
def test_free_users_cannot_use_non_free_templates(self): - response = self.create_slack_function( + response = self._create_slack_function( { "template_id": template_webhook.id, } @@ -133,6 +132,28 @@ def setUp(self): ] self.organization.save() + def _get_function_activity( + self, + function_id: Optional[int] = None, + ) -> list: + params: dict = {"scope": "HogFunction", "page": 1, "limit": 20} + if function_id: + params["item_id"] = function_id + activity = self.client.get(f"/api/projects/{self.team.pk}/activity_log", data=params) + self.assertEqual(activity.status_code, status.HTTP_200_OK) + return activity.json().get("results") + + def _filter_expected_keys(self, actual_data, expected_structure): + if isinstance(expected_structure, list) and expected_structure and isinstance(expected_structure[0], dict): + return [self._filter_expected_keys(item, expected_structure[0]) for item in actual_data] + elif isinstance(expected_structure, dict): + return { + key: self._filter_expected_keys(actual_data.get(key), expected_value) + for key, expected_value in expected_structure.items() + } + else: + return actual_data + def test_create_hog_function(self, *args): response = self.client.post( f"/api/projects/{self.team.id}/hog_functions/", @@ -159,6 +180,33 @@ def test_create_hog_function(self, *args): "status": {"rating": 0, "state": 0, "tokens": 0}, } + id = response.json()["id"] + expected_activities = [ + { + "activity": "created", + "created_at": ANY, + "detail": { + "name": "Fetch URL", + "changes": None, + "short_id": None, + "trigger": None, + "type": "destination", + }, + "item_id": id, + "scope": "HogFunction", + "user": { + "email": "user1@posthog.com", + "first_name": "", + }, + }, + ] + actual_activities = self._get_function_activity(id) + filtered_actual_activities = [ + self._filter_expected_keys(actual_activity, expected_activity) + for actual_activity, expected_activity in zip(actual_activities, expected_activities) + ] + assert filtered_actual_activities == expected_activities + def test_creates_with_template_id(self, *args): response = self.client.post( f"/api/projects/{self.team.id}/hog_functions/", @@ -190,6 +238,7 @@ def test_deletes_via_update(self, *args): data={"name": "Fetch URL", "description": "Test description", "hog": "fetch(inputs.url);"}, ) assert response.status_code == status.HTTP_201_CREATED, response.json() + id = response.json()["id"] list_res = self.client.get(f"/api/projects/{self.team.id}/hog_functions/") assert list_res.status_code == status.HTTP_200_OK, list_res.json() @@ -208,6 +257,57 @@ def test_deletes_via_update(self, *args): assert list_res.status_code == status.HTTP_200_OK, list_res.json() assert next((item for item in list_res.json()["results"] if item["id"] == response.json()["id"]), None) is None + expected_activities = [ + { + "activity": "updated", + "created_at": ANY, + "detail": { + "name": "Fetch URL", + "changes": [ + { + "action": "changed", + "after": True, + "before": False, + "field": "deleted", + "type": "HogFunction", + } + ], + "short_id": None, + "trigger": None, + "type": "destination", + }, + "item_id": id, + "scope": "HogFunction", + "user": { + "email": "user1@posthog.com", + "first_name": "", + }, + }, + { + "activity": "created", + "created_at": ANY, + "detail": { + "name": "Fetch URL", + "changes": None, + "short_id": None, + "trigger": None, + "type": "destination", + }, + "item_id": id, + "scope": "HogFunction", + "user": { + "email": "user1@posthog.com", + "first_name": "", + }, + }, + ] + actual_activities = 
self._get_function_activity(id) + filtered_actual_activities = [ + self._filter_expected_keys(actual_activity, expected_activity) + for actual_activity, expected_activity in zip(actual_activities, expected_activities) + ] + assert filtered_actual_activities == expected_activities + def test_inputs_required(self, *args): payload = { "name": "Fetch URL", @@ -355,6 +455,7 @@ def test_secret_inputs_updated_if_changed(self, *args): }, } res = self.client.post(f"/api/projects/{self.team.id}/hog_functions/", data={**payload}) + id = res.json()["id"] assert res.json()["inputs"] == {"secret1": {"secret": True}}, res.json() res = self.client.patch( f"/api/projects/{self.team.id}/hog_functions/{res.json()['id']}", @@ -376,6 +477,58 @@ def test_secret_inputs_updated_if_changed(self, *args): assert obj.encrypted_inputs["secret1"]["value"] == "I AM CHANGED" assert obj.encrypted_inputs["secret2"]["value"] == "I AM ALSO SECRET" + # changes to encrypted inputs aren't persisted + expected_activities = [ + { + "activity": "updated", + "created_at": ANY, + "detail": { + "changes": [ + { + "action": "changed", + "after": "masked", + "before": "masked", + "field": "encrypted_inputs", + "type": "HogFunction", + } + ], + "name": "Fetch URL", + "short_id": None, + "trigger": None, + "type": "destination", + }, + "item_id": id, + "scope": "HogFunction", + "user": { + "email": "user1@posthog.com", + "first_name": "", + }, + }, + { + "activity": "created", + "created_at": ANY, + "detail": { + "name": "Fetch URL", + "changes": None, + "short_id": None, + "trigger": None, + "type": "destination", + }, + "item_id": id, + "scope": "HogFunction", + "user": { + "email": "user1@posthog.com", + "first_name": "", + }, + }, + ] + actual_activities = self._get_function_activity(id) + filtered_actual_activities = [ + self._filter_expected_keys(actual_activity, expected_activity) + for actual_activity, expected_activity in zip(actual_activities, expected_activities) + ] + assert filtered_actual_activities == expected_activities + def test_generates_hog_bytecode(self, *args): response = self.client.post( f"/api/projects/{self.team.id}/hog_functions/", @@ -616,6 +769,7 @@ def test_patches_status_on_enabled_update(self, *args): f"/api/projects/{self.team.id}/hog_functions/", data={"name": "Fetch URL", "hog": "fetch(inputs.url);", "enabled": True}, ) + id = response.json()["id"] assert response.json()["status"]["state"] == 4 @@ -637,6 +791,82 @@ def test_patches_status_on_enabled_update(self, *args): json={"state": 2}, ) + expected_activities = [ + { + "activity": "updated", + "created_at": ANY, + "detail": { + "name": "Fetch URL", + "changes": [ + { + "action": "changed", + "after": True, + "before": False, + "field": "enabled", + "type": "HogFunction", + } + ], + "short_id": None, + "trigger": None, + "type": "destination", + }, + "item_id": id, + "scope": "HogFunction", + "user": { + "email": "user1@posthog.com", + "first_name": "", + }, + }, + { + "activity": "updated", + "created_at": ANY, + "detail": { + "name": "Fetch URL", + "changes": [ + { + "action": "changed", + "after": False, + "before": True, + "field": "enabled", + "type": "HogFunction", + } + ], + "short_id": None, + "trigger": None, + "type": "destination", + }, + "item_id": id, + "scope": "HogFunction", + "user": { + "email": "user1@posthog.com", + "first_name": "", + }, + }, + { + "activity": "created", + "created_at": ANY, + "detail": { + "name": "Fetch URL", + "changes": None, + "short_id": None, + "trigger": None, + "type": "destination", + }, + "item_id": id, 
+ "scope": "HogFunction", + "user": { + "email": "user1@posthog.com", + "first_name": "", + }, + }, + ] + actual_activities = self._get_function_activity(id) + filtered_actual_activities = [ + self._filter_expected_keys(actual_activity, expected_activity) + for actual_activity, expected_activity in zip(actual_activities, expected_activities) + ] + assert filtered_actual_activities == expected_activities + def test_list_with_filters_filter(self, *args): action1 = Action.objects.create( team=self.team, diff --git a/posthog/api/test/test_insight.py b/posthog/api/test/test_insight.py index 6834ae2d6c40f..32cd4da9df83f 100644 --- a/posthog/api/test/test_insight.py +++ b/posthog/api/test/test_insight.py @@ -33,6 +33,8 @@ User, ) from posthog.models.insight_caching_state import InsightCachingState +from posthog.models.insight_variable import InsightVariable +from posthog.models.project import Project from posthog.schema import ( DataTableNode, DataVisualizationNode, @@ -91,6 +93,43 @@ def test_get_insight_items(self) -> None: self.assertEqual(len(response["results"]), 1) + def test_get_insight_items_all_environments_included(self) -> None: + filter_dict = { + "events": [{"id": "$pageview"}], + "properties": [{"key": "$browser", "value": "Mac OS X"}], + } + + other_team_in_project = Team.objects.create(organization=self.organization, project=self.project) + _, team_in_other_project = Project.objects.create_with_team( + organization=self.organization, initiating_user=self.user + ) + + insight_a = Insight.objects.create( + filters=Filter(data=filter_dict).to_dict(), + team=self.team, + created_by=self.user, + ) + insight_b = Insight.objects.create( + filters=Filter(data=filter_dict).to_dict(), + team=other_team_in_project, + created_by=self.user, + ) + Insight.objects.create( + filters=Filter(data=filter_dict).to_dict(), + team=team_in_other_project, + created_by=self.user, + ) + + # All of these three ways should return the same set of insights, + # i.e. 
all insights in the test project regardless of environment + response_project = self.client.get(f"/api/projects/{self.project.id}/insights/").json() + response_env_current = self.client.get(f"/api/environments/{self.team.id}/insights/").json() + response_env_other = self.client.get(f"/api/environments/{other_team_in_project.id}/insights/").json() + + self.assertEqual({insight["id"] for insight in response_project["results"]}, {insight_a.id, insight_b.id}) + self.assertEqual({insight["id"] for insight in response_env_current["results"]}, {insight_a.id, insight_b.id}) + self.assertEqual({insight["id"] for insight in response_env_other["results"]}, {insight_a.id, insight_b.id}) + @patch("posthoganalytics.capture") def test_created_updated_and_last_modified(self, mock_capture: mock.Mock) -> None: alt_user = User.objects.create_and_join(self.organization, "team2@posthog.com", None) @@ -339,8 +378,10 @@ def test_get_insight_in_shared_context(self) -> None: mock.ANY, dashboard=mock.ANY, execution_mode=ExecutionMode.EXTENDED_CACHE_CALCULATE_ASYNC_IF_STALE, + team=self.team, user=mock.ANY, filters_override=None, + variables_override=None, ) with patch( @@ -351,8 +392,10 @@ def test_get_insight_in_shared_context(self) -> None: mock.ANY, dashboard=mock.ANY, execution_mode=ExecutionMode.RECENT_CACHE_CALCULATE_BLOCKING_IF_STALE, + team=self.team, user=mock.ANY, filters_override=None, + variables_override=None, ) def test_get_insight_by_short_id(self) -> None: @@ -3556,3 +3599,60 @@ def test_insight_returns_cached_types(self) -> None: self.assertNotIn("code", response) self.assertIsNotNone(response["results"][0]["types"]) + + def test_insight_variables_overrides(self): + dashboard = Dashboard.objects.create( + team=self.team, + name="dashboard 1", + created_by=self.user, + ) + variable = InsightVariable.objects.create( + team=self.team, name="Test 1", code_name="test_1", default_value="some_default_value", type="String" + ) + insight = Insight.objects.create( + filters={}, + query={ + "kind": "DataVisualizationNode", + "source": { + "kind": "HogQLQuery", + "query": "select {variables.test_1}", + "variables": { + str(variable.id): { + "code_name": variable.code_name, + "variableId": str(variable.id), + } + }, + }, + "chartSettings": {}, + "tableSettings": {}, + }, + team=self.team, + ) + DashboardTile.objects.create(dashboard=dashboard, insight=insight) + + response = self.client.get( + f"/api/projects/{self.team.id}/insights/{insight.pk}", + data={ + "from_dashboard": dashboard.pk, + "variables_override": json.dumps( + { + str(variable.id): { + "code_name": variable.code_name, + "variableId": str(variable.id), + "value": "override value!", + } + } + ), + }, + ).json() + + assert isinstance(response["query"], dict) + assert isinstance(response["query"]["source"], dict) + assert isinstance(response["query"]["source"]["variables"], dict) + + assert len(response["query"]["source"]["variables"].keys()) == 1 + for key, value in response["query"]["source"]["variables"].items(): + assert key == str(variable.id) + assert value["code_name"] == variable.code_name + assert value["variableId"] == str(variable.id) + assert value["value"] == "override value!" 
diff --git a/posthog/api/test/test_insight_variable.py b/posthog/api/test/test_insight_variable.py new file mode 100644 index 0000000000000..2b6f09ef8ed89 --- /dev/null +++ b/posthog/api/test/test_insight_variable.py @@ -0,0 +1,33 @@ +from posthog.models.insight_variable import InsightVariable +from posthog.test.base import APIBaseTest + + +class TestInsightVariable(APIBaseTest): + def test_create_insight_variable(self): + response = self.client.post( + f"/api/projects/{self.team.pk}/insight_variables/", data={"name": "Test 1", "type": "String"} + ) + + assert response.status_code == 201 + + variable = InsightVariable.objects.get(team_id=self.team.pk) + + assert variable is not None + assert variable.created_by is not None + assert variable.created_at is not None + assert variable.name == "Test 1" + assert variable.type == "String" + assert variable.code_name == "test_1" + + def test_no_duplicate_code_names(self): + InsightVariable.objects.create(team=self.team, name="Test 1", code_name="test_1") + + response = self.client.post( + f"/api/projects/{self.team.pk}/insight_variables/", data={"name": "Test 1", "type": "String"} + ) + + assert response.status_code == 400 + + variable_count = InsightVariable.objects.filter(team_id=self.team.pk).count() + + assert variable_count == 1 diff --git a/posthog/api/test/test_query.py b/posthog/api/test/test_query.py index 170958799c5f6..462a947ad9b4f 100644 --- a/posthog/api/test/test_query.py +++ b/posthog/api/test/test_query.py @@ -1104,3 +1104,12 @@ def test_destroy(self): response = self.client.delete(f"/api/projects/{self.team.id}/query/{self.valid_query_id}/") self.assertEqual(response.status_code, 204) self.assertEqual(self.redis_client_mock.delete.call_count, 2) + + +class TestQueryDraftSql(APIBaseTest): + @patch("posthog.hogql.ai.hit_openai", return_value=("SELECT 1", 21, 37)) + def test_draft_sql(self, hit_openai_mock): + response = self.client.get(f"/api/projects/{self.team.id}/query/draft_sql/", {"prompt": "I need the number 1"}) + self.assertEqual(response.status_code, 200) + self.assertEqual(response.json(), {"sql": "SELECT 1"}) + hit_openai_mock.assert_called_once() diff --git a/posthog/api/test/test_survey.py b/posthog/api/test/test_survey.py index 4f171d91b6c14..c6de95a44702e 100644 --- a/posthog/api/test/test_survey.py +++ b/posthog/api/test/test_survey.py @@ -2371,6 +2371,19 @@ def test_can_create_recurring_survey(self): assert len(response_data["iteration_start_dates"]) == 2 assert response_data["current_iteration"] == 1 + def test_can_create_and_launch_recurring_survey(self): + survey = self._create_recurring_survey() + response = self.client.patch( + f"/api/projects/{self.team.id}/surveys/{survey.id}/", + data={ + "start_date": datetime.now() - timedelta(days=1), + }, + ) + response_data = response.json() + assert response_data["iteration_start_dates"] is not None + assert len(response_data["iteration_start_dates"]) == 2 + assert response_data["current_iteration"] == 1 + def test_can_set_internal_targeting_flag(self): survey = self._create_recurring_survey() response = self.client.patch( @@ -2493,7 +2506,7 @@ def test_guards_for_nil_iteration_count(self): ) assert response.status_code == status.HTTP_200_OK survey.refresh_from_db() - self.assertIsNone(survey.current_iteration) + self.assertIsNotNone(survey.current_iteration) response = self.client.patch( f"/api/projects/{self.team.id}/surveys/{survey.id}/", data={ diff --git a/posthog/api/utils.py b/posthog/api/utils.py index 69abed44fd27f..514534990a8f0 100644 --- 
a/posthog/api/utils.py +++ b/posthog/api/utils.py @@ -312,7 +312,7 @@ def create_event_definitions_sql( SELECT {",".join(event_definition_fields)} FROM posthog_eventdefinition {enterprise_join} - WHERE team_id = %(team_id)s {conditions} + WHERE team_id = %(project_id)s {conditions} ORDER BY {additional_ordering}name ASC """ diff --git a/posthog/caching/calculate_results.py b/posthog/caching/calculate_results.py index 7da32bb9e88cd..8af99a3b2cfd8 100644 --- a/posthog/caching/calculate_results.py +++ b/posthog/caching/calculate_results.py @@ -125,24 +125,33 @@ def get_cache_type(cacheable: Optional[FilterType] | Optional[dict]) -> CacheTyp def calculate_for_query_based_insight( insight: Insight, *, + team: Team, dashboard: Optional[Dashboard] = None, execution_mode: ExecutionMode, user: Optional[User], filters_override: Optional[dict] = None, + variables_override: Optional[dict] = None, ) -> "InsightResult": from posthog.caching.fetch_from_cache import InsightResult, NothingInCacheResult from posthog.caching.insight_cache import update_cached_state - tag_queries(team_id=insight.team_id, insight_id=insight.pk) + tag_queries(team_id=team.id, insight_id=insight.pk) if dashboard: tag_queries(dashboard_id=dashboard.pk) response = process_response = process_query_dict( - insight.team, + team, insight.query, dashboard_filters_json=( filters_override if filters_override is not None else dashboard.filters if dashboard is not None else None ), + variables_override_json=( + variables_override + if variables_override is not None + else dashboard.variables + if dashboard is not None + else None + ), execution_mode=execution_mode, user=user, insight_id=insight.pk, @@ -161,7 +170,7 @@ def calculate_for_query_based_insight( last_refresh = response.get("last_refresh") if isinstance(cache_key, str) and isinstance(last_refresh, datetime): update_cached_state( # Updating the relevant InsightCachingState - insight.team_id, + team.id, cache_key, last_refresh, result=None, # Not caching the result here, since in HogQL this is the query runner's responsibility diff --git a/posthog/caching/warming.py b/posthog/caching/warming.py index 681cb773ff871..d4a2818aa1964 100644 --- a/posthog/caching/warming.py +++ b/posthog/caching/warming.py @@ -14,6 +14,7 @@ from posthog.caching.utils import largest_teams from posthog.clickhouse.query_tagging import tag_queries from posthog.errors import CHQueryErrorTooManySimultaneousQueries +from posthog.hogql.constants import LimitContext from posthog.hogql_queries.query_cache import QueryCacheManager from posthog.hogql_queries.legacy_compatibility.flagged_conversion_manager import conversion_to_query_based from posthog.hogql_queries.query_runner import ExecutionMode @@ -126,13 +127,18 @@ def schedule_warming_for_teams_task(): max_retries=3, ) def warm_insight_cache_task(insight_id: int, dashboard_id: Optional[int]): - insight = Insight.objects.get(pk=insight_id) + try: + insight = Insight.objects.get(pk=insight_id) + except Insight.DoesNotExist: + logger.info(f"Warming insight cache failed 404 insight not found: {insight_id}") + return + dashboard = None tag_queries(team_id=insight.team_id, insight_id=insight.pk, trigger="warmingV2") if dashboard_id: tag_queries(dashboard_id=dashboard_id) - dashboard = insight.dashboards.get(pk=dashboard_id) + dashboard = insight.dashboards.filter(pk=dashboard_id).first() with conversion_to_query_based(insight): logger.info(f"Warming insight cache: {insight.pk} for team {insight.team_id} and dashboard {dashboard_id}") @@ -145,6 +151,7 @@ def 
warm_insight_cache_task(insight_id: int, dashboard_id: Optional[int]): # We need an execution mode with recent cache: # - in case someone refreshed after this task was triggered # - if insight + dashboard combinations have the same cache key, we prevent needless recalculations + limit_context=LimitContext.QUERY_ASYNC, execution_mode=ExecutionMode.RECENT_CACHE_CALCULATE_BLOCKING_IF_STALE, insight_id=insight_id, dashboard_id=dashboard_id, diff --git a/posthog/cdp/templates/__init__.py b/posthog/cdp/templates/__init__.py index 50fe801363896..30a7ca761f8c6 100644 --- a/posthog/cdp/templates/__init__.py +++ b/posthog/cdp/templates/__init__.py @@ -8,6 +8,7 @@ from .clearbit.template_clearbit import template as clearbit from .posthog.template_posthog import template as posthog, TemplatePostHogMigrator from .aws_kinesis.template_aws_kinesis import template as aws_kinesis +from .discord.template_discord import template as discord from .salesforce.template_salesforce import template_create as salesforce_create, template_update as salesforce_update from .mailjet.template_mailjet import ( template_create_contact as mailjet_create_contact, @@ -25,27 +26,36 @@ from .knock.template_knock import template as knock from .meta_ads.template_meta_ads import template as meta_ads from .activecampaign.template_activecampaign import template as activecampaign +from .google_ads.template_google_ads import template as google_ads +from .attio.template_attio import template as attio +from .klaviyo.template_klaviyo import template_user as klaviyo_user, template_event as klaviyo_event from .google_cloud_storage.template_google_cloud_storage import ( template as google_cloud_storage, TemplateGoogleCloudStorageMigrator, ) - +from .airtable.template_airtable import template as airtable HOG_FUNCTION_TEMPLATES = [ slack, webhook, activecampaign, + airtable, + attio, avo, aws_kinesis, braze, clearbit, customerio, + discord, engage, gleap, + google_ads, google_cloud_storage, google_pubsub, hubspot, intercom, + klaviyo_event, + klaviyo_user, knock, loops, mailgun, diff --git a/posthog/cdp/templates/activecampaign/template_activecampaign.py b/posthog/cdp/templates/activecampaign/template_activecampaign.py index a3a9b4c55e985..1f79d51325574 100644 --- a/posthog/cdp/templates/activecampaign/template_activecampaign.py +++ b/posthog/cdp/templates/activecampaign/template_activecampaign.py @@ -41,7 +41,7 @@ }) if (res.status >= 400) { - print(f'Error from {inputs.accountName}.api-us1.com api:', res.status, res.body) + throw Error(f'Error from {inputs.accountName}.api-us1.com (status {res.status}): {res.body}') } else { print('Contact has been created or updated successfully!') } diff --git a/posthog/cdp/templates/airtable/template_airtable.py b/posthog/cdp/templates/airtable/template_airtable.py new file mode 100644 index 0000000000000..f2e0ef2bc6133 --- /dev/null +++ b/posthog/cdp/templates/airtable/template_airtable.py @@ -0,0 +1,82 @@ +from posthog.cdp.templates.hog_function_template import HogFunctionTemplate + + +template: HogFunctionTemplate = HogFunctionTemplate( + status="alpha", + id="template-airtable", + name="Airtable", + description="Creates Airtable records", + icon_url="/static/services/airtable.png", + category=["Custom"], + hog=""" +let url := f'https://api.airtable.com/v0/{inputs.base_id}/{inputs.table_name}' + +let payload := { + 'headers': { + 'Content-Type': 'application/json', + 'Authorization': f'Bearer {inputs.access_token}' + }, + 'body': { + 'fields': inputs.fields, + 'typecast': true + }, + 'method': 'POST' 
+} + +if (inputs.debug) { + print('Request', url, payload) +} + +let res := fetch(url, payload); + +if (inputs.debug) { + print('Response', res.status, res.body); +} +if (res.status >= 400) { + throw Error(f'Error from api.airtable.com (status {res.status}): {res.body}') +} +""".strip(), + inputs_schema=[ + { + "key": "access_token", + "type": "string", + "label": "Airtable access token", + "secret": True, + "required": True, + "description": "Create this at https://airtable.com/create/tokens", + }, + { + "key": "base_id", + "type": "string", + "label": "Airtable base ID", + "secret": False, + "required": True, + "description": "Find this at https://airtable.com/developers/web/api/introduction", + }, + { + "key": "table_name", + "type": "string", + "label": "Table name", + "secret": False, + "required": True, + }, + { + "key": "fields", + "type": "json", + "label": "Fields", + "default": {"Timestamp": "{event.timestamp}", "Person Name": "{person.name}"}, + "secret": False, + "required": True, + "description": "Map field names from Airtable to properties from events and person records.", + }, + { + "key": "debug", + "type": "boolean", + "label": "Log responses", + "description": "Logs the response of http calls for debugging.", + "secret": False, + "required": False, + "default": False, + }, + ], +) diff --git a/posthog/cdp/templates/airtable/test_template_airtable.py b/posthog/cdp/templates/airtable/test_template_airtable.py new file mode 100644 index 0000000000000..fb4549c7d3c35 --- /dev/null +++ b/posthog/cdp/templates/airtable/test_template_airtable.py @@ -0,0 +1,66 @@ +from inline_snapshot import snapshot +from posthog.cdp.templates.helpers import BaseHogFunctionTemplateTest +from posthog.cdp.templates.airtable.template_airtable import template as template_airtable + + +class TestTemplateAirtable(BaseHogFunctionTemplateTest): + template = template_airtable + + def test_function_works(self): + self.run_function( + inputs={ + "access_token": "test_token", + "base_id": "test_base_id", + "table_name": "test_table", + "fields": {"Name": "John Doe", "Email": "john@example.com"}, + "debug": False, + } + ) + + assert self.get_mock_fetch_calls()[0] == snapshot( + ( + "https://api.airtable.com/v0/test_base_id/test_table", + { + "headers": {"Content-Type": "application/json", "Authorization": "Bearer test_token"}, + "body": {"fields": {"Name": "John Doe", "Email": "john@example.com"}, "typecast": True}, + "method": "POST", + }, + ) + ) + assert self.get_mock_print_calls() == snapshot([]) + + def test_prints_when_debugging(self): + self.run_function( + inputs={ + "access_token": "test_token", + "base_id": "test_base_id", + "table_name": "test_table", + "fields": {"Name": "John Doe", "Email": "john@example.com"}, + "debug": True, + } + ) + + assert self.get_mock_fetch_calls()[0] == snapshot( + ( + "https://api.airtable.com/v0/test_base_id/test_table", + { + "headers": {"Content-Type": "application/json", "Authorization": "Bearer test_token"}, + "body": {"fields": {"Name": "John Doe", "Email": "john@example.com"}, "typecast": True}, + "method": "POST", + }, + ) + ) + assert self.get_mock_print_calls() == snapshot( + [ + ( + "Request", + "https://api.airtable.com/v0/test_base_id/test_table", + { + "headers": {"Content-Type": "application/json", "Authorization": "Bearer test_token"}, + "body": {"fields": {"Name": "John Doe", "Email": "john@example.com"}, "typecast": True}, + "method": "POST", + }, + ), + ("Response", 200, {}), + ] + ) diff --git a/posthog/cdp/templates/attio/template_attio.py 
b/posthog/cdp/templates/attio/template_attio.py new file mode 100644 index 0000000000000..8899ee01bff36 --- /dev/null +++ b/posthog/cdp/templates/attio/template_attio.py @@ -0,0 +1,74 @@ +from posthog.cdp.templates.hog_function_template import HogFunctionTemplate + +template: HogFunctionTemplate = HogFunctionTemplate( + status="beta", + id="template-attio", + name="Attio", + description="Update contacts in Attio", + icon_url="/static/services/attio.png", + category=["Advertisement"], + hog=""" +let body := { + 'data': { + 'values': { + 'email_addresses': [ + { + 'email_address': inputs.email + } + ] + } + } +} + +for (let key, value in inputs.personAttributes) { + if (not empty(value)) { + body.data.values[key] := value + } +} + +let res := fetch(f'https://api.attio.com/v2/objects/people/records?matching_attribute=email_addresses', { + 'method': 'PUT', + 'headers': { + 'Authorization': f'Bearer {inputs.apiKey}', + 'Content-Type': 'application/json', + }, + 'body': body +}) +if (res.status >= 400) { + throw Error(f'Error from api.attio.com (status {res.status}): {res.body}') +} +""".strip(), + inputs_schema=[ + { + "key": "apiKey", + "type": "string", + "label": "Attio API Key", + "description": "Check out this page on how to get your API key: https://attio.com/help/reference/integrations-automations/generating-an-api-key", + "secret": False, + "required": True, + }, + { + "key": "email", + "type": "string", + "label": "Email of the user", + "description": "Where to find the email for the contact to be created. You can use the filters section to filter out unwanted emails or internal users.", + "default": "{person.properties.email}", + "secret": False, + "required": True, + }, + { + "key": "personAttributes", + "type": "dictionary", + "label": "Additional Person attributes", + "description": "This persons keys should be the slugs or IDs of the attributes you wish to update. 
For information on potential custom attributes, refer to the attribute type docs: https://developers.attio.com/docs/attribute-types", + "default": {"name": "{person.properties.name}", "job_title": "{person.properties.job_title}"}, + "secret": False, + "required": True, + }, + ], + filters={ + "events": [], + "actions": [], + "filter_test_accounts": True, + }, +) diff --git a/posthog/cdp/templates/attio/test_template_attio.py b/posthog/cdp/templates/attio/test_template_attio.py new file mode 100644 index 0000000000000..20d8af41822cb --- /dev/null +++ b/posthog/cdp/templates/attio/test_template_attio.py @@ -0,0 +1,69 @@ +from inline_snapshot import snapshot +from posthog.cdp.templates.helpers import BaseHogFunctionTemplateTest +from posthog.cdp.templates.attio.template_attio import ( + template as template_attio, +) + + +def create_inputs(**kwargs): + inputs = { + "apiKey": "apikey12345", + "email": "max@posthog.com", + "personAttributes": {"name": "Max", "job_title": "Mascot"}, + } + inputs.update(kwargs) + + return inputs + + +class TestTemplateAttio(BaseHogFunctionTemplateTest): + template = template_attio + + def test_function_works(self): + self.mock_fetch_response = lambda *args: {"status": 200, "body": {"ok": True}} # type: ignore + self.run_function(inputs=create_inputs()) + assert self.get_mock_fetch_calls()[0] == snapshot( + ( + "https://api.attio.com/v2/objects/people/records?matching_attribute=email_addresses", + { + "body": { + "data": { + "values": { + "email_addresses": [{"email_address": "max@posthog.com"}], + "name": "Max", + "job_title": "Mascot", + } + } + }, + "method": "PUT", + "headers": { + "Authorization": "Bearer apikey12345", + "Content-Type": "application/json", + }, + }, + ) + ) + + def test_ignores_empty_values(self): + self.mock_fetch_response = lambda *args: {"status": 200, "body": {"ok": True}} # type: ignore + self.run_function(inputs=create_inputs(personAttributes={"name": "Max", "job_title": ""})) + assert self.get_mock_fetch_calls()[0] == snapshot( + ( + "https://api.attio.com/v2/objects/people/records?matching_attribute=email_addresses", + { + "body": { + "data": { + "values": { + "email_addresses": [{"email_address": "max@posthog.com"}], + "name": "Max", + } + } + }, + "method": "PUT", + "headers": { + "Authorization": "Bearer apikey12345", + "Content-Type": "application/json", + }, + }, + ) + ) diff --git a/posthog/cdp/templates/aws_kinesis/template_aws_kinesis.py b/posthog/cdp/templates/aws_kinesis/template_aws_kinesis.py index f07f396d23983..f7d7f4d36c603 100644 --- a/posthog/cdp/templates/aws_kinesis/template_aws_kinesis.py +++ b/posthog/cdp/templates/aws_kinesis/template_aws_kinesis.py @@ -16,7 +16,7 @@ let date := formatDateTime(now(), '%Y%m%d') let payload := jsonStringify({ - 'StreamName': inputs.aws_kinesis_stream_arn, + 'StreamName': inputs.aws_kinesis_stream_name, 'PartitionKey': inputs.aws_kinesis_partition_key ?? 
generateUUIDv4(), 'Data': base64Encode(jsonStringify(inputs.payload)), }) @@ -81,7 +81,7 @@ if (res.status >= 200 and res.status < 300) { print('Event sent successfully!') } else { - print('Error sending event:', res.status, res.body) + throw Error(f'Error from {inputs.aws_region}.amazonaws.com (status {res.status}): {res.body}') } """.strip(), inputs_schema=[ @@ -108,9 +108,9 @@ "default": "us-east-1", }, { - "key": "aws_kinesis_stream_arn", + "key": "aws_kinesis_stream_name", "type": "string", - "label": "Kinesis Stream ARN", + "label": "Kinesis Stream Name", "secret": False, "required": True, }, diff --git a/posthog/cdp/templates/aws_kinesis/test_template_aws_kinesis.py b/posthog/cdp/templates/aws_kinesis/test_template_aws_kinesis.py index 5c4adee71ee9b..4a51393b44c08 100644 --- a/posthog/cdp/templates/aws_kinesis/test_template_aws_kinesis.py +++ b/posthog/cdp/templates/aws_kinesis/test_template_aws_kinesis.py @@ -13,7 +13,7 @@ def test_function_works(self): "aws_access_key_id": "aws_access_key_id", "aws_secret_access_key": "aws_secret_access_key", "aws_region": "aws_region", - "aws_kinesis_stream_arn": "aws_kinesis_stream_arn", + "aws_kinesis_stream_name": "aws_kinesis_stream_arn", "aws_kinesis_partition_key": "1", "payload": {"hello": "world"}, } diff --git a/posthog/cdp/templates/discord/template_discord.py b/posthog/cdp/templates/discord/template_discord.py new file mode 100644 index 0000000000000..bca3f4604e216 --- /dev/null +++ b/posthog/cdp/templates/discord/template_discord.py @@ -0,0 +1,66 @@ +from posthog.cdp.templates.hog_function_template import HogFunctionTemplate, HogFunctionSubTemplate, SUB_TEMPLATE_COMMON + +template: HogFunctionTemplate = HogFunctionTemplate( + status="free", + id="template-discord", + name="Discord", + description="Sends a message to a discord channel", + icon_url="/static/services/discord.png", + category=["Customer Success"], + hog=""" +if (not match(inputs.webhookUrl, '^https://discord.com/api/webhooks/.*')) { + throw Error('Invalid URL. The URL should match the format: https://discord.com/api/webhooks/...') +} + +let res := fetch(inputs.webhookUrl, { + 'body': { + 'content': inputs.content + }, + 'method': 'POST', + 'headers': { + 'Content-Type': 'application/json' + } +}); + +if (res.status >= 400) { + throw Error(f'Failed to post message to Discord: {res.status}: {res.body}'); +} +""".strip(), + inputs_schema=[ + { + "key": "webhookUrl", + "type": "string", + "label": "Webhook URL", + "description": "See this page on how to generate a Webhook URL: https://support.discord.com/hc/en-us/articles/228383668-Intro-to-Webhooks", + "secret": False, + "required": True, + }, + { + "key": "content", + "type": "string", + "label": "Content", + "description": "(see https://support.discord.com/hc/en-us/articles/210298617-Markdown-Text-101-Chat-Formatting-Bold-Italic-Underline)", + "default": "**{person.name}** triggered event: '{event.event}'", + "secret": False, + "required": True, + }, + ], + sub_templates=[ + HogFunctionSubTemplate( + id="early_access_feature_enrollment", + name="Post to Discord on feature enrollment", + description="Posts a message to Discord when a user enrolls or un-enrolls in an early access feature", + filters=SUB_TEMPLATE_COMMON["early_access_feature_enrollment"].filters, + inputs={ + "content": "**{person.name}** {event.properties.$feature_enrollment ? 
'enrolled in' : 'un-enrolled from'} the early access feature for '{event.properties.$feature_flag}'" + }, + ), + HogFunctionSubTemplate( + id="survey_response", + name="Post to Discord on survey response", + description="Posts a message to Discord when a user responds to a survey", + filters=SUB_TEMPLATE_COMMON["survey_response"].filters, + inputs={"content": "**{person.name}** responded to survey **{event.properties.$survey_name}**"}, + ), + ], +) diff --git a/posthog/cdp/templates/discord/test_template_discord.py b/posthog/cdp/templates/discord/test_template_discord.py new file mode 100644 index 0000000000000..b3e2bafe4edd5 --- /dev/null +++ b/posthog/cdp/templates/discord/test_template_discord.py @@ -0,0 +1,51 @@ +import pytest +from inline_snapshot import snapshot +from posthog.cdp.templates.helpers import BaseHogFunctionTemplateTest +from posthog.cdp.templates.discord.template_discord import template as template_discord + + +class TestTemplateDiscord(BaseHogFunctionTemplateTest): + template = template_discord + + def _inputs(self, **kwargs): + inputs = { + "webhookUrl": "https://discord.com/api/webhooks/00000000000000000/xxxxxxxxxxxxxx", + "content": "**max@posthog.com** triggered event: '$pageview'", + } + inputs.update(kwargs) + return inputs + + def test_function_works(self): + self.run_function(inputs=self._inputs()) + + assert self.get_mock_fetch_calls()[0] == snapshot( + ( + "https://discord.com/api/webhooks/00000000000000000/xxxxxxxxxxxxxx", + { + "method": "POST", + "headers": { + "Content-Type": "application/json", + }, + "body": { + "content": "**max@posthog.com** triggered event: '$pageview'", + }, + }, + ) + ) + + def test_only_allow_teams_url(self): + for url, allowed in [ + ["https://discord.com/api/webhooks/abc", True], + ["https://webhook.site/def", False], + ["https://webhook.site/def#https://discord.com/api/webhooks/abc", False], + ]: + if allowed: + self.run_function(inputs=self._inputs(webhookUrl=url)) + assert len(self.get_mock_fetch_calls()) == 1 + else: + with pytest.raises(Exception) as e: + self.run_function(inputs=self._inputs(webhookUrl=url)) + assert ( + e.value.message # type: ignore[attr-defined] + == "Invalid URL. The URL should match the format: https://discord.com/api/webhooks/..." 
+ ) diff --git a/posthog/cdp/templates/gleap/template_gleap.py b/posthog/cdp/templates/gleap/template_gleap.py index a3a8337aef0df..1616c373594a6 100644 --- a/posthog/cdp/templates/gleap/template_gleap.py +++ b/posthog/cdp/templates/gleap/template_gleap.py @@ -37,7 +37,7 @@ }) if (res.status >= 400) { - print('Error from gleap.io api:', res.status, res.body) + throw Error(f'Error from gleap.io (status {res.status}): {res.body}') } """.strip(), diff --git a/posthog/cdp/templates/google_ads/template_google_ads.py b/posthog/cdp/templates/google_ads/template_google_ads.py new file mode 100644 index 0000000000000..1a658a9436ed2 --- /dev/null +++ b/posthog/cdp/templates/google_ads/template_google_ads.py @@ -0,0 +1,93 @@ +from posthog.cdp.templates.hog_function_template import HogFunctionTemplate + +template: HogFunctionTemplate = HogFunctionTemplate( + status="alpha", + id="template-google-ads", + name="Google Ads Conversions", + description="Send conversion events to Google Ads", + icon_url="/static/services/google-ads.png", + category=["Advertisement"], + hog=""" +let res := fetch(f'https://googleads.googleapis.com/v17/customers/{replaceAll(inputs.customerId, '-', '')}:uploadClickConversions', { + 'method': 'POST', + 'headers': { + 'Authorization': f'Bearer {inputs.oauth.access_token}', + 'Content-Type': 'application/json', + 'developer-token': inputs.developerToken + }, + 'body': { + 'conversions': [ + { + 'gclid': inputs.gclid, + 'conversionAction': f'customers/{replaceAll(inputs.customerId, '-', '')}/conversionActions/{replaceAll(inputs.conversionActionId, 'AW-', '')}', + 'conversionDateTime': inputs.conversionDateTime + } + ], + 'partialFailure': true, + 'validateOnly': true + } +}) + +if (res.status >= 400) { + throw Error(f'Error from googleads.googleapis.com (status {res.status}): {res.body}') +} + +""".strip(), + inputs_schema=[ + { + "key": "oauth", + "type": "integration", + "integration": "google-ads", + "label": "Google Ads account", + "secret": False, + "required": True, + }, + { + "key": "developerToken", + "type": "string", + "label": "Developer token", + "description": "This should be a 22-character long alphanumeric string. Check out this page on how to obtain such a token: https://developers.google.com/google-ads/api/docs/get-started/dev-token", + "secret": False, + "required": True, + }, + { + "key": "customerId", + "type": "string", + "label": "Customer ID", + "description": "ID of your Google Ads Account. This should be 10-digits and in XXX-XXX-XXXX format.", + "secret": False, + "required": True, + }, + { + "key": "conversionActionId", + "type": "string", + "label": "Conversion action ID", + "description": "You will find this information in the event snippet for your conversion action, for example send_to: AW-CONVERSION_ID/AW-CONVERSION_LABEL. This should be in the AW-CONVERSION_ID format.", + "secret": False, + "required": True, + }, + { + "key": "gclid", + "type": "string", + "label": "Google Click ID (gclid)", + "description": "The Google click ID (gclid) associated with this conversion.", + "default": "{person.gclid}", + "secret": False, + "required": True, + }, + { + "key": "conversionDateTime", + "type": "string", + "label": "Conversion Date Time", + "description": 'The date time at which the conversion occurred. Must be after the click time. The timezone must be specified. The format is "yyyy-mm-dd hh:mm:ss+|-hh:mm", e.g. 
"2019-01-01 12:32:45-08:00".', + "default": "{event.timestamp}", + "secret": False, + "required": True, + }, + ], + filters={ + "events": [], + "actions": [], + "filter_test_accounts": True, + }, +) diff --git a/posthog/cdp/templates/google_ads/test_template_google_ads.py b/posthog/cdp/templates/google_ads/test_template_google_ads.py new file mode 100644 index 0000000000000..49f842b84b78a --- /dev/null +++ b/posthog/cdp/templates/google_ads/test_template_google_ads.py @@ -0,0 +1,51 @@ +from inline_snapshot import snapshot +from posthog.cdp.templates.helpers import BaseHogFunctionTemplateTest +from posthog.cdp.templates.google_ads.template_google_ads import ( + template as template_google_ads, +) + + +class TestTemplateGoogleAds(BaseHogFunctionTemplateTest): + template = template_google_ads + + def _inputs(self, **kwargs): + inputs = { + "oauth": { + "access_token": "oauth-1234", + }, + "developerToken": "developer-token1234", + "customerId": "123-123-1234", + "conversionActionId": "AW-123456789", + "gclid": "89y4thuergnjkd34oihroh3uhg39uwhgt9", + "conversionDateTime": "2024-10-10 16:32:45+02:00", + } + inputs.update(kwargs) + return inputs + + def test_function_works(self): + self.mock_fetch_response = lambda *args: {"status": 200, "body": {"ok": True}} # type: ignore + self.run_function(self._inputs()) + assert self.get_mock_fetch_calls()[0] == snapshot( + ( + "https://googleads.googleapis.com/v17/customers/1231231234:uploadClickConversions", + { + "body": { + "conversions": [ + { + "gclid": "89y4thuergnjkd34oihroh3uhg39uwhgt9", + "conversionAction": f"customers/1231231234/conversionActions/123456789", + "conversionDateTime": "2024-10-10 16:32:45+02:00", + } + ], + "partialFailure": True, + "validateOnly": True, + }, + "method": "POST", + "headers": { + "Authorization": "Bearer oauth-1234", + "Content-Type": "application/json", + "developer-token": "developer-token1234", + }, + }, + ) + ) diff --git a/posthog/cdp/templates/intercom/template_intercom.py b/posthog/cdp/templates/intercom/template_intercom.py index 8ecf2d6e1e484..43bfcc0d80f37 100644 --- a/posthog/cdp/templates/intercom/template_intercom.py +++ b/posthog/cdp/templates/intercom/template_intercom.py @@ -37,11 +37,11 @@ } if (res.status == 404) { - print('No existing contact found for email') + throw Error('No existing contact found for email') return } -print('Error sending event:', res.status, res.body) +throw Error(f'Error from intercom api (status {res.status}): {res.body}') """.strip(), inputs_schema=[ diff --git a/posthog/cdp/templates/intercom/test_template_intercom.py b/posthog/cdp/templates/intercom/test_template_intercom.py index f76f48f1d5485..099f43214dc8c 100644 --- a/posthog/cdp/templates/intercom/test_template_intercom.py +++ b/posthog/cdp/templates/intercom/test_template_intercom.py @@ -1,3 +1,4 @@ +import pytest from inline_snapshot import snapshot from posthog.cdp.templates.helpers import BaseHogFunctionTemplateTest from posthog.cdp.templates.intercom.template_intercom import template as template_intercom, TemplateIntercomMigrator @@ -56,8 +57,9 @@ def test_exits_if_no_email(self): def test_logs_missing_error(self): self.mock_fetch_response = lambda *args: {"status": 404, "body": {"status": "missing"}} # type: ignore - self.run_function(inputs=self._inputs()) - assert self.get_mock_print_calls() == [("No existing contact found for email",)] + with pytest.raises(Exception) as e: + self.run_function(inputs=self._inputs()) + assert e.value.message == "No existing contact found for email" # type: 
ignore[attr-defined] def test_logs_other_errors(self): self.mock_fetch_response = lambda *args: { # type: ignore @@ -68,18 +70,12 @@ def test_logs_other_errors(self): "errors": [{"code": "error", "message": "Other error"}], }, } - self.run_function(inputs=self._inputs()) - assert self.get_mock_print_calls() == [ - ( - "Error sending event:", - 400, - { - "type": "error.list", - "request_id": "001dh0h1qb205el244gg", - "errors": [{"code": "error", "message": "Other error"}], - }, - ) - ] + with pytest.raises(Exception) as e: + self.run_function(inputs=self._inputs()) + assert ( + e.value.message # type: ignore[attr-defined] + == "Error from intercom api (status 400): {'type': 'error.list', 'request_id': '001dh0h1qb205el244gg', 'errors': [{'code': 'error', 'message': 'Other error'}]}" + ) class TestTemplateMigration(BaseTest): diff --git a/posthog/cdp/templates/klaviyo/template_klaviyo.py b/posthog/cdp/templates/klaviyo/template_klaviyo.py new file mode 100644 index 0000000000000..805e1ad887226 --- /dev/null +++ b/posthog/cdp/templates/klaviyo/template_klaviyo.py @@ -0,0 +1,264 @@ +from posthog.cdp.templates.hog_function_template import HogFunctionTemplate + +template_user: HogFunctionTemplate = HogFunctionTemplate( + status="beta", + id="template-klaviyo-user", + name="Klaviyo", + description="Updates a contact in Klaviyo", + icon_url="/static/services/klaviyo.png", + category=["Email Marketing"], + hog=""" +if (empty(inputs.externalId) and empty(inputs.email)) { + print('Email or External ID has to be set. Skipping...') + return +} + +let body := { + 'data': { + 'type': 'profile', + 'attributes': { + 'location': {}, + 'properties': {}, + } + } +} + +if (not empty(person.properties.$geoip_latitude)) body.data.attributes.location.latitude := person.properties.$geoip_latitude +if (not empty(person.properties.$geoip_longitude)) body.data.attributes.location.longitude := person.properties.$geoip_longitude +if (not empty(person.properties.$geoip_city_name)) body.data.attributes.location.city := person.properties.$geoip_city_name +if (not empty(person.properties.$geoip_country_name)) body.data.attributes.location.country := person.properties.$geoip_country_name +if (not empty(person.properties.$geoip_continent_code)) body.data.attributes.location.region := person.properties.$geoip_continent_code +if (not empty(person.properties.$geoip_postal_code)) body.data.attributes.location.zip := person.properties.$geoip_postal_code +if (not empty(person.properties.$geoip_time_zone)) body.data.attributes.location.timezone := person.properties.$geoip_time_zone + +if (not empty(inputs.email)) body.data.attributes.email := inputs.email +if (not empty(inputs.externalId)) body.data.attributes.external_id := inputs.externalId + +if (inputs.include_all_properties) { + for (let key, value in person.properties) { + if (not empty(value) and not key like '$%') { + body.data.attributes.properties[key] := value + } + } +} + +for (let key, value in inputs.customProperties) { + if (not empty(value)) { + body.data.attributes.properties[key] := value + } +} + +let res := fetch('https://a.klaviyo.com/api/profiles', { + 'method': 'POST', + 'headers': { + 'Authorization': f'Klaviyo-API-Key {inputs.apiKey}', + 'revision': '2024-10-15', + 'Content-Type': 'application/json' + }, + 'body': body +}) + +if (res.status == 409 and not empty(res.body.errors.1.meta.duplicate_profile_id)) { + let id := res.body.errors.1.meta.duplicate_profile_id + body.data.id := id + + let res2 := fetch(f'https://a.klaviyo.com/api/profiles/{id}', { + 
'method': 'PATCH', + 'headers': { + 'Authorization': f'Klaviyo-API-Key {inputs.apiKey}', + 'revision': '2024-10-15', + 'Content-Type': 'application/json' + }, + 'body': body + }) + if (res2.status >= 400) { + throw Error(f'Error from a.klaviyo.com api: {res2.status}: {res2.body}'); + } +} else if (res.status >= 400) { + throw Error(f'Error from a.klaviyo.com api: {res.status}: {res.body}'); +} + +""".strip(), + inputs_schema=[ + { + "key": "apiKey", + "type": "string", + "label": "Klaviyo Private API Key", + "description": "You can create a Private API Key in the account settings (https://www.klaviyo.com/settings/account/api-keys)", + "secret": True, + "required": True, + }, + { + "key": "email", + "type": "string", + "label": "User Email", + "description": "Where to find the email for the contact to be created. You can use the filters section to filter out unwanted emails or internal users.", + "default": "{person.properties.email}", + "secret": False, + "required": True, + }, + { + "key": "externalId", + "type": "string", + "label": "External ID", + "description": "A unique identifier used to associate Klaviyo profiles with profiles in an external system", + "default": "{person.id}", + "secret": False, + "required": True, + }, + { + "key": "include_all_properties", + "type": "boolean", + "label": "Include all person properties as custom properties", + "description": "If set, all event properties will be included as attributes. Individual attributes can be overridden below. For identify events the Person properties will be used.", + "default": False, + "secret": False, + "required": True, + }, + { + "key": "customProperties", + "type": "dictionary", + "label": "Custom properties", + "description": "Map of Custom properties and their values.", + "default": { + "first_name": "{person.properties.firstname}", + "last_name": "{person.properties.lastname}", + "title": "{person.properties.title}", + "organization": "{person.properties.organization}", + "phone_number": "{person.properties.phone}", + }, + "secret": False, + "required": False, + }, + ], + filters={ + "events": [ + {"id": "$identify", "name": "$identify", "type": "events", "order": 0}, + {"id": "$set", "name": "$set", "type": "events", "order": 0}, + ], + "actions": [], + "filter_test_accounts": True, + }, +) + +template_event: HogFunctionTemplate = HogFunctionTemplate( + status="beta", + id="template-klaviyo-event", + name="Klaviyo", + description="Send events to Klaviyo", + icon_url="/static/services/klaviyo.png", + category=["Email Marketing"], + hog=""" +if (empty(inputs.externalId) and empty(inputs.email)) { + print('Email or External ID has to be set. 
Skipping...') + return +} + +let body := { + 'data': { + 'type': 'event', + 'attributes': { + 'properties': {}, + 'metric': { + 'data': { + 'type': 'metric', + 'attributes': { + 'name': event.event + } + } + }, + 'profile': { + 'data': { + 'type': 'profile', + 'attributes': {} + } + } + } + } +} + +if (not empty(inputs.email)) body.data.attributes.profile.data.attributes.email := inputs.email +if (not empty(inputs.externalId)) body.data.attributes.profile.data.attributes.external_id := inputs.externalId + +if (inputs.include_all_properties) { + for (let key, value in event.properties) { + if (not empty(value) and not key like '$%') { + body.data.attributes.properties[key] := value + } + } +} + +for (let key, value in inputs.attributes) { + if (not empty(value)) { + body.data.attributes.properties[key] := value + } +} + +let res := fetch('https://a.klaviyo.com/api/events', { + 'method': 'POST', + 'headers': { + 'Authorization': f'Klaviyo-API-Key {inputs.apiKey}', + 'revision': '2024-10-15', + 'Content-Type': 'application/json' + }, + 'body': body +}) + + +if (res.status >= 400) { + throw Error(f'Error from a.klaviyo.com api: {res.status}: {res.body}'); +} + +""".strip(), + inputs_schema=[ + { + "key": "apiKey", + "type": "string", + "label": "Klaviyo Private API Key", + "description": "You can create a Private API Key in the account settings (https://www.klaviyo.com/settings/account/api-keys)", + "secret": True, + "required": True, + }, + { + "key": "email", + "type": "string", + "label": "User Email", + "description": "Where to find the email for the contact to be created. You can use the filters section to filter out unwanted emails or internal users.", + "default": "{person.properties.email}", + "secret": False, + "required": True, + }, + { + "key": "externalId", + "type": "string", + "label": "External ID", + "description": "A unique identifier used to associate Klaviyo profiles with profiles in an external system", + "default": "{person.id}", + "secret": False, + "required": True, + }, + { + "key": "include_all_properties", + "type": "boolean", + "label": "Include all event properties as event attributes", + "description": "If set, all event properties will be included as attributes. 
Individual attributes can be overridden below.", + "default": False, + "secret": False, + "required": True, + }, + { + "key": "attributes", + "type": "dictionary", + "label": "Attributes", + "description": "Map of event attributes and their values.", + "default": {"price": "{event.properties.price}", "currency": "{event.properties.currency}"}, + "secret": False, + "required": False, + }, + ], + filters={ + "events": [], + "actions": [], + "filter_test_accounts": True, + }, +) diff --git a/posthog/cdp/templates/klaviyo/test_template_klaviyo.py b/posthog/cdp/templates/klaviyo/test_template_klaviyo.py new file mode 100644 index 0000000000000..df1d8b32daaa1 --- /dev/null +++ b/posthog/cdp/templates/klaviyo/test_template_klaviyo.py @@ -0,0 +1,259 @@ +from inline_snapshot import snapshot +import pytest +from hogvm.python.utils import UncaughtHogVMException +from posthog.cdp.templates.helpers import BaseHogFunctionTemplateTest +from posthog.cdp.templates.klaviyo.template_klaviyo import ( + template_user as klaviyo_user, + template_event as klaviyo_event, +) + + +class TestTemplateKlaviyoUser(BaseHogFunctionTemplateTest): + template = klaviyo_user + + def create_inputs(self, **kwargs): + inputs = { + "apiKey": "API_KEY", + "email": "max@posthog.com", + "externalId": "EXTERNAL_ID", + "include_all_properties": False, + "customProperties": { + "first_name": "Max", + "last_name": "AI", + "title": "Hedgehog in Residence", + "organization": "PostHog", + "phone_number": "+0123456789", + }, + } + inputs.update(kwargs) + + return inputs + + def test_function_works(self): + self.run_function( + inputs=self.create_inputs(), + globals={ + "person": {"properties": {"$geoip_country_name": "United States", "plan": "pay-as-you-go"}}, + }, + ) + + assert self.get_mock_fetch_calls()[0] == snapshot( + ( + "https://a.klaviyo.com/api/profiles", + { + "method": "POST", + "headers": { + "Authorization": "Klaviyo-API-Key API_KEY", + "revision": "2024-10-15", + "Content-Type": "application/json", + }, + "body": { + "data": { + "type": "profile", + "attributes": { + "location": {"country": "United States"}, + "properties": { + "first_name": "Max", + "last_name": "AI", + "title": "Hedgehog in Residence", + "organization": "PostHog", + "phone_number": "+0123456789", + }, + "email": "max@posthog.com", + "external_id": "EXTERNAL_ID", + }, + } + }, + }, + ) + ) + + def test_patch_existing_profile(self): + self.mock_fetch_response = lambda *args: { # type: ignore + "status": 409, + "body": { + "errors": [ + { + "id": "207e2b13-ac84-4afe-a064-616a33006e6e", + "status": 409, + "code": "duplicate_profile", + "title": "Conflict.", + "detail": "A profile already exists with one of these identifiers.", + "source": {"pointer": "/data/attributes"}, + "links": {}, + "meta": {"duplicate_profile_id": "01JAFS0VVWGJFE7QE4EJQBA5AS"}, + } + ] + }, + } + + # both requests will fail with error code 409 + with pytest.raises(UncaughtHogVMException): + self.run_function(inputs=self.create_inputs()) + + assert self.get_mock_fetch_calls()[1] == snapshot( + ( + "https://a.klaviyo.com/api/profiles/01JAFS0VVWGJFE7QE4EJQBA5AS", + { + "method": "PATCH", + "headers": { + "Authorization": "Klaviyo-API-Key API_KEY", + "revision": "2024-10-15", + "Content-Type": "application/json", + }, + "body": { + "data": { + "type": "profile", + "attributes": { + "location": {}, + "properties": { + "first_name": "Max", + "last_name": "AI", + "title": "Hedgehog in Residence", + "organization": "PostHog", + "phone_number": "+0123456789", + }, + "email": "max@posthog.com", + 
"external_id": "EXTERNAL_ID", + }, + "id": "01JAFS0VVWGJFE7QE4EJQBA5AS", + } + }, + }, + ) + ) + + def test_body_includes_all_properties_if_set(self): + self.run_function( + inputs=self.create_inputs(include_all_properties=False), + globals={"person": {"properties": {"$geoip_country_name": "United States", "plan": "pay-as-you-go"}}}, + ) + + assert self.get_mock_fetch_calls()[0][1]["body"]["data"]["attributes"]["properties"] == snapshot( + { + "first_name": "Max", + "last_name": "AI", + "title": "Hedgehog in Residence", + "organization": "PostHog", + "phone_number": "+0123456789", + } + ) + + self.run_function( + inputs=self.create_inputs(include_all_properties=True), + globals={"person": {"properties": {"$geoip_country_name": "United States", "plan": "pay-as-you-go"}}}, + ) + assert self.get_mock_fetch_calls()[0][1]["body"]["data"]["attributes"]["properties"] == snapshot( + { + "first_name": "Max", + "last_name": "AI", + "title": "Hedgehog in Residence", + "organization": "PostHog", + "phone_number": "+0123456789", + "plan": "pay-as-you-go", + } + ) + + def test_function_requires_identifier(self): + self.run_function(inputs=self.create_inputs(email=None, externalId="")) + + assert not self.get_mock_fetch_calls() + assert self.get_mock_print_calls() == snapshot([("Email or External ID has to be set. Skipping...",)]) + + def test_function_errors_on_bad_status(self): + self.mock_fetch_response = lambda *args: {"status": 400, "body": {"error": "error"}} # type: ignore + with pytest.raises(UncaughtHogVMException) as e: + self.run_function(inputs=self.create_inputs()) + assert e.value.message == "Error from a.klaviyo.com api: 400: {'error': 'error'}" + + +class TestTemplateKlaviyoEvent(BaseHogFunctionTemplateTest): + def create_inputs(self, **kwargs): + inputs = { + "apiKey": "API_KEY", + "email": "max@posthog.com", + "externalId": "EXTERNAL_ID", + "include_all_properties": False, + "attributes": {"price": "25.99", "currency": "USD"}, + } + inputs.update(kwargs) + + return inputs + + template = klaviyo_event + + def test_function_works(self): + self.run_function( + inputs=self.create_inputs(), + globals={ + "event": { + "event": "purchase", + }, + }, + ) + + assert self.get_mock_fetch_calls()[0] == snapshot( + ( + "https://a.klaviyo.com/api/events", + { + "method": "POST", + "headers": { + "Authorization": "Klaviyo-API-Key API_KEY", + "revision": "2024-10-15", + "Content-Type": "application/json", + }, + "body": { + "data": { + "type": "event", + "attributes": { + "properties": {"price": "25.99", "currency": "USD"}, + "metric": {"data": {"type": "metric", "attributes": {"name": "purchase"}}}, + "profile": { + "data": { + "type": "profile", + "attributes": {"email": "max@posthog.com", "external_id": "EXTERNAL_ID"}, + } + }, + }, + } + }, + }, + ) + ) + + def test_body_includes_all_properties_if_set(self): + self.run_function( + inputs=self.create_inputs(include_all_properties=False), + globals={ + "event": {"event": "purchase", "properties": {"customerType": "B2C"}}, + }, + ) + + assert self.get_mock_fetch_calls()[0][1]["body"]["data"]["attributes"]["properties"] == snapshot( + { + "price": "25.99", + "currency": "USD", + } + ) + + self.run_function( + inputs=self.create_inputs(include_all_properties=True), + globals={ + "event": {"event": "purchase", "properties": {"customerType": "B2C"}}, + }, + ) + assert self.get_mock_fetch_calls()[0][1]["body"]["data"]["attributes"]["properties"] == snapshot( + {"price": "25.99", "currency": "USD", "customerType": "B2C"} + ) + + def 
test_function_requires_identifier(self): + self.run_function(inputs=self.create_inputs(email=None, externalId="")) + + assert not self.get_mock_fetch_calls() + assert self.get_mock_print_calls() == snapshot([("Email or External ID has to be set. Skipping...",)]) + + def test_function_errors_on_bad_status(self): + self.mock_fetch_response = lambda *args: {"status": 400, "body": {"error": "error"}} # type: ignore + with pytest.raises(UncaughtHogVMException) as e: + self.run_function(inputs=self.create_inputs()) + assert e.value.message == "Error from a.klaviyo.com api: 400: {'error': 'error'}" diff --git a/posthog/cdp/templates/knock/template_knock.py b/posthog/cdp/templates/knock/template_knock.py index f635827081f4d..235f7d38a60ed 100644 --- a/posthog/cdp/templates/knock/template_knock.py +++ b/posthog/cdp/templates/knock/template_knock.py @@ -40,7 +40,7 @@ }) if (res.status >= 400) { - print('Error from knock.app api:', res.status, res.body) + throw Error(f'Error from knock.app (status {res.status}): {res.body}') } """.strip(), diff --git a/posthog/cdp/templates/mailgun/template_mailgun.py b/posthog/cdp/templates/mailgun/template_mailgun.py index 6ee13082e3094..e220bf6535dce 100644 --- a/posthog/cdp/templates/mailgun/template_mailgun.py +++ b/posthog/cdp/templates/mailgun/template_mailgun.py @@ -51,7 +51,7 @@ }) if (res.status >= 400) { - print('Error from Mailgun API:', res.status, res.body) + throw Error(f'Error from mailgun api (status {res.status}): {res.body}') } """.strip(), inputs_schema=[ diff --git a/posthog/cdp/templates/meta_ads/template_meta_ads.py b/posthog/cdp/templates/meta_ads/template_meta_ads.py index f136d3902f445..ad5b2fed1ff7c 100644 --- a/posthog/cdp/templates/meta_ads/template_meta_ads.py +++ b/posthog/cdp/templates/meta_ads/template_meta_ads.py @@ -3,7 +3,7 @@ template: HogFunctionTemplate = HogFunctionTemplate( status="alpha", id="template-meta-ads", - name="Google Ads Conversions", + name="Meta Ads Conversions", description="Send conversion events to Meta Ads", icon_url="/static/services/meta-ads.png", category=["Advertisement"], @@ -26,7 +26,7 @@ } }) if (res.status >= 400) { - print('Error from graph.facebook.com api:', res.status, res.body) + throw Error(f'Error from graph.facebook.com (status {res.status}): {res.body}') } """.strip(), inputs_schema=[ diff --git a/posthog/cdp/templates/sendgrid/template_sendgrid.py b/posthog/cdp/templates/sendgrid/template_sendgrid.py index e545b09017883..96a0d07ccdf2e 100644 --- a/posthog/cdp/templates/sendgrid/template_sendgrid.py +++ b/posthog/cdp/templates/sendgrid/template_sendgrid.py @@ -57,7 +57,7 @@ }) if (res.status > 300) { - print('Error updating contact:', res.status, res.body) + throw Error(f'Error from api.sendgrid.com (status {res.status}): {res.body}') } """.strip(), inputs_schema=[ diff --git a/posthog/constants.py b/posthog/constants.py index d90f91c359d75..92b842e8c612a 100644 --- a/posthog/constants.py +++ b/posthog/constants.py @@ -258,14 +258,6 @@ class RetentionQueryType(StrEnum): TARGET_FIRST_TIME = "target_first_time" -class ExperimentSignificanceCode(StrEnum): - SIGNIFICANT = "significant" - NOT_ENOUGH_EXPOSURE = "not_enough_exposure" - LOW_WIN_PROBABILITY = "low_win_probability" - HIGH_LOSS = "high_loss" - HIGH_P_VALUE = "high_p_value" - - class ExperimentNoResultsErrorKeys(StrEnum): NO_EVENTS = "no-events" NO_FLAG_INFO = "no-flag-info" diff --git a/posthog/hogql/ai.py b/posthog/hogql/ai.py index 4a5ff29b69477..3b010fd4268b8 100644 --- a/posthog/hogql/ai.py +++ b/posthog/hogql/ai.py @@ -32,7 +32,7 @@ 
ORDER BY week_of DESC""" SCHEMA_MESSAGE = ( - "My schema is:\n{schema_description}\nPerson or event metadata unspecified above (emails, names, etc.) " + "This project's schema is:\n\n{schema_description}\nPerson or event metadata unspecified above (emails, names, etc.) " 'is stored in `properties` fields, accessed like: `properties.foo.bar`. Note: "persons" means "users".\nSpecial events/properties such as pageview or screen start with `$`. Custom ones don\'t.' ) @@ -63,8 +63,8 @@ def write_sql_from_prompt(prompt: str, *, current_query: Optional[str] = None, t schema_description = "\n\n".join( ( f"Table {table_name} with fields:\n" - + "\n".join(f'- {field["key"]} ({field["type"]})' for field in table_fields) - for table_name, table_fields in serialized_database.items() + + "\n".join(f"- {field.name} ({field.type})" for field in table.fields.values()) + for table_name, table in serialized_database.items() ) ) instance_region = get_instance_region() or "HOBBY" diff --git a/posthog/hogql/base.py b/posthog/hogql/base.py index 6f281e3a393ab..6eede3377bebb 100644 --- a/posthog/hogql/base.py +++ b/posthog/hogql/base.py @@ -1,7 +1,7 @@ import re from dataclasses import dataclass, field -from typing import TYPE_CHECKING, Literal, Optional +from typing import TYPE_CHECKING, Literal, Optional, TypeVar from posthog.hogql.constants import ConstantDataType from posthog.hogql.errors import NotImplementedError @@ -34,6 +34,24 @@ def accept(self, visitor): return visitor.visit_unknown(self) raise NotImplementedError(f"{visitor.__class__.__name__} has no method {method_name}") + def to_hogql(self): + from posthog.hogql.printer import print_prepared_ast + from posthog.hogql.context import HogQLContext + + return print_prepared_ast( + node=self, + context=HogQLContext(enable_select_queries=True, limit_top_select=False), + dialect="hogql", + ) + + def __str__(self): + if isinstance(self, Type): + return super().__str__() + return f"sql({self.to_hogql()})" + + +_T_AST = TypeVar("_T_AST", bound=AST) + @dataclass(kw_only=True) class Type(AST): diff --git a/posthog/hogql/context.py b/posthog/hogql/context.py index e47b854f01ac7..615aedc8d2f6c 100644 --- a/posthog/hogql/context.py +++ b/posthog/hogql/context.py @@ -23,7 +23,7 @@ class HogQLContext: """Context given to a HogQL expression printer""" # Team making the queries - team_id: Optional[int] + team_id: Optional[int] = None # Team making the queries - if team is passed in, then the team isn't queried when creating the database team: Optional["Team"] = None # Virtual database we're querying, will be populated from team_id if not present diff --git a/posthog/hogql/database/test/test_database.py b/posthog/hogql/database/test/test_database.py index 616d1b1d8e57f..ddc5901a637d8 100644 --- a/posthog/hogql/database/test/test_database.py +++ b/posthog/hogql/database/test/test_database.py @@ -111,6 +111,27 @@ def test_serialize_database_warehouse_table_s3(self): assert field.type == "string" assert field.schema_valid is True + def test_serialize_database_warehouse_with_deleted_joins(self): + DataWarehouseJoin.objects.create( + team=self.team, + source_table_name="events", + source_table_key="event", + joining_table_name="groups", + joining_table_key="key", + field_name="some_field", + deleted=True, + ) + + db = create_hogql_database(team_id=self.team.pk) + + serialized_database = serialize_database(HogQLContext(team_id=self.team.pk, database=db)) + + events_table = serialized_database.get("events") + assert events_table is not None + + joined_field = 
events_table.fields.get("some_field") + assert joined_field is None + def test_serialize_database_warehouse_table_s3_with_hyphens(self): credentials = DataWarehouseCredential.objects.create(access_key="blah", access_secret="blah", team=self.team) DataWarehouseTable.objects.create( diff --git a/posthog/hogql/functions/mapping.py b/posthog/hogql/functions/mapping.py index 0b1830e21a264..45952aee0f2c9 100644 --- a/posthog/hogql/functions/mapping.py +++ b/posthog/hogql/functions/mapping.py @@ -340,6 +340,7 @@ def compare_types(arg_types: list[ConstantType], sig_arg_types: tuple[ConstantTy "arraySplit": HogQLFunctionMeta("arraySplit", 2, None), "arrayReverseFill": HogQLFunctionMeta("arrayReverseFill", 2, None), "arrayReverseSplit": HogQLFunctionMeta("arrayReverseSplit", 2, None), + "arrayRotateLeft": HogQLFunctionMeta("arrayRotateLeft", 2, 2), "arrayRotateRight": HogQLFunctionMeta("arrayRotateRight", 2, 2), "arrayExists": HogQLFunctionMeta("arrayExists", 1, None), "arrayAll": HogQLFunctionMeta("arrayAll", 1, None), diff --git a/posthog/hogql/printer.py b/posthog/hogql/printer.py index a77791034803c..1ebe4f229dcf8 100644 --- a/posthog/hogql/printer.py +++ b/posthog/hogql/printer.py @@ -8,7 +8,7 @@ from posthog.clickhouse.property_groups import property_groups from posthog.hogql import ast -from posthog.hogql.base import AST +from posthog.hogql.base import AST, _T_AST from posthog.hogql.constants import ( MAX_SELECT_RETURNED_ROWS, HogQLGlobalSettings, @@ -78,7 +78,7 @@ def to_printed_hogql(query: ast.Expr, team: Team, modifiers: Optional[HogQLQuery def print_ast( - node: ast.Expr, + node: _T_AST, context: HogQLContext, dialect: Literal["hogql", "clickhouse"], stack: Optional[list[ast.SelectQuery]] = None, @@ -99,12 +99,12 @@ def print_ast( def prepare_ast_for_printing( - node: ast.Expr, + node: _T_AST, context: HogQLContext, dialect: Literal["hogql", "clickhouse"], stack: Optional[list[ast.SelectQuery]] = None, settings: Optional[HogQLGlobalSettings] = None, -) -> ast.Expr | None: +) -> _T_AST | None: with context.timings.measure("create_hogql_database"): context.database = context.database or create_hogql_database(context.team_id, context.modifiers, context.team) @@ -166,7 +166,7 @@ def prepare_ast_for_printing( def print_prepared_ast( - node: ast.Expr, + node: _T_AST, context: HogQLContext, dialect: Literal["hogql", "clickhouse"], stack: Optional[list[ast.SelectQuery]] = None, @@ -523,6 +523,10 @@ def visit_join_expr(self, node: ast.JoinExpr) -> JoinExprResponse: join_strings.append(self._print_identifier(node.type.table.to_printed_hogql())) else: raise ImpossibleASTError(f"Unexpected LazyTableType for: {node.type.table.to_printed_hogql()}") + + elif self.dialect == "hogql": + join_strings.append(self.visit(node.table)) + else: raise QueryError( f"Only selecting from a table or a subquery is supported. 
Unexpected type: {node.type.__class__.__name__}" @@ -921,7 +925,7 @@ def visit_constant(self, node: ast.Constant): return self.context.add_value(node.value) def visit_field(self, node: ast.Field): - if node.type is None: + if node.type is None and self.dialect != "hogql": field = ".".join([self._print_hogql_identifier_or_index(identifier) for identifier in node.chain]) raise ImpossibleASTError(f"Field {field} has no type") diff --git a/posthog/hogql/resolver.py b/posthog/hogql/resolver.py index 06f6fbea9dfc7..b263843598f87 100644 --- a/posthog/hogql/resolver.py +++ b/posthog/hogql/resolver.py @@ -4,6 +4,7 @@ from posthog.hogql import ast from posthog.hogql.ast import ConstantType, FieldTraverserType +from posthog.hogql.base import _T_AST from posthog.hogql.context import HogQLContext from posthog.hogql.database.models import ( FunctionCallTable, @@ -77,11 +78,11 @@ def resolve_types_from_table( def resolve_types( - node: ast.Expr | ast.SelectQuery, + node: _T_AST, context: HogQLContext, dialect: Literal["hogql", "clickhouse"], scopes: Optional[list[ast.SelectQueryType]] = None, -) -> ast.Expr: +) -> _T_AST: return Resolver(scopes=scopes, context=context, dialect=dialect).visit(node) diff --git a/posthog/hogql/test/test_printer.py b/posthog/hogql/test/test_printer.py index fd735a8b87ce4..021dee3bcf947 100644 --- a/posthog/hogql/test/test_printer.py +++ b/posthog/hogql/test/test_printer.py @@ -114,6 +114,11 @@ def test_to_printed_hogql(self): repsponse, f"SELECT\n plus(1, 2),\n 3\nFROM\n events\nLIMIT {MAX_SELECT_RETURNED_ROWS}" ) + def test_print_to_string(self): + assert str(parse_select("select 1 + 2, 3 from events")) == "sql(SELECT plus(1, 2), 3 FROM events)" + assert str(parse_expr("1 + 2")) == "sql(plus(1, 2))" + assert str(parse_expr("unknown_field")) == "sql(unknown_field)" + def test_literals(self): self.assertEqual(self._expr("1 + 2"), "plus(1, 2)") self.assertEqual(self._expr("-1 + 2"), "plus(-1, 2)") diff --git a/posthog/hogql/transforms/in_cohort.py b/posthog/hogql/transforms/in_cohort.py index fec1a4d7ccd16..418b097e1d07d 100644 --- a/posthog/hogql/transforms/in_cohort.py +++ b/posthog/hogql/transforms/in_cohort.py @@ -2,6 +2,7 @@ from posthog.hogql import ast +from posthog.hogql.base import _T_AST from posthog.hogql.context import HogQLContext from posthog.hogql.errors import QueryError from posthog.hogql.escape_sql import escape_clickhouse_string @@ -11,7 +12,7 @@ def resolve_in_cohorts( - node: ast.Expr, + node: _T_AST, dialect: Literal["hogql", "clickhouse"], stack: Optional[list[ast.SelectQuery]] = None, context: HogQLContext = None, @@ -20,7 +21,7 @@ def resolve_in_cohorts( def resolve_in_cohorts_conjoined( - node: ast.Expr, + node: ast.AST, dialect: Literal["hogql", "clickhouse"], context: HogQLContext, stack: Optional[list[ast.SelectQuery]] = None, diff --git a/posthog/hogql/transforms/lazy_tables.py b/posthog/hogql/transforms/lazy_tables.py index aa0ccfada96e2..13fffdbb1a41c 100644 --- a/posthog/hogql/transforms/lazy_tables.py +++ b/posthog/hogql/transforms/lazy_tables.py @@ -2,6 +2,7 @@ from typing import Optional, cast, Literal from posthog.hogql import ast +from posthog.hogql.base import _T_AST from posthog.hogql.context import HogQLContext from posthog.hogql.database.models import LazyTableToAdd, LazyJoinToAdd from posthog.hogql.errors import ResolutionError @@ -13,7 +14,7 @@ # This mutates the nodes def resolve_lazy_tables( - node: ast.Expr, + node: _T_AST, dialect: Literal["hogql", "clickhouse"], stack: Optional[list[ast.SelectQuery]], context: HogQLContext, diff 
--git a/posthog/hogql/transforms/property_types.py b/posthog/hogql/transforms/property_types.py index c1bfcc622ff75..53d7df7ebf355 100644 --- a/posthog/hogql/transforms/property_types.py +++ b/posthog/hogql/transforms/property_types.py @@ -13,7 +13,7 @@ from posthog.hogql.database.s3_table import S3Table -def build_property_swapper(node: ast.Expr, context: HogQLContext) -> None: +def build_property_swapper(node: ast.AST, context: HogQLContext) -> None: from posthog.models import PropertyDefinition if not context or not context.team_id: diff --git a/posthog/hogql/transforms/test/__snapshots__/test_in_cohort.ambr b/posthog/hogql/transforms/test/__snapshots__/test_in_cohort.ambr index 369f18ab9d118..f018e96ef067a 100644 --- a/posthog/hogql/transforms/test/__snapshots__/test_in_cohort.ambr +++ b/posthog/hogql/transforms/test/__snapshots__/test_in_cohort.ambr @@ -31,7 +31,7 @@ FROM events LEFT JOIN ( SELECT person_static_cohort.person_id AS cohort_person_id, 1 AS matched, person_static_cohort.cohort_id AS cohort_id FROM person_static_cohort - WHERE and(equals(person_static_cohort.team_id, 420), in(person_static_cohort.cohort_id, [2]))) AS __in_cohort ON equals(__in_cohort.cohort_person_id, events.person_id) + WHERE and(equals(person_static_cohort.team_id, 420), in(person_static_cohort.cohort_id, [4]))) AS __in_cohort ON equals(__in_cohort.cohort_person_id, events.person_id) WHERE and(equals(events.team_id, 420), 1, ifNull(equals(__in_cohort.matched, 1), 0)) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, max_ast_elements=4000000, max_expanded_ast_elements=4000000, max_bytes_before_external_group_by=0 @@ -42,7 +42,7 @@ FROM events LEFT JOIN ( SELECT person_id AS cohort_person_id, 1 AS matched, cohort_id FROM static_cohort_people - WHERE in(cohort_id, [2])) AS __in_cohort ON equals(__in_cohort.cohort_person_id, person_id) + WHERE in(cohort_id, [4])) AS __in_cohort ON equals(__in_cohort.cohort_person_id, person_id) WHERE and(1, equals(__in_cohort.matched, 1)) LIMIT 100 ''' @@ -55,7 +55,7 @@ FROM events LEFT JOIN ( SELECT person_static_cohort.person_id AS cohort_person_id, 1 AS matched, person_static_cohort.cohort_id AS cohort_id FROM person_static_cohort - WHERE and(equals(person_static_cohort.team_id, 420), in(person_static_cohort.cohort_id, [3]))) AS __in_cohort ON equals(__in_cohort.cohort_person_id, events.person_id) + WHERE and(equals(person_static_cohort.team_id, 420), in(person_static_cohort.cohort_id, [5]))) AS __in_cohort ON equals(__in_cohort.cohort_person_id, events.person_id) WHERE and(equals(events.team_id, 420), 1, ifNull(equals(__in_cohort.matched, 1), 0)) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, max_ast_elements=4000000, max_expanded_ast_elements=4000000, max_bytes_before_external_group_by=0 @@ -66,7 +66,7 @@ FROM events LEFT JOIN ( SELECT person_id AS cohort_person_id, 1 AS matched, cohort_id FROM static_cohort_people - WHERE in(cohort_id, [3])) AS __in_cohort ON equals(__in_cohort.cohort_person_id, person_id) + WHERE in(cohort_id, [5])) AS __in_cohort ON equals(__in_cohort.cohort_person_id, person_id) WHERE and(1, equals(__in_cohort.matched, 1)) LIMIT 100 ''' diff --git a/posthog/hogql/variables.py b/posthog/hogql/variables.py index f4e8458823810..efad02ac95417 100644 --- a/posthog/hogql/variables.py +++ b/posthog/hogql/variables.py @@ -44,7 +44,11 @@ def visit_placeholder(self, node): if not matching_insight_variable: raise 
QueryError(f"Variable {variable_code_name} does not exist") - value = matching_variable.value or matching_insight_variable[0].default_value + value = ( + matching_variable.value + if matching_variable.value is not None + else matching_insight_variable[0].default_value + ) return ast.Constant(value=value) diff --git a/posthog/hogql_queries/apply_dashboard_filters.py b/posthog/hogql_queries/apply_dashboard_filters.py index 6d8e74f0fb588..9cb016eb48da9 100644 --- a/posthog/hogql_queries/apply_dashboard_filters.py +++ b/posthog/hogql_queries/apply_dashboard_filters.py @@ -22,3 +22,31 @@ def apply_dashboard_filters_to_dict(query: dict, filters: dict, team: Team) -> d return query query_runner.apply_dashboard_filters(DashboardFilter(**filters)) return query_runner.query.model_dump() + + +# Apply the variables from the django-style Dashboard object +def apply_dashboard_variables_to_dict(query: dict, variables_overrides: dict[str, dict], team: Team) -> dict: + if not variables_overrides: + return query + + if query.get("kind") in WRAPPER_NODE_KINDS: + source = apply_dashboard_variables_to_dict(query["source"], variables_overrides, team) + return {**query, "source": source} + + if query.get("kind") == NodeKind.HOG_QL_QUERY: + query_variables: dict[str, dict] | None = query.get("variables") + if query_variables is None: + return query + + for variable_id, overriden_hogql_variable in variables_overrides.items(): + query_variable = query_variables.get(variable_id) + if query_variable: + query_variables[variable_id] = { + "variableId": variable_id, + "code_name": query_variable["code_name"], + "value": overriden_hogql_variable.get("value"), + } + + return {**query, "variables": query_variables} + + return query diff --git a/posthog/hogql_queries/error_tracking_query_runner.py b/posthog/hogql_queries/error_tracking_query_runner.py index dc40e966d2eb5..f32b55a4a078d 100644 --- a/posthog/hogql_queries/error_tracking_query_runner.py +++ b/posthog/hogql_queries/error_tracking_query_runner.py @@ -149,7 +149,6 @@ def where(self): or_exprs: list[ast.Expr] = [] props_to_search = [ "$exception_list", - "$exception_stack_trace_raw", "$exception_type", "$exception_message", ] diff --git a/posthog/hogql_queries/experiments/experiment_funnel_query_runner.py b/posthog/hogql_queries/experiments/experiment_funnel_query_runner.py deleted file mode 100644 index 0ff9a1058977b..0000000000000 --- a/posthog/hogql_queries/experiments/experiment_funnel_query_runner.py +++ /dev/null @@ -1,93 +0,0 @@ -from posthog.hogql import ast -from posthog.hogql_queries.query_runner import QueryRunner -from posthog.models.experiment import Experiment -from ..insights.funnels.funnels_query_runner import FunnelsQueryRunner -from posthog.schema import ( - CachedExperimentFunnelQueryResponse, - ExperimentFunnelQuery, - ExperimentFunnelQueryResponse, - ExperimentVariantFunnelResult, - FunnelsQuery, - InsightDateRange, - BreakdownFilter, -) -from typing import Any -from zoneinfo import ZoneInfo - - -class ExperimentFunnelQueryRunner(QueryRunner): - query: ExperimentFunnelQuery - response: ExperimentFunnelQueryResponse - cached_response: CachedExperimentFunnelQueryResponse - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.experiment = Experiment.objects.get(id=self.query.experiment_id) - self.feature_flag = self.experiment.feature_flag - self.prepared_funnel_query = self._prepare_funnel_query() - self.query_runner = FunnelsQueryRunner( - query=self.prepared_funnel_query, team=self.team, timings=self.timings, 
limit_context=self.limit_context - ) - - def calculate(self) -> ExperimentFunnelQueryResponse: - response = self.query_runner.calculate() - results = self._process_results(response.results) - return ExperimentFunnelQueryResponse(insight="FUNNELS", results=results) - - def _prepare_funnel_query(self) -> FunnelsQuery: - """ - This method takes the raw funnel query and adapts it - for the needs of experiment analysis: - - 1. Set the date range to match the experiment's duration, using the project's timezone. - 2. Configure the breakdown to use the feature flag key, which allows us - to separate results for different experiment variants. - """ - # Clone the source query - prepared_funnel_query = FunnelsQuery(**self.query.source.model_dump()) - - # Set the date range to match the experiment's duration, using the project's timezone - if self.team.timezone: - tz = ZoneInfo(self.team.timezone) - start_date = self.experiment.start_date.astimezone(tz) if self.experiment.start_date else None - end_date = self.experiment.end_date.astimezone(tz) if self.experiment.end_date else None - else: - start_date = self.experiment.start_date - end_date = self.experiment.end_date - - prepared_funnel_query.dateRange = InsightDateRange( - date_from=start_date.isoformat() if start_date else None, - date_to=end_date.isoformat() if end_date else None, - explicitDate=True, - ) - - # Configure the breakdown to use the feature flag key - prepared_funnel_query.breakdownFilter = BreakdownFilter( - breakdown=f"$feature/{self.feature_flag.key}", - breakdown_type="event", - ) - - return prepared_funnel_query - - def _process_results(self, funnels_results: list[list[dict[str, Any]]]) -> dict[str, ExperimentVariantFunnelResult]: - variants = self.feature_flag.variants - processed_results = { - variant["key"]: ExperimentVariantFunnelResult(key=variant["key"], success_count=0, failure_count=0) - for variant in variants - } - - for result in funnels_results: - first_step = result[0] - last_step = result[-1] - variant = first_step.get("breakdown_value") - variant_str = variant[0] if isinstance(variant, list) else str(variant) - if variant_str in processed_results: - total_count = first_step.get("count", 0) - success_count = last_step.get("count", 0) if len(result) > 1 else 0 - processed_results[variant_str].success_count = success_count - processed_results[variant_str].failure_count = total_count - success_count - - return processed_results - - def to_query(self) -> ast.SelectQuery: - raise ValueError(f"Cannot convert source query of type {self.query.source.kind} to query") diff --git a/posthog/hogql_queries/experiments/experiment_funnels_query_runner.py b/posthog/hogql_queries/experiments/experiment_funnels_query_runner.py new file mode 100644 index 0000000000000..c6783daa489e0 --- /dev/null +++ b/posthog/hogql_queries/experiments/experiment_funnels_query_runner.py @@ -0,0 +1,181 @@ +import json +from posthog.constants import ExperimentNoResultsErrorKeys +from posthog.hogql import ast +from posthog.hogql_queries.experiments import CONTROL_VARIANT_KEY +from posthog.hogql_queries.experiments.funnels_statistics import ( + are_results_significant, + calculate_credible_intervals, + calculate_probabilities, +) +from posthog.hogql_queries.query_runner import QueryRunner +from posthog.models.experiment import Experiment +from ..insights.funnels.funnels_query_runner import FunnelsQueryRunner +from posthog.schema import ( + CachedExperimentFunnelsQueryResponse, + ExperimentFunnelsQuery, + ExperimentFunnelsQueryResponse, + 
ExperimentSignificanceCode, + ExperimentVariantFunnelsBaseStats, + FunnelsQuery, + FunnelsQueryResponse, + InsightDateRange, + BreakdownFilter, +) +from typing import Optional, Any, cast +from zoneinfo import ZoneInfo +from rest_framework.exceptions import ValidationError + + +class ExperimentFunnelsQueryRunner(QueryRunner): + query: ExperimentFunnelsQuery + response: ExperimentFunnelsQueryResponse + cached_response: CachedExperimentFunnelsQueryResponse + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.experiment = Experiment.objects.get(id=self.query.experiment_id) + self.feature_flag = self.experiment.feature_flag + self.variants = [variant["key"] for variant in self.feature_flag.variants] + self.prepared_funnel_query = self._prepare_funnel_query() + self.funnels_query_runner = FunnelsQueryRunner( + query=self.prepared_funnel_query, team=self.team, timings=self.timings, limit_context=self.limit_context + ) + + def calculate(self) -> ExperimentFunnelsQueryResponse: + funnels_result = self.funnels_query_runner.calculate() + + self._validate_event_variants(funnels_result) + + # Statistical analysis + control_variant, test_variants = self._get_variants_with_base_stats(funnels_result) + probabilities = calculate_probabilities(control_variant, test_variants) + significance_code, loss = are_results_significant(control_variant, test_variants, probabilities) + credible_intervals = calculate_credible_intervals([control_variant, *test_variants]) + + return ExperimentFunnelsQueryResponse( + insight=funnels_result, + variants=[variant.model_dump() for variant in [control_variant, *test_variants]], + probability={ + variant.key: probability + for variant, probability in zip([control_variant, *test_variants], probabilities) + }, + significant=significance_code == ExperimentSignificanceCode.SIGNIFICANT, + significance_code=significance_code, + expected_loss=loss, + credible_intervals=credible_intervals, + ) + + def _prepare_funnel_query(self) -> FunnelsQuery: + """ + This method takes the raw funnel query and adapts it + for the needs of experiment analysis: + + 1. Set the date range to match the experiment's duration, using the project's timezone. + 2. Configure the breakdown to use the feature flag key, which allows us + to separate results for different experiment variants. 
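For illustration, assuming a feature flag keyed "pricing-page-test" and an experiment running June 1-15, 2024 in UTC (hypothetical values), the prepared query would end up carrying roughly:

    prepared_funnel_query.dateRange = InsightDateRange(
        date_from="2024-06-01T00:00:00+00:00",
        date_to="2024-06-15T00:00:00+00:00",
        explicitDate=True,
    )
    prepared_funnel_query.breakdownFilter = BreakdownFilter(
        breakdown="$feature/pricing-page-test",
        breakdown_type="event",
    )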
+ """ + # Clone the source query + prepared_funnel_query = FunnelsQuery(**self.query.source.model_dump()) + + # Set the date range to match the experiment's duration, using the project's timezone + if self.team.timezone: + tz = ZoneInfo(self.team.timezone) + start_date = self.experiment.start_date.astimezone(tz) if self.experiment.start_date else None + end_date = self.experiment.end_date.astimezone(tz) if self.experiment.end_date else None + else: + start_date = self.experiment.start_date + end_date = self.experiment.end_date + + prepared_funnel_query.dateRange = InsightDateRange( + date_from=start_date.isoformat() if start_date else None, + date_to=end_date.isoformat() if end_date else None, + explicitDate=True, + ) + + # Configure the breakdown to use the feature flag key + prepared_funnel_query.breakdownFilter = BreakdownFilter( + breakdown=f"$feature/{self.feature_flag.key}", + breakdown_type="event", + ) + + return prepared_funnel_query + + def _get_variants_with_base_stats( + self, funnels_result: FunnelsQueryResponse + ) -> tuple[ExperimentVariantFunnelsBaseStats, list[ExperimentVariantFunnelsBaseStats]]: + control_variant: Optional[ExperimentVariantFunnelsBaseStats] = None + test_variants = [] + + for result in funnels_result.results: + result_dict = cast(list[dict[str, Any]], result) + first_step = result_dict[0] + last_step = result_dict[-1] + + total = first_step.get("count", 0) + success = last_step.get("count", 0) if len(result_dict) > 1 else 0 + failure = total - success + + breakdown_value = cast(list[str], first_step["breakdown_value"])[0] + + if breakdown_value == CONTROL_VARIANT_KEY: + control_variant = ExperimentVariantFunnelsBaseStats( + key=breakdown_value, + success_count=int(success), + failure_count=int(failure), + ) + else: + test_variants.append( + ExperimentVariantFunnelsBaseStats( + key=breakdown_value, success_count=int(success), failure_count=int(failure) + ) + ) + + if control_variant is None: + raise ValueError("Control variant not found in count results") + + return control_variant, test_variants + + def _validate_event_variants(self, funnels_result: FunnelsQueryResponse): + errors = { + ExperimentNoResultsErrorKeys.NO_EVENTS: True, + ExperimentNoResultsErrorKeys.NO_FLAG_INFO: True, + ExperimentNoResultsErrorKeys.NO_CONTROL_VARIANT: True, + ExperimentNoResultsErrorKeys.NO_TEST_VARIANT: True, + } + + if not funnels_result.results or not funnels_result.results: + raise ValidationError(code="no-results", detail=json.dumps(errors)) + + errors[ExperimentNoResultsErrorKeys.NO_EVENTS] = False + + # Funnels: the first step must be present for *any* results to show up + eventsWithOrderZero = [] + for eventArr in funnels_result.results: + for event in eventArr: + event_dict = cast(dict[str, Any], event) + if event_dict.get("order") == 0: + eventsWithOrderZero.append(event_dict) + + # Check if "control" is present + for event in eventsWithOrderZero: + event_variant = event.get("breakdown_value", [None])[0] + if event_variant == "control": + errors[ExperimentNoResultsErrorKeys.NO_CONTROL_VARIANT] = False + errors[ExperimentNoResultsErrorKeys.NO_FLAG_INFO] = False + break + + # Check if at least one of the test variants is present + test_variants = [variant for variant in self.variants if variant != "control"] + for event in eventsWithOrderZero: + event_variant = event.get("breakdown_value", [None])[0] + if event_variant in test_variants: + errors[ExperimentNoResultsErrorKeys.NO_TEST_VARIANT] = False + errors[ExperimentNoResultsErrorKeys.NO_FLAG_INFO] = False + break + 
+ has_errors = any(errors.values()) + if has_errors: + raise ValidationError(detail=json.dumps(errors)) + + def to_query(self) -> ast.SelectQuery: + raise ValueError(f"Cannot convert source query of type {self.query.source.kind} to query") diff --git a/posthog/hogql_queries/experiments/experiment_trend_query_runner.py b/posthog/hogql_queries/experiments/experiment_trends_query_runner.py similarity index 62% rename from posthog/hogql_queries/experiments/experiment_trend_query_runner.py rename to posthog/hogql_queries/experiments/experiment_trends_query_runner.py index b63ee09bd2c09..7389b65a29bf6 100644 --- a/posthog/hogql_queries/experiments/experiment_trend_query_runner.py +++ b/posthog/hogql_queries/experiments/experiment_trends_query_runner.py @@ -1,38 +1,50 @@ +import json from zoneinfo import ZoneInfo from django.conf import settings +from posthog.constants import ExperimentNoResultsErrorKeys from posthog.hogql import ast +from posthog.hogql_queries.experiments import CONTROL_VARIANT_KEY +from posthog.hogql_queries.experiments.trends_statistics import ( + are_results_significant, + calculate_credible_intervals, + calculate_probabilities, +) from posthog.hogql_queries.insights.trends.trends_query_runner import TrendsQueryRunner from posthog.hogql_queries.query_runner import QueryRunner from posthog.models.experiment import Experiment from posthog.queries.trends.util import ALL_SUPPORTED_MATH_FUNCTIONS +from rest_framework.exceptions import ValidationError from posthog.schema import ( BaseMathType, BreakdownFilter, - CachedExperimentTrendQueryResponse, + CachedExperimentTrendsQueryResponse, ChartDisplayType, EventPropertyFilter, EventsNode, - ExperimentTrendQuery, - ExperimentTrendQueryResponse, - ExperimentVariantTrendResult, + ExperimentSignificanceCode, + ExperimentTrendsQuery, + ExperimentTrendsQueryResponse, + ExperimentVariantTrendsBaseStats, InsightDateRange, PropertyMathType, TrendsFilter, TrendsQuery, + TrendsQueryResponse, ) from typing import Any, Optional import threading -class ExperimentTrendQueryRunner(QueryRunner): - query: ExperimentTrendQuery - response: ExperimentTrendQueryResponse - cached_response: CachedExperimentTrendQueryResponse +class ExperimentTrendsQueryRunner(QueryRunner): + query: ExperimentTrendsQuery + response: ExperimentTrendsQueryResponse + cached_response: CachedExperimentTrendsQueryResponse def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.experiment = Experiment.objects.get(id=self.query.experiment_id) self.feature_flag = self.experiment.feature_flag + self.variants = [variant["key"] for variant in self.feature_flag.variants] self.breakdown_key = f"$feature/{self.feature_flag.key}" self.prepared_count_query = self._prepare_count_query() @@ -195,8 +207,8 @@ def _prepare_exposure_query(self) -> TrendsQuery: return prepared_exposure_query - def calculate(self) -> ExperimentTrendQueryResponse: - shared_results: dict[str, Optional[Any]] = {"count_response": None, "exposure_response": None} + def calculate(self) -> ExperimentTrendsQueryResponse: + shared_results: dict[str, Optional[Any]] = {"count_result": None, "exposure_result": None} errors = [] def run(query_runner: TrendsQueryRunner, result_key: str, is_parallel: bool): @@ -214,12 +226,12 @@ def run(query_runner: TrendsQueryRunner, result_key: str, is_parallel: bool): # This exists so that we're not spawning threads during unit tests if settings.IN_UNIT_TESTING: - run(self.count_query_runner, "count_response", False) - run(self.exposure_query_runner, "exposure_response", 
False) + run(self.count_query_runner, "count_result", False) + run(self.exposure_query_runner, "exposure_result", False) else: jobs = [ - threading.Thread(target=run, args=(self.count_query_runner, "count_response", True)), - threading.Thread(target=run, args=(self.exposure_query_runner, "exposure_response", True)), + threading.Thread(target=run, args=(self.count_query_runner, "count_result", True)), + threading.Thread(target=run, args=(self.exposure_query_runner, "exposure_result", True)), ] [j.start() for j in jobs] # type: ignore [j.join() for j in jobs] # type: ignore @@ -228,35 +240,112 @@ def run(query_runner: TrendsQueryRunner, result_key: str, is_parallel: bool): if errors: raise errors[0] - count_response = shared_results["count_response"] - exposure_response = shared_results["exposure_response"] + count_result = shared_results["count_result"] + exposure_result = shared_results["exposure_result"] - if count_response is None or exposure_response is None: + if count_result is None or exposure_result is None: raise ValueError("One or both query runners failed to produce a response") - results = self._process_results(count_response.results, exposure_response.results) - return ExperimentTrendQueryResponse(insight="TRENDS", results=results) + self._validate_event_variants(count_result) + + # Statistical analysis + control_variant, test_variants = self._get_variants_with_base_stats(count_result, exposure_result) + probabilities = calculate_probabilities(control_variant, test_variants) + significance_code, p_value = are_results_significant(control_variant, test_variants, probabilities) + credible_intervals = calculate_credible_intervals([control_variant, *test_variants]) + + return ExperimentTrendsQueryResponse( + insight=count_result, + variants=[variant.model_dump() for variant in [control_variant, *test_variants]], + probability={ + variant.key: probability + for variant, probability in zip([control_variant, *test_variants], probabilities) + }, + significant=significance_code == ExperimentSignificanceCode.SIGNIFICANT, + significance_code=significance_code, + p_value=p_value, + credible_intervals=credible_intervals, + ) - def _process_results( - self, count_results: list[dict[str, Any]], exposure_results: list[dict[str, Any]] - ) -> dict[str, ExperimentVariantTrendResult]: - variants = self.feature_flag.variants - processed_results = { - variant["key"]: ExperimentVariantTrendResult(key=variant["key"], count=0, exposure=0, absolute_exposure=0) - for variant in variants - } + def _get_variants_with_base_stats( + self, count_results: TrendsQueryResponse, exposure_results: TrendsQueryResponse + ) -> tuple[ExperimentVariantTrendsBaseStats, list[ExperimentVariantTrendsBaseStats]]: + control_variant: Optional[ExperimentVariantTrendsBaseStats] = None + test_variants = [] + exposure_counts = {} + exposure_ratios = {} + + for result in exposure_results.results: + count = result.get("count", 0) + breakdown_value = result.get("breakdown_value") + exposure_counts[breakdown_value] = count + + control_exposure = exposure_counts.get(CONTROL_VARIANT_KEY, 0) + + if control_exposure != 0: + for key, count in exposure_counts.items(): + exposure_ratios[key] = count / control_exposure + + for result in count_results.results: + count = result.get("count", 0) + breakdown_value = result.get("breakdown_value") + if breakdown_value == CONTROL_VARIANT_KEY: + control_variant = ExperimentVariantTrendsBaseStats( + key=breakdown_value, + count=count, + exposure=1, + # TODO: in the absence of exposure data, we should 
throw rather than default to 1 + absolute_exposure=exposure_counts.get(breakdown_value, 1), + ) + else: + test_variants.append( + ExperimentVariantTrendsBaseStats( + key=breakdown_value, + count=count, + # TODO: in the absence of exposure data, we should throw rather than default to 1 + exposure=exposure_ratios.get(breakdown_value, 1), + absolute_exposure=exposure_counts.get(breakdown_value, 1), + ) + ) - for result in count_results: - variant = result.get("breakdown_value") - if variant in processed_results: - processed_results[variant].count += result.get("count", 0) + if control_variant is None: + raise ValueError("Control variant not found in count results") - for result in exposure_results: - variant = result.get("breakdown_value") - if variant in processed_results: - processed_results[variant].absolute_exposure += result.get("count", 0) + return control_variant, test_variants + + def _validate_event_variants(self, count_result: TrendsQueryResponse): + errors = { + ExperimentNoResultsErrorKeys.NO_EVENTS: True, + ExperimentNoResultsErrorKeys.NO_FLAG_INFO: True, + ExperimentNoResultsErrorKeys.NO_CONTROL_VARIANT: True, + ExperimentNoResultsErrorKeys.NO_TEST_VARIANT: True, + } - return processed_results + if not count_result.results or not count_result.results[0]: + raise ValidationError(code="no-results", detail=json.dumps(errors)) + + errors[ExperimentNoResultsErrorKeys.NO_EVENTS] = False + + # Check if "control" is present + for event in count_result.results: + event_variant = event.get("breakdown_value") + if event_variant == "control": + errors[ExperimentNoResultsErrorKeys.NO_CONTROL_VARIANT] = False + errors[ExperimentNoResultsErrorKeys.NO_FLAG_INFO] = False + break + # Check if at least one of the test variants is present + test_variants = [variant for variant in self.variants if variant != "control"] + + for event in count_result.results: + event_variant = event.get("breakdown_value") + if event_variant in test_variants: + errors[ExperimentNoResultsErrorKeys.NO_TEST_VARIANT] = False + errors[ExperimentNoResultsErrorKeys.NO_FLAG_INFO] = False + break + + has_errors = any(errors.values()) + if has_errors: + raise ValidationError(detail=json.dumps(errors)) def to_query(self) -> ast.SelectQuery: raise ValueError(f"Cannot convert source query of type {self.query.count_query.kind} to query") diff --git a/posthog/hogql_queries/experiments/funnel_statistics.py b/posthog/hogql_queries/experiments/funnels_statistics.py similarity index 92% rename from posthog/hogql_queries/experiments/funnel_statistics.py rename to posthog/hogql_queries/experiments/funnels_statistics.py index 86a4474a8d5df..cdec48fa3c681 100644 --- a/posthog/hogql_queries/experiments/funnel_statistics.py +++ b/posthog/hogql_queries/experiments/funnels_statistics.py @@ -2,20 +2,19 @@ from numpy.random import default_rng from sentry_sdk import capture_exception import scipy.stats as stats -from posthog.constants import ExperimentSignificanceCode from posthog.hogql_queries.experiments import ( EXPECTED_LOSS_SIGNIFICANCE_LEVEL, FF_DISTRIBUTION_THRESHOLD, MIN_PROBABILITY_FOR_SIGNIFICANCE, ) -from posthog.schema import ExperimentVariantFunnelResult +from posthog.schema import ExperimentSignificanceCode, ExperimentVariantFunnelsBaseStats Probability = float def calculate_probabilities( - control_variant: ExperimentVariantFunnelResult, - test_variants: list[ExperimentVariantFunnelResult], + control_variant: ExperimentVariantFunnelsBaseStats, + test_variants: list[ExperimentVariantFunnelsBaseStats], priors: tuple[int, int] = (1, 1), 
) -> list[Probability]: """ @@ -61,7 +60,7 @@ def calculate_probabilities( def simulate_winning_variant_for_conversion( - target_variant: ExperimentVariantFunnelResult, variants: list[ExperimentVariantFunnelResult] + target_variant: ExperimentVariantFunnelsBaseStats, variants: list[ExperimentVariantFunnelsBaseStats] ) -> Probability: random_sampler = default_rng() prior_success = 1 @@ -95,11 +94,11 @@ def simulate_winning_variant_for_conversion( def are_results_significant( - control_variant: ExperimentVariantFunnelResult, - test_variants: list[ExperimentVariantFunnelResult], + control_variant: ExperimentVariantFunnelsBaseStats, + test_variants: list[ExperimentVariantFunnelsBaseStats], probabilities: list[Probability], ) -> tuple[ExperimentSignificanceCode, Probability]: - def get_conversion_rate(variant: ExperimentVariantFunnelResult): + def get_conversion_rate(variant: ExperimentVariantFunnelsBaseStats): return variant.success_count / (variant.success_count + variant.failure_count) control_sample_size = control_variant.success_count + control_variant.failure_count @@ -137,7 +136,7 @@ def get_conversion_rate(variant: ExperimentVariantFunnelResult): def calculate_expected_loss( - target_variant: ExperimentVariantFunnelResult, variants: list[ExperimentVariantFunnelResult] + target_variant: ExperimentVariantFunnelsBaseStats, variants: list[ExperimentVariantFunnelsBaseStats] ) -> float: """ Calculates expected loss in conversion rate for a given variant. diff --git a/posthog/hogql_queries/experiments/test/test_experiment_funnel_query_runner.py b/posthog/hogql_queries/experiments/test/test_experiment_funnel_query_runner.py deleted file mode 100644 index 7d1472d29315a..0000000000000 --- a/posthog/hogql_queries/experiments/test/test_experiment_funnel_query_runner.py +++ /dev/null @@ -1,107 +0,0 @@ -from posthog.hogql_queries.experiments.experiment_funnel_query_runner import ExperimentFunnelQueryRunner -from posthog.models.experiment import Experiment -from posthog.models.feature_flag.feature_flag import FeatureFlag -from posthog.schema import ( - EventsNode, - ExperimentFunnelQuery, - ExperimentFunnelQueryResponse, - FunnelsQuery, -) -from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, _create_person, flush_persons_and_events -from freezegun import freeze_time -from typing import cast -from django.utils import timezone -from datetime import timedelta - - -class TestExperimentFunnelQueryRunner(ClickhouseTestMixin, APIBaseTest): - @freeze_time("2020-01-01T12:00:00Z") - def test_query_runner(self): - feature_flag = FeatureFlag.objects.create( - name="Test experiment flag", - key="test-experiment", - team=self.team, - filters={ - "groups": [{"properties": [], "rollout_percentage": None}], - "multivariate": { - "variants": [ - { - "key": "control", - "name": "Control", - "rollout_percentage": 50, - }, - { - "key": "test", - "name": "Test", - "rollout_percentage": 50, - }, - ] - }, - }, - created_by=self.user, - ) - - experiment = Experiment.objects.create( - name="test-experiment", - team=self.team, - feature_flag=feature_flag, - start_date=timezone.now(), - end_date=timezone.now() + timedelta(days=14), - ) - - feature_flag_property = f"$feature/{feature_flag.key}" - - funnels_query = FunnelsQuery( - series=[EventsNode(event="$pageview"), EventsNode(event="purchase")], - dateRange={"date_from": "2020-01-01", "date_to": "2020-01-14"}, - ) - experiment_query = ExperimentFunnelQuery( - experiment_id=experiment.id, - kind="ExperimentFunnelQuery", - source=funnels_query, - ) - - 
experiment.metrics = [{"type": "primary", "query": experiment_query.model_dump()}] - experiment.save() - - for variant, purchase_count in [("control", 6), ("test", 8)]: - for i in range(10): - _create_person(distinct_ids=[f"user_{variant}_{i}"], team_id=self.team.pk) - _create_event( - team=self.team, - event="$pageview", - distinct_id=f"user_{variant}_{i}", - timestamp="2020-01-02T12:00:00Z", - properties={feature_flag_property: variant}, - ) - if i < purchase_count: - _create_event( - team=self.team, - event="purchase", - distinct_id=f"user_{variant}_{i}", - timestamp="2020-01-02T12:01:00Z", - properties={feature_flag_property: variant}, - ) - - flush_persons_and_events() - - query_runner = ExperimentFunnelQueryRunner( - query=ExperimentFunnelQuery(**experiment.metrics[0]["query"]), team=self.team - ) - result = query_runner.calculate() - - self.assertEqual(result.insight, "FUNNELS") - self.assertEqual(len(result.results), 2) - - funnel_result = cast(ExperimentFunnelQueryResponse, result) - - self.assertIn("control", funnel_result.results) - self.assertIn("test", funnel_result.results) - - control_result = funnel_result.results["control"] - test_result = funnel_result.results["test"] - - self.assertEqual(control_result.success_count, 6) - self.assertEqual(control_result.failure_count, 4) - self.assertEqual(test_result.success_count, 8) - self.assertEqual(test_result.failure_count, 2) diff --git a/posthog/hogql_queries/experiments/test/test_experiment_funnels_query_runner.py b/posthog/hogql_queries/experiments/test/test_experiment_funnels_query_runner.py new file mode 100644 index 0000000000000..005fe82e089ae --- /dev/null +++ b/posthog/hogql_queries/experiments/test/test_experiment_funnels_query_runner.py @@ -0,0 +1,359 @@ +from typing import cast +from posthog.hogql_queries.experiments.experiment_funnels_query_runner import ExperimentFunnelsQueryRunner +from posthog.models.experiment import Experiment +from posthog.models.feature_flag.feature_flag import FeatureFlag +from posthog.schema import ( + EventsNode, + ExperimentFunnelsQuery, + ExperimentSignificanceCode, + FunnelsQuery, +) +from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, _create_person, flush_persons_and_events +from freezegun import freeze_time +from django.utils import timezone +from datetime import timedelta +from rest_framework.exceptions import ValidationError +from posthog.constants import ExperimentNoResultsErrorKeys +import json +from posthog.test.test_journeys import journeys_for + + +class TestExperimentFunnelsQueryRunner(ClickhouseTestMixin, APIBaseTest): + def create_feature_flag(self, key="test-experiment"): + return FeatureFlag.objects.create( + name=f"Test experiment flag: {key}", + key=key, + team=self.team, + filters={ + "groups": [{"properties": [], "rollout_percentage": None}], + "multivariate": { + "variants": [ + { + "key": "control", + "name": "Control", + "rollout_percentage": 50, + }, + { + "key": "test", + "name": "Test", + "rollout_percentage": 50, + }, + ] + }, + }, + created_by=self.user, + ) + + def create_experiment(self, name="test-experiment", feature_flag=None): + if feature_flag is None: + feature_flag = self.create_feature_flag(name) + return Experiment.objects.create( + name=name, + team=self.team, + feature_flag=feature_flag, + start_date=timezone.now(), + end_date=timezone.now() + timedelta(days=14), + ) + + @freeze_time("2020-01-01T12:00:00Z") + def test_query_runner(self): + feature_flag = self.create_feature_flag() + experiment = 
self.create_experiment(feature_flag=feature_flag) + + feature_flag_property = f"$feature/{feature_flag.key}" + + funnels_query = FunnelsQuery( + series=[EventsNode(event="$pageview"), EventsNode(event="purchase")], + dateRange={"date_from": "2020-01-01", "date_to": "2020-01-14"}, + ) + experiment_query = ExperimentFunnelsQuery( + experiment_id=experiment.id, + kind="ExperimentFunnelsQuery", + source=funnels_query, + ) + + experiment.metrics = [{"type": "primary", "query": experiment_query.model_dump()}] + experiment.save() + + for variant, purchase_count in [("control", 6), ("test", 8)]: + for i in range(10): + _create_person(distinct_ids=[f"user_{variant}_{i}"], team_id=self.team.pk) + _create_event( + team=self.team, + event="$pageview", + distinct_id=f"user_{variant}_{i}", + timestamp="2020-01-02T12:00:00Z", + properties={feature_flag_property: variant}, + ) + if i < purchase_count: + _create_event( + team=self.team, + event="purchase", + distinct_id=f"user_{variant}_{i}", + timestamp="2020-01-02T12:01:00Z", + properties={feature_flag_property: variant}, + ) + + flush_persons_and_events() + + query_runner = ExperimentFunnelsQueryRunner( + query=ExperimentFunnelsQuery(**experiment.metrics[0]["query"]), team=self.team + ) + result = query_runner.calculate() + + self.assertEqual(len(result.variants), 2) + + control_variant = next(variant for variant in result.variants if variant.key == "control") + test_variant = next(variant for variant in result.variants if variant.key == "test") + + self.assertEqual(control_variant.success_count, 6) + self.assertEqual(control_variant.failure_count, 4) + self.assertEqual(test_variant.success_count, 8) + self.assertEqual(test_variant.failure_count, 2) + + self.assertIn("control", result.probability) + self.assertIn("test", result.probability) + + self.assertIn("control", result.credible_intervals) + self.assertIn("test", result.credible_intervals) + + @freeze_time("2020-01-01T12:00:00Z") + def test_query_runner_standard_flow(self): + feature_flag = self.create_feature_flag() + experiment = self.create_experiment(feature_flag=feature_flag) + + ff_property = f"$feature/{feature_flag.key}" + funnels_query = FunnelsQuery( + series=[EventsNode(event="$pageview"), EventsNode(event="purchase")], + dateRange={"date_from": "2020-01-01", "date_to": "2020-01-14"}, + ) + experiment_query = ExperimentFunnelsQuery( + experiment_id=experiment.id, + kind="ExperimentFunnelsQuery", + source=funnels_query, + ) + + experiment.metrics = [{"type": "primary", "query": experiment_query.model_dump()}] + experiment.save() + + journeys_for( + { + "user_control_1": [ + {"event": "$pageview", "timestamp": "2020-01-02", "properties": {ff_property: "control"}}, + {"event": "purchase", "timestamp": "2020-01-03", "properties": {ff_property: "control"}}, + ], + "user_control_2": [ + {"event": "$pageview", "timestamp": "2020-01-02", "properties": {ff_property: "control"}}, + ], + "user_control_3": [ + {"event": "$pageview", "timestamp": "2020-01-02", "properties": {ff_property: "control"}}, + {"event": "purchase", "timestamp": "2020-01-03", "properties": {ff_property: "control"}}, + ], + "user_test_1": [ + {"event": "$pageview", "timestamp": "2020-01-02", "properties": {ff_property: "test"}}, + {"event": "purchase", "timestamp": "2020-01-03", "properties": {ff_property: "test"}}, + ], + "user_test_2": [ + {"event": "$pageview", "timestamp": "2020-01-02", "properties": {ff_property: "test"}}, + {"event": "purchase", "timestamp": "2020-01-03", "properties": {ff_property: "test"}}, + ], + 
"user_test_3": [ + {"event": "$pageview", "timestamp": "2020-01-02", "properties": {ff_property: "test"}}, + ], + "user_test_4": [ + {"event": "$pageview", "timestamp": "2020-01-02", "properties": {ff_property: "test"}}, + {"event": "purchase", "timestamp": "2020-01-03", "properties": {ff_property: "test"}}, + ], + }, + self.team, + ) + + flush_persons_and_events() + + query_runner = ExperimentFunnelsQueryRunner( + query=ExperimentFunnelsQuery(**experiment.metrics[0]["query"]), team=self.team + ) + result = query_runner.calculate() + + self.assertEqual(len(result.variants), 2) + for variant in result.variants: + self.assertIn(variant.key, ["control", "test"]) + + control_variant = next(v for v in result.variants if v.key == "control") + test_variant = next(v for v in result.variants if v.key == "test") + + self.assertEqual(control_variant.success_count, 2) + self.assertEqual(control_variant.failure_count, 1) + self.assertEqual(test_variant.success_count, 3) + self.assertEqual(test_variant.failure_count, 1) + + self.assertAlmostEqual(result.probability["control"], 0.407, places=2) + self.assertAlmostEqual(result.probability["test"], 0.593, places=2) + + self.assertAlmostEqual(result.credible_intervals["control"][0], 0.1941, places=3) + self.assertAlmostEqual(result.credible_intervals["control"][1], 0.9324, places=3) + self.assertAlmostEqual(result.credible_intervals["test"][0], 0.2836, places=3) + self.assertAlmostEqual(result.credible_intervals["test"][1], 0.9473, places=3) + + self.assertEqual(result.significance_code, ExperimentSignificanceCode.NOT_ENOUGH_EXPOSURE) + + self.assertFalse(result.significant) + self.assertEqual(len(result.variants), 2) + self.assertAlmostEqual(result.expected_loss, 1.0, places=1) + + @freeze_time("2020-01-01T12:00:00Z") + def test_validate_event_variants_no_events(self): + feature_flag = self.create_feature_flag() + experiment = self.create_experiment(feature_flag=feature_flag) + + funnels_query = FunnelsQuery( + series=[EventsNode(event="$pageview"), EventsNode(event="purchase")], + dateRange={"date_from": "2020-01-01", "date_to": "2020-01-14"}, + ) + experiment_query = ExperimentFunnelsQuery( + experiment_id=experiment.id, + kind="ExperimentFunnelsQuery", + source=funnels_query, + ) + + query_runner = ExperimentFunnelsQueryRunner(query=experiment_query, team=self.team) + with self.assertRaises(ValidationError) as context: + query_runner.calculate() + + expected_errors = json.dumps( + { + ExperimentNoResultsErrorKeys.NO_EVENTS: True, + ExperimentNoResultsErrorKeys.NO_FLAG_INFO: True, + ExperimentNoResultsErrorKeys.NO_CONTROL_VARIANT: True, + ExperimentNoResultsErrorKeys.NO_TEST_VARIANT: True, + } + ) + self.assertEqual(cast(list, context.exception.detail)[0], expected_errors) + + @freeze_time("2020-01-01T12:00:00Z") + def test_validate_event_variants_no_control(self): + feature_flag = self.create_feature_flag() + experiment = self.create_experiment(feature_flag=feature_flag) + + ff_property = f"$feature/{feature_flag.key}" + journeys_for( + { + "user_test": [ + {"event": "$pageview", "timestamp": "2020-01-02", "properties": {ff_property: "test"}}, + {"event": "purchase", "timestamp": "2020-01-03", "properties": {ff_property: "test"}}, + ], + }, + self.team, + ) + + flush_persons_and_events() + + funnels_query = FunnelsQuery( + series=[EventsNode(event="$pageview"), EventsNode(event="purchase")], + dateRange={"date_from": "2020-01-01", "date_to": "2020-01-14"}, + ) + experiment_query = ExperimentFunnelsQuery( + experiment_id=experiment.id, + 
kind="ExperimentFunnelsQuery", + source=funnels_query, + ) + + query_runner = ExperimentFunnelsQueryRunner(query=experiment_query, team=self.team) + with self.assertRaises(ValidationError) as context: + query_runner.calculate() + + expected_errors = json.dumps( + { + ExperimentNoResultsErrorKeys.NO_EVENTS: False, + ExperimentNoResultsErrorKeys.NO_FLAG_INFO: False, + ExperimentNoResultsErrorKeys.NO_CONTROL_VARIANT: True, + ExperimentNoResultsErrorKeys.NO_TEST_VARIANT: False, + } + ) + self.assertEqual(cast(list, context.exception.detail)[0], expected_errors) + + @freeze_time("2020-01-01T12:00:00Z") + def test_validate_event_variants_no_test(self): + feature_flag = self.create_feature_flag() + experiment = self.create_experiment(feature_flag=feature_flag) + + ff_property = f"$feature/{feature_flag.key}" + journeys_for( + { + "user_control": [ + {"event": "$pageview", "timestamp": "2020-01-02", "properties": {ff_property: "control"}}, + {"event": "purchase", "timestamp": "2020-01-03", "properties": {ff_property: "control"}}, + ], + }, + self.team, + ) + + flush_persons_and_events() + + funnels_query = FunnelsQuery( + series=[EventsNode(event="$pageview"), EventsNode(event="purchase")], + dateRange={"date_from": "2020-01-01", "date_to": "2020-01-14"}, + ) + experiment_query = ExperimentFunnelsQuery( + experiment_id=experiment.id, + kind="ExperimentFunnelsQuery", + source=funnels_query, + ) + + query_runner = ExperimentFunnelsQueryRunner(query=experiment_query, team=self.team) + with self.assertRaises(ValidationError) as context: + query_runner.calculate() + + expected_errors = json.dumps( + { + ExperimentNoResultsErrorKeys.NO_EVENTS: False, + ExperimentNoResultsErrorKeys.NO_FLAG_INFO: False, + ExperimentNoResultsErrorKeys.NO_CONTROL_VARIANT: False, + ExperimentNoResultsErrorKeys.NO_TEST_VARIANT: True, + } + ) + self.assertEqual(cast(list, context.exception.detail)[0], expected_errors) + + @freeze_time("2020-01-01T12:00:00Z") + def test_validate_event_variants_no_flag_info(self): + feature_flag = self.create_feature_flag() + experiment = self.create_experiment(feature_flag=feature_flag) + + journeys_for( + { + "user_no_flag_1": [ + {"event": "$pageview", "timestamp": "2020-01-02"}, + {"event": "purchase", "timestamp": "2020-01-03"}, + ], + "user_no_flag_2": [ + {"event": "$pageview", "timestamp": "2020-01-03"}, + ], + }, + self.team, + ) + + flush_persons_and_events() + + funnels_query = FunnelsQuery( + series=[EventsNode(event="$pageview"), EventsNode(event="purchase")], + dateRange={"date_from": "2020-01-01", "date_to": "2020-01-14"}, + ) + experiment_query = ExperimentFunnelsQuery( + experiment_id=experiment.id, + kind="ExperimentFunnelsQuery", + source=funnels_query, + ) + + query_runner = ExperimentFunnelsQueryRunner(query=experiment_query, team=self.team) + with self.assertRaises(ValidationError) as context: + query_runner.calculate() + + expected_errors = json.dumps( + { + ExperimentNoResultsErrorKeys.NO_EVENTS: False, + ExperimentNoResultsErrorKeys.NO_FLAG_INFO: True, + ExperimentNoResultsErrorKeys.NO_CONTROL_VARIANT: True, + ExperimentNoResultsErrorKeys.NO_TEST_VARIANT: True, + } + ) + self.assertEqual(cast(list, context.exception.detail)[0], expected_errors) diff --git a/posthog/hogql_queries/experiments/test/test_experiment_trend_query_runner.py b/posthog/hogql_queries/experiments/test/test_experiment_trends_query_runner.py similarity index 50% rename from posthog/hogql_queries/experiments/test/test_experiment_trend_query_runner.py rename to 
posthog/hogql_queries/experiments/test/test_experiment_trends_query_runner.py index b92e58280f7a9..bb3357e62232b 100644 --- a/posthog/hogql_queries/experiments/test/test_experiment_trend_query_runner.py +++ b/posthog/hogql_queries/experiments/test/test_experiment_trends_query_runner.py @@ -1,11 +1,12 @@ from django.test import override_settings -from posthog.hogql_queries.experiments.experiment_trend_query_runner import ExperimentTrendQueryRunner +from posthog.hogql_queries.experiments.experiment_trends_query_runner import ExperimentTrendsQueryRunner from posthog.models.experiment import Experiment from posthog.models.feature_flag.feature_flag import FeatureFlag from posthog.schema import ( EventsNode, - ExperimentTrendQuery, - ExperimentTrendQueryResponse, + ExperimentSignificanceCode, + ExperimentTrendsQuery, + ExperimentTrendsQueryResponse, TrendsQuery, ) from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, flush_persons_and_events @@ -14,10 +15,13 @@ from django.utils import timezone from datetime import timedelta from posthog.test.test_journeys import journeys_for +from rest_framework.exceptions import ValidationError +from posthog.constants import ExperimentNoResultsErrorKeys +import json @override_settings(IN_UNIT_TESTING=True) -class TestExperimentTrendQueryRunner(ClickhouseTestMixin, APIBaseTest): +class TestExperimentTrendsQueryRunner(ClickhouseTestMixin, APIBaseTest): def create_feature_flag(self, key="test-experiment"): return FeatureFlag.objects.create( name=f"Test experiment flag: {key}", @@ -63,9 +67,9 @@ def test_query_runner(self): count_query = TrendsQuery(series=[EventsNode(event="$pageview")]) exposure_query = TrendsQuery(series=[EventsNode(event="$feature_flag_called")]) - experiment_query = ExperimentTrendQuery( + experiment_query = ExperimentTrendsQuery( experiment_id=experiment.id, - kind="ExperimentTrendQuery", + kind="ExperimentTrendsQuery", count_query=count_query, exposure_query=exposure_query, ) @@ -95,25 +99,18 @@ def test_query_runner(self): flush_persons_and_events() - query_runner = ExperimentTrendQueryRunner( - query=ExperimentTrendQuery(**experiment.metrics[0]["query"]), team=self.team + query_runner = ExperimentTrendsQueryRunner( + query=ExperimentTrendsQuery(**experiment.metrics[0]["query"]), team=self.team ) result = query_runner.calculate() - self.assertEqual(result.insight, "TRENDS") - self.assertEqual(len(result.results), 2) + self.assertEqual(len(result.variants), 2) - trend_result = cast(ExperimentTrendQueryResponse, result) - - self.assertIn("control", trend_result.results) - self.assertIn("test", trend_result.results) - - control_result = trend_result.results["control"] - test_result = trend_result.results["test"] + control_result = next(variant for variant in result.variants if variant.key == "control") + test_result = next(variant for variant in result.variants if variant.key == "test") self.assertEqual(control_result.count, 11) self.assertEqual(test_result.count, 15) - self.assertEqual(control_result.absolute_exposure, 7) self.assertEqual(test_result.absolute_exposure, 9) @@ -128,9 +125,9 @@ def test_query_runner_with_custom_exposure(self): series=[EventsNode(event="custom_exposure_event", properties=[{"key": "valid_exposure", "value": "true"}])] ) - experiment_query = ExperimentTrendQuery( + experiment_query = ExperimentTrendsQuery( experiment_id=experiment.id, - kind="ExperimentTrendQuery", + kind="ExperimentTrendsQuery", count_query=count_query, exposure_query=exposure_query, ) @@ -200,18 +197,15 @@ def 
test_query_runner_with_custom_exposure(self): flush_persons_and_events() - query_runner = ExperimentTrendQueryRunner( - query=ExperimentTrendQuery(**experiment.metrics[0]["query"]), team=self.team + query_runner = ExperimentTrendsQueryRunner( + query=ExperimentTrendsQuery(**experiment.metrics[0]["query"]), team=self.team ) result = query_runner.calculate() - trend_result = cast(ExperimentTrendQueryResponse, result) + trend_result = cast(ExperimentTrendsQueryResponse, result) - self.assertIn("control", trend_result.results) - self.assertIn("test", trend_result.results) - - control_result = trend_result.results["control"] - test_result = trend_result.results["test"] + control_result = next(variant for variant in trend_result.variants if variant.key == "control") + test_result = next(variant for variant in trend_result.variants if variant.key == "test") self.assertEqual(control_result.count, 3) self.assertEqual(test_result.count, 5) @@ -227,9 +221,9 @@ def test_query_runner_with_default_exposure(self): ff_property = f"$feature/{feature_flag.key}" count_query = TrendsQuery(series=[EventsNode(event="$pageview")]) - experiment_query = ExperimentTrendQuery( + experiment_query = ExperimentTrendsQuery( experiment_id=experiment.id, - kind="ExperimentTrendQuery", + kind="ExperimentTrendsQuery", count_query=count_query, exposure_query=None, # No exposure query provided ) @@ -295,18 +289,15 @@ def test_query_runner_with_default_exposure(self): flush_persons_and_events() - query_runner = ExperimentTrendQueryRunner( - query=ExperimentTrendQuery(**experiment.metrics[0]["query"]), team=self.team + query_runner = ExperimentTrendsQueryRunner( + query=ExperimentTrendsQuery(**experiment.metrics[0]["query"]), team=self.team ) result = query_runner.calculate() - trend_result = cast(ExperimentTrendQueryResponse, result) - - self.assertIn("control", trend_result.results) - self.assertIn("test", trend_result.results) + trend_result = cast(ExperimentTrendsQueryResponse, result) - control_result = trend_result.results["control"] - test_result = trend_result.results["test"] + control_result = next(variant for variant in trend_result.variants if variant.key == "control") + test_result = next(variant for variant in trend_result.variants if variant.key == "test") self.assertEqual(control_result.count, 3) self.assertEqual(test_result.count, 5) @@ -322,9 +313,9 @@ def test_query_runner_with_avg_math(self): count_query = TrendsQuery(series=[EventsNode(event="$pageview", math="avg")]) exposure_query = TrendsQuery(series=[EventsNode(event="$feature_flag_called")]) - experiment_query = ExperimentTrendQuery( + experiment_query = ExperimentTrendsQuery( experiment_id=experiment.id, - kind="ExperimentTrendQuery", + kind="ExperimentTrendsQuery", count_query=count_query, exposure_query=exposure_query, ) @@ -332,9 +323,247 @@ def test_query_runner_with_avg_math(self): experiment.metrics = [{"type": "primary", "query": experiment_query.model_dump()}] experiment.save() - query_runner = ExperimentTrendQueryRunner( - query=ExperimentTrendQuery(**experiment.metrics[0]["query"]), team=self.team + query_runner = ExperimentTrendsQueryRunner( + query=ExperimentTrendsQuery(**experiment.metrics[0]["query"]), team=self.team ) prepared_count_query = query_runner.prepared_count_query self.assertEqual(prepared_count_query.series[0].math, "sum") + + @freeze_time("2020-01-01T12:00:00Z") + def test_query_runner_standard_flow(self): + feature_flag = self.create_feature_flag() + experiment = self.create_experiment(feature_flag=feature_flag) + + 
ff_property = f"$feature/{feature_flag.key}" + count_query = TrendsQuery(series=[EventsNode(event="$pageview")]) + exposure_query = TrendsQuery(series=[EventsNode(event="$feature_flag_called")]) + + experiment_query = ExperimentTrendsQuery( + experiment_id=experiment.id, + kind="ExperimentTrendsQuery", + count_query=count_query, + exposure_query=exposure_query, + ) + + experiment.metrics = [{"type": "primary", "query": experiment_query.model_dump()}] + experiment.save() + + journeys_for( + { + "user_control_1": [ + {"event": "$pageview", "timestamp": "2020-01-02", "properties": {ff_property: "control"}}, + {"event": "$pageview", "timestamp": "2020-01-03", "properties": {ff_property: "control"}}, + { + "event": "$feature_flag_called", + "timestamp": "2020-01-02", + "properties": {ff_property: "control"}, + }, + ], + "user_control_2": [ + {"event": "$pageview", "timestamp": "2020-01-02", "properties": {ff_property: "control"}}, + { + "event": "$feature_flag_called", + "timestamp": "2020-01-02", + "properties": {ff_property: "control"}, + }, + ], + "user_test_1": [ + {"event": "$pageview", "timestamp": "2020-01-02", "properties": {ff_property: "test"}}, + {"event": "$pageview", "timestamp": "2020-01-03", "properties": {ff_property: "test"}}, + {"event": "$pageview", "timestamp": "2020-01-04", "properties": {ff_property: "test"}}, + {"event": "$feature_flag_called", "timestamp": "2020-01-02", "properties": {ff_property: "test"}}, + ], + "user_test_2": [ + {"event": "$pageview", "timestamp": "2020-01-02", "properties": {ff_property: "test"}}, + {"event": "$pageview", "timestamp": "2020-01-03", "properties": {ff_property: "test"}}, + {"event": "$feature_flag_called", "timestamp": "2020-01-02", "properties": {ff_property: "test"}}, + ], + }, + self.team, + ) + + flush_persons_and_events() + + query_runner = ExperimentTrendsQueryRunner( + query=ExperimentTrendsQuery(**experiment.metrics[0]["query"]), team=self.team + ) + result = query_runner.calculate() + + self.assertEqual(len(result.variants), 2) + for variant in result.variants: + self.assertIn(variant.key, ["control", "test"]) + + control_variant = next(v for v in result.variants if v.key == "control") + test_variant = next(v for v in result.variants if v.key == "test") + + self.assertEqual(control_variant.count, 3) + self.assertEqual(test_variant.count, 5) + self.assertEqual(control_variant.absolute_exposure, 2) + self.assertEqual(test_variant.absolute_exposure, 2) + + self.assertAlmostEqual(result.credible_intervals["control"][0], 0.5449, places=3) + self.assertAlmostEqual(result.credible_intervals["control"][1], 4.3836, places=3) + self.assertAlmostEqual(result.credible_intervals["test"][0], 1.1009, places=3) + self.assertAlmostEqual(result.credible_intervals["test"][1], 5.8342, places=3) + + self.assertAlmostEqual(result.p_value, 1.0, places=3) + + self.assertAlmostEqual(result.probability["control"], 0.2549, places=2) + self.assertAlmostEqual(result.probability["test"], 0.7453, places=2) + + self.assertEqual(result.significance_code, ExperimentSignificanceCode.NOT_ENOUGH_EXPOSURE) + + self.assertFalse(result.significant) + + self.assertEqual(len(result.variants), 2) + + self.assertEqual(control_variant.absolute_exposure, 2.0) + self.assertEqual(control_variant.count, 3.0) + self.assertEqual(control_variant.exposure, 1.0) + + self.assertEqual(test_variant.absolute_exposure, 2.0) + self.assertEqual(test_variant.count, 5.0) + self.assertEqual(test_variant.exposure, 1.0) + + @freeze_time("2020-01-01T12:00:00Z") + def 
test_validate_event_variants_no_events(self): + feature_flag = self.create_feature_flag() + experiment = self.create_experiment(feature_flag=feature_flag) + + count_query = TrendsQuery(series=[EventsNode(event="$pageview")]) + experiment_query = ExperimentTrendsQuery( + experiment_id=experiment.id, + kind="ExperimentTrendsQuery", + count_query=count_query, + ) + + query_runner = ExperimentTrendsQueryRunner(query=experiment_query, team=self.team) + with self.assertRaises(ValidationError) as context: + query_runner.calculate() + + expected_errors = json.dumps( + { + ExperimentNoResultsErrorKeys.NO_EVENTS: True, + ExperimentNoResultsErrorKeys.NO_FLAG_INFO: True, + ExperimentNoResultsErrorKeys.NO_CONTROL_VARIANT: True, + ExperimentNoResultsErrorKeys.NO_TEST_VARIANT: True, + } + ) + self.assertEqual(cast(list, context.exception.detail)[0], expected_errors) + + @freeze_time("2020-01-01T12:00:00Z") + def test_validate_event_variants_no_control(self): + feature_flag = self.create_feature_flag() + experiment = self.create_experiment(feature_flag=feature_flag) + + ff_property = f"$feature/{feature_flag.key}" + journeys_for( + { + "user_test": [ + {"event": "$pageview", "timestamp": "2020-01-02", "properties": {ff_property: "test"}}, + ], + }, + self.team, + ) + + flush_persons_and_events() + + count_query = TrendsQuery(series=[EventsNode(event="$pageview")]) + experiment_query = ExperimentTrendsQuery( + experiment_id=experiment.id, + kind="ExperimentTrendsQuery", + count_query=count_query, + ) + + query_runner = ExperimentTrendsQueryRunner(query=experiment_query, team=self.team) + with self.assertRaises(ValidationError) as context: + query_runner.calculate() + + expected_errors = json.dumps( + { + ExperimentNoResultsErrorKeys.NO_EVENTS: False, + ExperimentNoResultsErrorKeys.NO_FLAG_INFO: False, + ExperimentNoResultsErrorKeys.NO_CONTROL_VARIANT: True, + ExperimentNoResultsErrorKeys.NO_TEST_VARIANT: False, + } + ) + self.assertEqual(cast(list, context.exception.detail)[0], expected_errors) + + @freeze_time("2020-01-01T12:00:00Z") + def test_validate_event_variants_no_test(self): + feature_flag = self.create_feature_flag() + experiment = self.create_experiment(feature_flag=feature_flag) + + ff_property = f"$feature/{feature_flag.key}" + journeys_for( + { + "user_control": [ + {"event": "$pageview", "timestamp": "2020-01-02", "properties": {ff_property: "control"}}, + ], + }, + self.team, + ) + + flush_persons_and_events() + + count_query = TrendsQuery(series=[EventsNode(event="$pageview")]) + experiment_query = ExperimentTrendsQuery( + experiment_id=experiment.id, + kind="ExperimentTrendsQuery", + count_query=count_query, + ) + + query_runner = ExperimentTrendsQueryRunner(query=experiment_query, team=self.team) + with self.assertRaises(ValidationError) as context: + query_runner.calculate() + + expected_errors = json.dumps( + { + ExperimentNoResultsErrorKeys.NO_EVENTS: False, + ExperimentNoResultsErrorKeys.NO_FLAG_INFO: False, + ExperimentNoResultsErrorKeys.NO_CONTROL_VARIANT: False, + ExperimentNoResultsErrorKeys.NO_TEST_VARIANT: True, + } + ) + self.assertEqual(cast(list, context.exception.detail)[0], expected_errors) + + @freeze_time("2020-01-01T12:00:00Z") + def test_validate_event_variants_no_flag_info(self): + feature_flag = self.create_feature_flag() + experiment = self.create_experiment(feature_flag=feature_flag) + + journeys_for( + { + "user_no_flag_1": [ + {"event": "$pageview", "timestamp": "2020-01-02"}, + ], + "user_no_flag_2": [ + {"event": "$pageview", "timestamp": "2020-01-03"}, + ], + 
}, + self.team, + ) + + flush_persons_and_events() + + count_query = TrendsQuery(series=[EventsNode(event="$pageview")]) + experiment_query = ExperimentTrendsQuery( + experiment_id=experiment.id, + kind="ExperimentTrendsQuery", + count_query=count_query, + ) + + query_runner = ExperimentTrendsQueryRunner(query=experiment_query, team=self.team) + with self.assertRaises(ValidationError) as context: + query_runner.calculate() + + expected_errors = json.dumps( + { + ExperimentNoResultsErrorKeys.NO_EVENTS: True, + ExperimentNoResultsErrorKeys.NO_FLAG_INFO: True, + ExperimentNoResultsErrorKeys.NO_CONTROL_VARIANT: True, + ExperimentNoResultsErrorKeys.NO_TEST_VARIANT: True, + } + ) + self.assertEqual(cast(list, context.exception.detail)[0], expected_errors) diff --git a/posthog/hogql_queries/experiments/trend_statistics.py b/posthog/hogql_queries/experiments/trends_statistics.py similarity index 92% rename from posthog/hogql_queries/experiments/trend_statistics.py rename to posthog/hogql_queries/experiments/trends_statistics.py index f35a1b42136b3..61b19d1486f72 100644 --- a/posthog/hogql_queries/experiments/trend_statistics.py +++ b/posthog/hogql_queries/experiments/trends_statistics.py @@ -11,14 +11,14 @@ MIN_PROBABILITY_FOR_SIGNIFICANCE, P_VALUE_SIGNIFICANCE_LEVEL, ) -from posthog.constants import ExperimentSignificanceCode -from posthog.schema import ExperimentVariantTrendResult + +from posthog.schema import ExperimentSignificanceCode, ExperimentVariantTrendsBaseStats Probability = float def calculate_probabilities( - control_variant: ExperimentVariantTrendResult, test_variants: list[ExperimentVariantTrendResult] + control_variant: ExperimentVariantTrendsBaseStats, test_variants: list[ExperimentVariantTrendsBaseStats] ) -> list[Probability]: """ Calculates probability that A is better than B. First variant is control, rest are test variants. 
@@ -59,7 +59,7 @@ def calculate_probabilities( def simulate_winning_variant_for_arrival_rates( - target_variant: ExperimentVariantTrendResult, variants: list[ExperimentVariantTrendResult] + target_variant: ExperimentVariantTrendsBaseStats, variants: list[ExperimentVariantTrendsBaseStats] ) -> float: random_sampler = default_rng() simulations_count = 100_000 @@ -85,8 +85,8 @@ def simulate_winning_variant_for_arrival_rates( def are_results_significant( - control_variant: ExperimentVariantTrendResult, - test_variants: list[ExperimentVariantTrendResult], + control_variant: ExperimentVariantTrendsBaseStats, + test_variants: list[ExperimentVariantTrendsBaseStats], probabilities: list[Probability], ) -> tuple[ExperimentSignificanceCode, Probability]: # TODO: Experiment with Expected Loss calculations for trend experiments @@ -152,7 +152,7 @@ def poisson_p_value(control_count, control_exposure, test_count, test_exposure): def calculate_p_value( - control_variant: ExperimentVariantTrendResult, test_variants: list[ExperimentVariantTrendResult] + control_variant: ExperimentVariantTrendsBaseStats, test_variants: list[ExperimentVariantTrendsBaseStats] ) -> Probability: best_test_variant = max(test_variants, key=lambda variant: variant.count) diff --git a/posthog/hogql_queries/insights/funnels/__init__.py b/posthog/hogql_queries/insights/funnels/__init__.py index 787cd01ec887d..50d5eddcf8da0 100644 --- a/posthog/hogql_queries/insights/funnels/__init__.py +++ b/posthog/hogql_queries/insights/funnels/__init__.py @@ -5,7 +5,8 @@ from .funnel_unordered import FunnelUnordered from .funnel_time_to_convert import FunnelTimeToConvert from .funnel_trends import FunnelTrends +from .funnel_trends_udf import FunnelTrendsUDF from .funnel_persons import FunnelActors -from .funnel_strict_persons import FunnelStrictActors -from .funnel_unordered_persons import FunnelUnorderedActors -from .funnel_trends_persons import FunnelTrendsActors +from .funnel_strict_actors import FunnelStrictActors +from .funnel_unordered_actors import FunnelUnorderedActors +from .funnel_trends_actors import FunnelTrendsActors diff --git a/posthog/hogql_queries/insights/funnels/base.py b/posthog/hogql_queries/insights/funnels/base.py index cf6836a4dd168..9ddc1b4f26af2 100644 --- a/posthog/hogql_queries/insights/funnels/base.py +++ b/posthog/hogql_queries/insights/funnels/base.py @@ -89,6 +89,25 @@ def get_step_counts_query(self) -> ast.SelectQuery: def get_step_counts_without_aggregation_query(self) -> ast.SelectQuery: raise NotImplementedError() + # This is a simple heuristic to reduce the number of events we look at in UDF funnels (thus are serialized and sent over) + # We remove an event if it matches one or zero steps and there was already the same type of event before and after it (that don't have the same timestamp) + # arrayRotateRight turns [1,2,3] into [3,1,2] + # arrayRotateLeft turns [1,2,3] into [2,3,1] + # For some reason, using these uses much less memory than using indexing in clickhouse to check the previous and next element + def _udf_event_array_filter(self, timestamp_index: int, prop_val_index: int, steps_index: int): + return f"""arrayFilter( + (x, x_before, x_after) -> not ( + length(x.{steps_index}) <= 1 + and x.{steps_index} == x_before.{steps_index} + and x.{steps_index} == x_after.{steps_index} + and x.{prop_val_index} == x_before.{prop_val_index} + and x.{prop_val_index} == x_after.{prop_val_index} + and x.{timestamp_index} > x_before.{timestamp_index} + and x.{timestamp_index} < x_after.{timestamp_index}), + 
events_array, + arrayRotateRight(events_array, 1), + arrayRotateLeft(events_array, 1))""" + @cached_property def breakdown_cohorts(self) -> list[Cohort]: team, breakdown = self.context.team, self.context.breakdown @@ -304,7 +323,7 @@ def _serialize_step( "Data warehouse tables are not supported in funnels just yet. For now, please try this funnel without the data warehouse-based step." ) else: - action = Action.objects.get(pk=step.id) + action = Action.objects.get(pk=step.id, team__project_id=self.context.team.project_id) name = action.name action_id = step.id type = "actions" @@ -676,7 +695,7 @@ def _build_step_query( if isinstance(entity, ActionsNode) or isinstance(entity, FunnelExclusionActionsNode): # action - action = Action.objects.get(pk=int(entity.id), team=self.context.team) + action = Action.objects.get(pk=int(entity.id), team__project_id=self.context.team.project_id) event_expr = action_to_expr(action) elif isinstance(entity, DataWarehouseNode): raise ValidationError( diff --git a/posthog/hogql_queries/insights/funnels/funnel_correlation_query_runner.py b/posthog/hogql_queries/insights/funnels/funnel_correlation_query_runner.py index fe11d9387fba9..11a6c7458c20a 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_correlation_query_runner.py +++ b/posthog/hogql_queries/insights/funnels/funnel_correlation_query_runner.py @@ -7,8 +7,8 @@ from posthog.hogql_queries.insights.funnels import FunnelUDF from posthog.hogql_queries.insights.funnels.funnel_event_query import FunnelEventQuery from posthog.hogql_queries.insights.funnels.funnel_persons import FunnelActors -from posthog.hogql_queries.insights.funnels.funnel_strict_persons import FunnelStrictActors -from posthog.hogql_queries.insights.funnels.funnel_unordered_persons import FunnelUnorderedActors +from posthog.hogql_queries.insights.funnels.funnel_strict_actors import FunnelStrictActors +from posthog.hogql_queries.insights.funnels.funnel_unordered_actors import FunnelUnorderedActors from posthog.models.action.action import Action from posthog.models.element.element import chain_to_elements from posthog.models.event.util import ElementSerializer @@ -830,7 +830,7 @@ def _get_funnel_step_names(self) -> list[str]: events: set[str] = set() for entity in self.funnels_query.series: if isinstance(entity, ActionsNode): - action = Action.objects.get(pk=int(entity.id), team=self.context.team) + action = Action.objects.get(pk=int(entity.id), team__project_id=self.context.team.project_id) events.update([x for x in action.get_step_events() if x]) elif isinstance(entity, EventsNode): if entity.event is not None: diff --git a/posthog/hogql_queries/insights/funnels/funnel_event_query.py b/posthog/hogql_queries/insights/funnels/funnel_event_query.py index c4cb9507534ef..99f80dc5ead64 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_event_query.py +++ b/posthog/hogql_queries/insights/funnels/funnel_event_query.py @@ -132,7 +132,7 @@ def _date_range_expr(self) -> ast.Expr: ) def _entity_expr(self, skip_entity_filter: bool) -> ast.Expr | None: - team, query, funnelsFilter = self.context.team, self.context.query, self.context.funnelsFilter + query, funnelsFilter = self.context.query, self.context.funnelsFilter exclusions = funnelsFilter.exclusions or [] if skip_entity_filter is True: @@ -145,7 +145,7 @@ def _entity_expr(self, skip_entity_filter: bool) -> ast.Expr | None: events.add(node.event) elif isinstance(node, ActionsNode) or isinstance(node, FunnelExclusionActionsNode): try: - action = Action.objects.get(pk=int(node.id), 
team=team) + action = Action.objects.get(pk=int(node.id), team__project_id=self.context.team.project_id) events.update(action.get_step_events()) except Action.DoesNotExist: raise ValidationError(f"Action ID {node.id} does not exist!") diff --git a/posthog/hogql_queries/insights/funnels/funnel_strict_persons.py b/posthog/hogql_queries/insights/funnels/funnel_strict_actors.py similarity index 100% rename from posthog/hogql_queries/insights/funnels/funnel_strict_persons.py rename to posthog/hogql_queries/insights/funnels/funnel_strict_actors.py diff --git a/posthog/hogql_queries/insights/funnels/funnel_trends_persons.py b/posthog/hogql_queries/insights/funnels/funnel_trends_actors.py similarity index 100% rename from posthog/hogql_queries/insights/funnels/funnel_trends_persons.py rename to posthog/hogql_queries/insights/funnels/funnel_trends_actors.py diff --git a/posthog/hogql_queries/insights/funnels/funnel_trends_udf.py b/posthog/hogql_queries/insights/funnels/funnel_trends_udf.py index d171249520031..d3a372af506a7 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_trends_udf.py +++ b/posthog/hogql_queries/insights/funnels/funnel_trends_udf.py @@ -1,19 +1,26 @@ -from typing import cast +from typing import cast, Optional + +from rest_framework.exceptions import ValidationError from posthog.hogql import ast from posthog.hogql.constants import HogQLQuerySettings -from posthog.hogql.parser import parse_select +from posthog.hogql.parser import parse_select, parse_expr from posthog.hogql_queries.insights.funnels import FunnelTrends -from posthog.hogql_queries.insights.funnels.funnel_udf import udf_event_array_filter from posthog.hogql_queries.insights.utils.utils import get_start_of_interval_hogql_str from posthog.schema import BreakdownType, BreakdownAttributionType -from posthog.utils import DATERANGE_MAP +from posthog.utils import DATERANGE_MAP, relative_date_parse TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S" HUMAN_READABLE_TIMESTAMP_FORMAT = "%-d-%b-%Y" class FunnelTrendsUDF(FunnelTrends): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + # In base, these fields only get added if you're running an actors query + if "uuid" not in self._extra_event_fields: + self._extra_event_fields.append("uuid") + def get_step_counts_query(self): max_steps = self.context.max_steps return self._get_step_counts_query( @@ -30,7 +37,21 @@ def conversion_window_limit(self) -> int: self.context.funnelWindowInterval * DATERANGE_MAP[self.context.funnelWindowIntervalUnit].total_seconds() ) - def get_query(self) -> ast.SelectQuery: + def matched_event_select(self): + if self._include_matched_events(): + return """ + groupArray(tuple(timestamp, uuid, $session_id, $window_id)) as user_events, + mapFromArrays(arrayMap(x -> x.2, user_events), user_events) as user_events_map, + [user_events_map[af_tuple.4]] as matching_events, + """ + return "" + + def udf_event_array_filter(self): + return self._udf_event_array_filter(1, 4, 5) + + # This is the function that calls the UDF + # This is used by both the query itself and the actors query + def _inner_aggregation_query(self): # If they're asking for a "to_step" just truncate the funnel funnelsFilter = self.context.funnelsFilter max_steps = self.context.max_steps if funnelsFilter.funnelToStep is None else funnelsFilter.funnelToStep + 1 @@ -79,7 +100,13 @@ def get_query(self) -> ast.SelectQuery: parse_select( f""" SELECT - arraySort(t -> t.1, groupArray(tuple(toFloat(timestamp), 
_toUInt64(toDateTime({get_start_of_interval_hogql_str(self.context.interval.value, team=self.context.team, source='timestamp')})), {prop_selector}, arrayFilter((x) -> x != 0, [{steps}{exclusions}])))) as events_array, + arraySort(t -> t.1, groupArray(tuple( + toFloat(timestamp), + _toUInt64(toDateTime({get_start_of_interval_hogql_str(self.context.interval.value, team=self.context.team, source='timestamp')})), + uuid, + {prop_selector}, + arrayFilter((x) -> x != 0, [{steps}{exclusions}]) + ))) as events_array, arrayJoin({fn}( {from_step}, {max_steps}, @@ -87,11 +114,13 @@ def get_query(self) -> ast.SelectQuery: '{breakdown_attribution_string}', '{self.context.funnelsFilter.funnelOrderType}', {prop_vals}, - {udf_event_array_filter(self.context.funnelsFilter.funnelOrderType)} + {self.udf_event_array_filter()} )) as af_tuple, toTimeZone(toDateTime(_toUInt64(af_tuple.1)), '{self.context.team.timezone}') as entrance_period_start, af_tuple.2 as success_bool, - af_tuple.3 as breakdown + af_tuple.3 as breakdown, + {self.matched_event_select()} + aggregation_target as aggregation_target FROM {{inner_event_query}} GROUP BY aggregation_target{breakdown_prop} """, @@ -100,6 +129,10 @@ def get_query(self) -> ast.SelectQuery: ) # This is necessary so clickhouse doesn't truncate timezone information when passing datetimes to and from python inner_select.settings = HogQLQuerySettings(date_time_output_format="iso", date_time_input_format="best_effort") + return inner_select + + def get_query(self) -> ast.SelectQuery: + inner_select = self._inner_aggregation_query() conversion_rate_expr = ( "if(reached_from_step_count > 0, round(reached_to_step_count / reached_from_step_count * 100, 2), 0)" @@ -163,3 +196,62 @@ def get_query(self) -> ast.SelectQuery: {"fill_query": fill_query, "inner_select": inner_select}, ) return cast(ast.SelectQuery, s) + + def _matching_events(self): + if ( + hasattr(self.context, "actorsQuery") + and self.context.actorsQuery is not None + and self.context.actorsQuery.includeRecordings + ): + return [ast.Alias(alias="matching_events", expr=ast.Field(chain=["matching_events"]))] + return [] + + def actor_query( + self, + extra_fields: Optional[list[str]] = None, + ) -> ast.SelectQuery: + team, actorsQuery = self.context.team, self.context.actorsQuery + + if actorsQuery is None: + raise ValidationError("No actors query present.") + + # At this time, we do not support self.dropOff (we don't use it anywhere in the frontend) + if actorsQuery.funnelTrendsDropOff is None: + raise ValidationError(f"Actors parameter `funnelTrendsDropOff` must be provided for funnel trends persons!") + + if actorsQuery.funnelTrendsEntrancePeriodStart is None: + raise ValidationError( + f"Actors parameter `funnelTrendsEntrancePeriodStart` must be provided funnel trends persons!" + ) + + entrancePeriodStart = relative_date_parse(actorsQuery.funnelTrendsEntrancePeriodStart, team.timezone_info) + if entrancePeriodStart is None: + raise ValidationError( + f"Actors parameter `funnelTrendsEntrancePeriodStart` must be a valid relative date string!" 
+ ) + + select: list[ast.Expr] = [ + ast.Alias(alias="actor_id", expr=ast.Field(chain=["aggregation_target"])), + *self._matching_events(), + *([ast.Field(chain=[field]) for field in extra_fields or []]), + ] + select_from = ast.JoinExpr(table=self._inner_aggregation_query()) + + where = ast.And( + exprs=[ + parse_expr("success_bool != 1") if actorsQuery.funnelTrendsDropOff else parse_expr("success_bool = 1"), + ast.CompareOperation( + op=ast.CompareOperationOp.Eq, + left=parse_expr("entrance_period_start"), + right=ast.Constant(value=entrancePeriodStart), + ), + ] + ) + order_by = [ast.OrderExpr(expr=ast.Field(chain=["aggregation_target"]))] + + return ast.SelectQuery( + select=select, + select_from=select_from, + order_by=order_by, + where=where, + ) diff --git a/posthog/hogql_queries/insights/funnels/funnel_udf.py b/posthog/hogql_queries/insights/funnels/funnel_udf.py index a9a3ef9e3f5c4..3d55d89aa05ff 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_udf.py +++ b/posthog/hogql_queries/insights/funnels/funnel_udf.py @@ -3,28 +3,13 @@ from posthog.hogql import ast from posthog.hogql.parser import parse_select, parse_expr from posthog.hogql_queries.insights.funnels.base import FunnelBase -from posthog.schema import BreakdownType, BreakdownAttributionType, StepOrderValue +from posthog.schema import BreakdownType, BreakdownAttributionType from posthog.utils import DATERANGE_MAP TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S" HUMAN_READABLE_TIMESTAMP_FORMAT = "%-d-%b-%Y" -# This is used to reduce the number of events we look at in strict funnels -# We remove a non-matching event if there was already one before it (that don't have the same timestamp) -# arrayRotateRight turns [1,2,3] into [3,1,2] -# For some reason, this uses much less memory than using indexing in clickhouse to check the previous element -def udf_event_array_filter(funnelOrderType: StepOrderValue | None): - if funnelOrderType == "strict": - return f""" - arrayFilter( - (x, x2) -> not (empty(x.4) and empty(x2.4) and x.3 == x2.3 and x.1 > x2.1), - events_array, - arrayRotateRight(events_array, 1)) - """ - return "events_array" - - class FunnelUDF(FunnelBase): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ -35,23 +20,25 @@ def __init__(self, *args, **kwargs): if property not in self._extra_event_properties: self._extra_event_properties.append(property) - # I think I can delete this - def get_step_counts_query(self): - max_steps = self.context.max_steps - return self._get_step_counts_query( - outer_select=[ - *self._get_matching_event_arrays(max_steps), - ], - inner_select=[ - *self._get_matching_events(max_steps), - ], - ) - def conversion_window_limit(self) -> int: return int( self.context.funnelWindowInterval * DATERANGE_MAP[self.context.funnelWindowIntervalUnit].total_seconds() ) + def matched_event_arrays_selects(self): + # We use matched events to get timestamps for the funnel as well as recordings + if self._include_matched_events() or self.context.includePrecedingTimestamp or self.context.includeTimestamp: + return """ + af_tuple.4 as matched_event_uuids_array_array, + groupArray(tuple(timestamp, uuid, $session_id, $window_id)) as user_events, + mapFromArrays(arrayMap(x -> x.2, user_events), user_events) as user_events_map, + arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) as matched_events_array, + """ + return "" + + def udf_event_array_filter(self): + return 
self._udf_event_array_filter(1, 3, 4) + # This is the function that calls the UDF # This is used by both the query itself and the actors query def _inner_aggregation_query(self): @@ -90,38 +77,28 @@ def _inner_aggregation_query(self): breakdown_attribution_string = f"{self.context.breakdownAttributionType}{f'_{self.context.funnelsFilter.breakdownAttributionValue}' if self.context.breakdownAttributionType == BreakdownAttributionType.STEP else ''}" - def matched_event_arrays_selects(): - # We use matched events to get timestamps for the funnel as well as recordings - if ( - self._include_matched_events() - or self.context.includePrecedingTimestamp - or self.context.includeTimestamp - ): - return """ - af_tuple.4 as matched_event_uuids_array_array, - groupArray(tuple(timestamp, uuid, $session_id, $window_id)) as user_events, - mapFromArrays(arrayMap(x -> x.2, user_events), user_events) as user_events_map, - arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) as matched_events_array, - """ - return "" - inner_select = parse_select( f""" SELECT - arraySort(t -> t.1, groupArray(tuple(toFloat(timestamp), uuid, {prop_selector}, arrayFilter((x) -> x != 0, [{steps}{exclusions}])))) as events_array, + arraySort(t -> t.1, groupArray(tuple( + toFloat(timestamp), + uuid, + {prop_selector}, + arrayFilter((x) -> x != 0, [{steps}{exclusions}]) + ))) as events_array, arrayJoin({fn}( {self.context.max_steps}, {self.conversion_window_limit()}, '{breakdown_attribution_string}', '{self.context.funnelsFilter.funnelOrderType}', {prop_vals}, - {udf_event_array_filter(self.context.funnelsFilter.funnelOrderType)} + {self.udf_event_array_filter()} )) as af_tuple, af_tuple.1 as step_reached, af_tuple.1 + 1 as steps, -- Backward compatibility af_tuple.2 as breakdown, af_tuple.3 as timings, - {matched_event_arrays_selects()} + {self.matched_event_arrays_selects()} aggregation_target FROM {{inner_event_query}} GROUP BY aggregation_target{breakdown_prop} diff --git a/posthog/hogql_queries/insights/funnels/funnel_unordered_persons.py b/posthog/hogql_queries/insights/funnels/funnel_unordered_actors.py similarity index 100% rename from posthog/hogql_queries/insights/funnels/funnel_unordered_persons.py rename to posthog/hogql_queries/insights/funnels/funnel_unordered_actors.py diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors.ambr index fc9dcbeb30399..46d4cbd700ebb 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors.ambr @@ -642,71 +642,8 @@ max_bytes_before_external_group_by=0 ''' # --- +<<<<<<< HEAD # name: TestFunnelCorrelationActors.test_strict_funnel_correlation_with_recordings - ''' - SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1, - countIf(ifNull(equals(steps, 2), 0)) AS step_2, - avg(step_1_average_conversion_time_inner) AS step_1_average_conversion_time, - median(step_1_median_conversion_time_inner) AS step_1_median_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - steps AS steps, - avg(step_1_conversion_time) AS step_1_average_conversion_time_inner, - median(step_1_conversion_time) AS step_1_median_conversion_time_inner - FROM - (SELECT aggregation_target AS aggregation_target, 
- steps AS steps, - max(steps) OVER (PARTITION BY aggregation_target) AS max_steps, - step_1_conversion_time AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - latest_1 AS latest_1, - if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1) AS steps, - if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time - FROM - (SELECT aggregation_target AS aggregation_target, - timestamp AS timestamp, - step_0 AS step_0, - latest_0 AS latest_0, - step_1 AS step_1, - min(latest_1) OVER (PARTITION BY aggregation_target - ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1 - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, - if(equals(e.event, '$pageview'), 1, 0) AS step_0, - if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, - if(equals(e.event, 'insight analyzed'), 1, 0) AS step_1, - if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1 - FROM events AS e - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, - person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 2) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC')))))) - WHERE ifNull(equals(step_0, 1), 0))) - GROUP BY aggregation_target, - steps - HAVING ifNull(equals(steps, max(max_steps)), isNull(steps) - and isNull(max(max_steps)))) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=23622320128, - allow_experimental_analyzer=1 - ''' -# --- -# name: TestFunnelCorrelationActors.test_strict_funnel_correlation_with_recordings.1 ''' SELECT persons.id, persons.id AS id, @@ -883,7 +820,7 @@ max_bytes_before_external_group_by=0 ''' # --- -# name: TestFunnelCorrelationActors.test_strict_funnel_correlation_with_recordings.2 +# name: TestFunnelCorrelationActors.test_strict_funnel_correlation_with_recordings.1 ''' SELECT DISTINCT session_replay_events.session_id AS session_id FROM session_replay_events @@ -897,7 +834,7 @@ max_bytes_before_external_group_by=0 ''' # --- -# name: TestFunnelCorrelationActors.test_strict_funnel_correlation_with_recordings.3 +# name: TestFunnelCorrelationActors.test_strict_funnel_correlation_with_recordings.2 ''' SELECT persons.id, persons.id AS id, @@ -1074,7 +1011,7 @@ max_bytes_before_external_group_by=0 ''' # --- -# name: TestFunnelCorrelationActors.test_strict_funnel_correlation_with_recordings.4 +# name: 
TestFunnelCorrelationActors.test_strict_funnel_correlation_with_recordings.3 ''' SELECT DISTINCT session_replay_events.session_id AS session_id FROM session_replay_events diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors_udf.ambr index 2490d1b6aa554..3a16355324cb4 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors_udf.ambr @@ -26,7 +26,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -77,7 +81,11 @@ JOIN (SELECT aggregation_target AS actor_id, matched_events_array[plus(step_reached, 1)] AS matching_events, (matched_events_array[1][1]).1 AS timestamp, nullIf((matched_events_array[2][1]).1, 0) AS final_timestamp, (matched_events_array[1][1]).1 AS first_timestamp, steps AS steps, final_timestamp, first_timestamp FROM - (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target + (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) +
and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target FROM (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, e.uuid AS uuid, e.`$session_id` AS `$session_id`, e.`$window_id` AS `$window_id`, if(equals(e.event, '$pageview'), 1, 0) AS step_0, if(equals(e.event, 'insight analyzed'), 1, 0) AS step_1 FROM events AS e @@ -149,7 +157,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(3, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -201,7 +213,11 @@ JOIN (SELECT aggregation_target AS actor_id, matched_events_array[plus(step_reached, 1)] AS matching_events, (matched_events_array[1][1]).1 AS timestamp, nullIf((matched_events_array[3][1]).1, 0) AS final_timestamp, (matched_events_array[1][1]).1 AS first_timestamp, steps AS steps, final_timestamp, first_timestamp FROM - (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, arrayJoin(aggregate_funnel_array_v0(3, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target + (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 
'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, arrayJoin(aggregate_funnel_array_v2(3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target FROM (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, e.uuid AS uuid, e.`$session_id` AS `$session_id`, e.`$window_id` AS `$window_id`, if(equals(e.event, '$pageview'), 1, 0) AS step_0, if(equals(e.event, 'insight analyzed'), 1, 0) AS step_1, if(equals(e.event, 'insight updated'), 1, 0) AS step_2 FROM events AS e @@ -265,7 +281,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -319,7 +339,11 @@ FROM (SELECT aggregation_target AS actor_id, matched_events_array[plus(step_reached, 1)] AS matching_events, (matched_events_array[1][1]).1 AS timestamp, nullIf((matched_events_array[2][1]).1, 0) AS final_timestamp, (matched_events_array[1][1]).1 AS first_timestamp, steps AS steps, final_timestamp, first_timestamp FROM - (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS 
timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target + (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target FROM (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, e.uuid AS uuid, e.`$session_id` AS `$session_id`, e.`$window_id` AS `$window_id`, if(equals(e.event, '$pageview'), 1, 0) AS step_0, if(equals(e.event, 'insight analyzed'), 1, 0) AS step_1 FROM events AS e @@ -389,8 +413,8 @@ breakdown AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3) - and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple, + arrayJoin(aggregate_funnel_array(2, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3) + and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -429,121 +453,6 @@ ''' # --- # name: TestFunnelCorrelationsActorsUDF.test_strict_funnel_correlation_with_recordings.1 - ''' - SELECT persons.id, - persons.id AS id, - source.matching_events AS matching_events - FROM - (SELECT funnel_actors.actor_id AS actor_id, - any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, - matched_events_array[plus(step_reached, 1)] AS matching_events, - 
(matched_events_array[1][1]).1 AS timestamp, - nullIf((matched_events_array[2][1]).1, 0) AS final_timestamp, - (matched_events_array[1][1]).1 AS first_timestamp, - steps AS steps, - final_timestamp, - first_timestamp - FROM - (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3) - and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple, - af_tuple.1 AS step_reached, - plus(af_tuple.1, 1) AS steps, - af_tuple.2 AS breakdown, - af_tuple.3 AS timings, - af_tuple.4 AS matched_event_uuids_array_array, - groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, - mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, - arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, - aggregation_target AS aggregation_target - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, - if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, - e.uuid AS uuid, - e.`$session_id` AS `$session_id`, - e.`$window_id` AS `$window_id`, - if(equals(e.event, '$pageview'), 1, 0) AS step_0, - if(equals(e.event, 'insight analyzed'), 1, 0) AS step_1 - FROM events AS e - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, - person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 2) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - LEFT JOIN - (SELECT person.id AS id, - replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'foo'), ''), 'null'), '^"|"$', '') AS properties___foo - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__person ON equals(if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id), e__person.id) - WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 'UTC'))), ifNull(equals(e__person.properties___foo, 'bar'), 0))) - GROUP BY aggregation_target - HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) - WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(equals(funnel_actors.steps, 2), 0) - GROUP BY funnel_actors.actor_id - ORDER BY 
funnel_actors.actor_id ASC) AS source - INNER JOIN - (SELECT person.id AS id - FROM person - WHERE and(equals(person.team_id, 2), in(id, - (SELECT source.actor_id AS actor_id - FROM - (SELECT funnel_actors.actor_id AS actor_id, any(funnel_actors.matching_events) AS matching_events - FROM - (SELECT aggregation_target AS actor_id, matched_events_array[plus(step_reached, 1)] AS matching_events, (matched_events_array[1][1]).1 AS timestamp, nullIf((matched_events_array[2][1]).1, 0) AS final_timestamp, (matched_events_array[1][1]).1 AS first_timestamp, steps AS steps, final_timestamp, first_timestamp - FROM - (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3) - and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target - FROM - (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, e.uuid AS uuid, e.`$session_id` AS `$session_id`, e.`$window_id` AS `$window_id`, if(equals(e.event, '$pageview'), 1, 0) AS step_0, if(equals(e.event, 'insight analyzed'), 1, 0) AS step_1 - FROM events AS e - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 2) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - LEFT JOIN - (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'foo'), ''), 'null'), '^"|"$', '') AS properties___foo - FROM person - WHERE and(equals(person.team_id, 2), ifNull(in(tuple(person.id, person.version), - (SELECT person.id AS id, max(person.version) AS version - FROM person - WHERE equals(person.team_id, 2) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__person ON equals(if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id), e__person.id) - WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-01-08 23:59:59.999999', 6, 
'UTC'))), ifNull(equals(e__person.properties___foo, 'bar'), 0))) - GROUP BY aggregation_target - HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) - WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors - WHERE ifNull(equals(funnel_actors.steps, 2), 0) - GROUP BY funnel_actors.actor_id - ORDER BY funnel_actors.actor_id ASC) AS source))) - GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) - ORDER BY persons.id ASC - LIMIT 101 - OFFSET 0 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 - ''' -# --- -# name: TestFunnelCorrelationsActorsUDF.test_strict_funnel_correlation_with_recordings.2 ''' SELECT DISTINCT session_replay_events.session_id AS session_id FROM session_replay_events @@ -557,7 +466,7 @@ max_bytes_before_external_group_by=0 ''' # --- -# name: TestFunnelCorrelationsActorsUDF.test_strict_funnel_correlation_with_recordings.3 +# name: TestFunnelCorrelationsActorsUDF.test_strict_funnel_correlation_with_recordings.2 ''' SELECT persons.id, persons.id AS id, @@ -576,8 +485,10 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3) + arrayJoin(aggregate_funnel_array_v1(2, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3) and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple, + arrayJoin(aggregate_funnel_array(2, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3) + and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -631,8 +542,10 @@ FROM (SELECT aggregation_target AS actor_id, matched_events_array[plus(step_reached, 1)] AS matching_events, (matched_events_array[1][1]).1 AS timestamp, nullIf((matched_events_array[2][1]).1, 0) AS final_timestamp, (matched_events_array[1][1]).1 AS first_timestamp, steps AS steps, final_timestamp, first_timestamp FROM - (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3) + (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array_v1(2, 1209600, 'first_touch', 'strict', [[]], 
arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3) and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target + (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array(2, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3) + and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target FROM (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, e.uuid AS uuid, e.`$session_id` AS `$session_id`, e.`$window_id` AS `$window_id`, if(equals(e.event, '$pageview'), 1, 0) AS step_0, if(equals(e.event, 'insight analyzed'), 1, 0) AS step_1 FROM events AS e @@ -672,7 +585,7 @@ max_bytes_before_external_group_by=0 ''' # --- -# name: TestFunnelCorrelationsActorsUDF.test_strict_funnel_correlation_with_recordings.4 +# name: TestFunnelCorrelationsActorsUDF.test_strict_funnel_correlation_with_recordings.3 ''' SELECT DISTINCT session_replay_events.session_id AS session_id FROM session_replay_events diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_udf.ambr index b78a92b863f56..80f105f454636 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_udf.ambr @@ -22,7 +22,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, 
x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -70,7 +74,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -129,7 +137,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -190,7 +202,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS 
step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -249,7 +265,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -303,7 +323,11 @@ FROM (SELECT aggregation_target AS actor_id, matched_events_array[plus(step_reached, 1)] AS matching_events, (matched_events_array[1][1]).1 AS timestamp, nullIf((matched_events_array[2][1]).1, 0) AS final_timestamp, (matched_events_array[1][1]).1 AS first_timestamp, steps AS steps, final_timestamp, first_timestamp FROM - (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target + (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> 
user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target FROM (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, e.uuid AS uuid, e.`$session_id` AS `$session_id`, e.`$window_id` AS `$window_id`, if(equals(e.event, 'user signed up'), 1, 0) AS step_0, if(equals(e.event, 'paid'), 1, 0) AS step_1 FROM events AS e @@ -376,7 +400,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -430,7 +458,11 @@ FROM (SELECT aggregation_target AS actor_id, matched_events_array[plus(step_reached, 1)] AS matching_events, (matched_events_array[1][1]).1 AS timestamp, nullIf((matched_events_array[2][1]).1, 0) AS final_timestamp, (matched_events_array[1][1]).1 AS first_timestamp, steps AS steps, final_timestamp, first_timestamp FROM - (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target + (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, 
arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target FROM (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, e.uuid AS uuid, e.`$session_id` AS `$session_id`, e.`$window_id` AS `$window_id`, if(equals(e.event, 'user signed up'), 1, 0) AS step_0, if(equals(e.event, 'paid'), 1, 0) AS step_1 FROM events AS e @@ -503,7 +535,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -557,7 +593,11 @@ FROM (SELECT aggregation_target AS actor_id, matched_events_array[plus(step_reached, 1)] AS matching_events, (matched_events_array[1][1]).1 AS timestamp, nullIf((matched_events_array[2][1]).1, 0) AS final_timestamp, (matched_events_array[1][1]).1 AS first_timestamp, steps AS steps, final_timestamp, first_timestamp FROM - (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target + (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, 
x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target FROM (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, e.uuid AS uuid, e.`$session_id` AS `$session_id`, e.`$window_id` AS `$window_id`, if(equals(e.event, 'user signed up'), 1, 0) AS step_0, if(equals(e.event, 'paid'), 1, 0) AS step_1 FROM events AS e @@ -630,7 +670,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -684,7 +728,11 @@ FROM (SELECT aggregation_target AS actor_id, matched_events_array[plus(step_reached, 1)] AS matching_events, (matched_events_array[1][1]).1 AS timestamp, nullIf((matched_events_array[2][1]).1, 0) AS final_timestamp, (matched_events_array[1][1]).1 AS first_timestamp, steps AS steps, final_timestamp, first_timestamp FROM - (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], 
arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target + (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target FROM (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, e.uuid AS uuid, e.`$session_id` AS `$session_id`, e.`$window_id` AS `$window_id`, if(equals(e.event, 'user signed up'), 1, 0) AS step_0, if(equals(e.event, 'paid'), 1, 0) AS step_1 FROM events AS e @@ -757,7 +805,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -818,7 +870,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, 
x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -877,7 +933,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -931,7 +991,11 @@ FROM (SELECT aggregation_target AS actor_id, matched_events_array[plus(step_reached, 1)] AS matching_events, (matched_events_array[1][1]).1 AS timestamp, nullIf((matched_events_array[2][1]).1, 0) AS final_timestamp, (matched_events_array[1][1]).1 AS first_timestamp, steps AS steps, final_timestamp, first_timestamp FROM - (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target + (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS 
step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target FROM (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, e.uuid AS uuid, e.`$session_id` AS `$session_id`, e.`$window_id` AS `$window_id`, if(equals(e.event, 'user signed up'), 1, 0) AS step_0, if(equals(e.event, 'paid'), 1, 0) AS step_1 FROM events AS e @@ -1004,7 +1068,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -1058,7 +1126,11 @@ FROM (SELECT aggregation_target AS actor_id, matched_events_array[plus(step_reached, 1)] AS matching_events, (matched_events_array[1][1]).1 AS timestamp, nullIf((matched_events_array[2][1]).1, 0) AS final_timestamp, (matched_events_array[1][1]).1 AS first_timestamp, steps AS steps, final_timestamp, first_timestamp FROM - (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target + (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, 
x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target FROM (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, e.uuid AS uuid, e.`$session_id` AS `$session_id`, e.`$window_id` AS `$window_id`, if(equals(e.event, 'user signed up'), 1, 0) AS step_0, if(equals(e.event, 'paid'), 1, 0) AS step_1 FROM events AS e @@ -1131,7 +1203,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -1185,7 +1261,11 @@ FROM (SELECT aggregation_target AS actor_id, matched_events_array[plus(step_reached, 1)] AS matching_events, (matched_events_array[1][1]).1 AS timestamp, nullIf((matched_events_array[2][1]).1, 0) AS final_timestamp, (matched_events_array[1][1]).1 AS first_timestamp, steps AS steps, final_timestamp, first_timestamp FROM - (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS 
matched_events_array, aggregation_target AS aggregation_target + (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target FROM (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, e.uuid AS uuid, e.`$session_id` AS `$session_id`, e.`$window_id` AS `$window_id`, if(equals(e.event, 'user signed up'), 1, 0) AS step_0, if(equals(e.event, 'paid'), 1, 0) AS step_1 FROM events AS e @@ -1258,7 +1338,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -1312,7 +1396,11 @@ FROM (SELECT aggregation_target AS actor_id, matched_events_array[plus(step_reached, 1)] AS matching_events, (matched_events_array[1][1]).1 AS timestamp, nullIf((matched_events_array[2][1]).1, 0) AS final_timestamp, (matched_events_array[1][1]).1 AS first_timestamp, steps AS steps, final_timestamp, first_timestamp FROM - (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 
1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target + (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target FROM (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, e.uuid AS uuid, e.`$session_id` AS `$session_id`, e.`$window_id` AS `$window_id`, if(equals(e.event, 'user signed up'), 1, 0) AS step_0, if(equals(e.event, 'paid'), 1, 0) AS step_1 FROM events AS e @@ -1387,7 +1475,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -1430,7 +1522,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - 
arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -1484,7 +1580,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -1527,7 +1627,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -1576,7 +1680,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, 
x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -1617,7 +1725,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -1668,7 +1780,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -1731,7 +1847,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, 
af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -1794,7 +1914,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -1857,7 +1981,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -1918,7 +2046,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -1967,7 +2099,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - 
arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -2026,7 +2162,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -2089,7 +2229,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -2150,7 +2294,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, 
x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -2191,7 +2339,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -2242,7 +2394,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -2305,7 +2461,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, 
af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -2368,7 +2528,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -2431,7 +2595,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -2492,7 +2660,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -2541,7 +2713,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - 
arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -2600,7 +2776,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -2663,7 +2843,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -2727,7 +2911,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, 
x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -2780,7 +2968,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -2830,7 +3022,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -2900,7 +3096,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, 
af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -2970,7 +3170,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -3040,7 +3244,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -3112,7 +3320,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -3165,7 +3377,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - 
arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -3217,7 +3433,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -3270,7 +3490,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -3320,7 +3544,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, 
x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -3390,7 +3618,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -3460,7 +3692,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -3530,7 +3766,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, 
af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -3602,7 +3842,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -3655,7 +3899,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -3707,7 +3955,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -3760,7 +4012,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - 
arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -3810,7 +4066,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -3880,7 +4140,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -3950,7 +4214,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, 
x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -4020,7 +4288,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -4092,7 +4364,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -4145,7 +4421,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, 
af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -4197,7 +4477,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -4250,7 +4534,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -4300,7 +4588,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -4370,7 +4662,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - 
arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -4440,7 +4736,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -4510,7 +4810,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -4582,7 +4886,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, 
x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -4635,7 +4943,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -4687,7 +4999,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -4740,7 +5056,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, 
af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -4790,7 +5110,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -4860,7 +5184,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -4930,7 +5258,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -5000,7 +5332,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - 
arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -5072,7 +5408,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -5125,7 +5465,11 @@ first_timestamp FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons_udf.ambr index 42c71c0354c39..20142f3e1724f 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons_udf.ambr @@ -9,7 +9,11 @@ matched_events_array[1] AS matching_events FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 
1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(3, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -83,7 +87,11 @@ matched_events_array[2] AS matching_events FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(3, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -157,7 +165,11 @@ matched_events_array[2] AS matching_events FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(3, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons_udf.ambr index 118e756719797..31e930b45c9b9 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons_udf.ambr +++ 
b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons_udf.ambr @@ -9,8 +9,11 @@ matched_events_array[1] AS matching_events FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(3, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3) - and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(3, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -84,8 +87,11 @@ matched_events_array[2] AS matching_events FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(3, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3) - and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(3, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -159,8 +165,11 @@ matched_events_array[2] AS matching_events FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(3, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3) - and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(3, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and 
isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr index d808a2bf566d9..189149c6a8a49 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr @@ -15,8 +15,11 @@ if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3) - and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -85,8 +88,11 @@ if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'step_1', 'strict', groupUniqArray(prop), arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3) - and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'step_1', 'strict', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -162,8 +168,11 @@ if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) 
AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3) - and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -237,8 +246,11 @@ if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_v0(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3) - and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple, + arrayJoin(aggregate_funnel_v2(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -315,8 +327,11 @@ if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_v0(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3) - and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple, + arrayJoin(aggregate_funnel_v2(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), 
ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -393,8 +408,11 @@ if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_v0(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3) - and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple, + arrayJoin(aggregate_funnel_v2(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors.ambr new file mode 100644 index 0000000000000..eb1520cb66926 --- /dev/null +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors.ambr @@ -0,0 +1,520 @@ +# serializer version: 1 +# name: TestFunnelTrendsActors.test_funnel_trend_persons_returns_recordings + ''' + SELECT persons.id, + persons.id AS id, + source.matching_events AS matching_events + FROM + (SELECT aggregation_target AS actor_id, + step_1_matching_events AS matching_events + FROM + (SELECT aggregation_target AS aggregation_target, + toStartOfDay(timestamp) AS entrance_period_start, + max(steps) AS steps_completed, + groupArray(10)(step_0_matching_event) AS step_0_matching_events, + groupArray(10)(step_1_matching_event) AS step_1_matching_events, + groupArray(10)(step_2_matching_event) AS step_2_matching_events, + groupArray(10)(final_matching_event) AS final_matching_events + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + latest_1 AS latest_1, + uuid_1 AS uuid_1, + `$session_id_1` AS `$session_id_1`, + `$window_id_1` AS `$window_id_1`, + step_2 AS step_2, + latest_2 AS latest_2, + uuid_2 AS uuid_2, + `$session_id_2` AS `$session_id_2`, + `$window_id_2` AS `$window_id_2`, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), 
toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time, + tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, + tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, + tuple(latest_2, uuid_2, `$session_id_2`, `$window_id_2`) AS step_2_matching_event, + if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, if(isNull(latest_2), step_1_matching_event, step_2_matching_event))) AS final_matching_event + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + latest_1 AS latest_1, + uuid_1 AS uuid_1, + `$session_id_1` AS `$session_id_1`, + `$window_id_1` AS `$window_id_1`, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2, + last_value(uuid_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_2, + last_value(`$session_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_2`, + last_value(`$window_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_2` + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + latest_1 AS latest_1, + uuid_1 AS uuid_1, + `$session_id_1` AS `$session_id_1`, + `$window_id_1` AS `$window_id_1`, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, uuid_2) AS uuid_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, `$session_id_2`) AS `$session_id_2`, + if(ifNull(less(latest_2, latest_1), 0), NULL, `$window_id_2`) AS `$window_id_2` + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + last_value(uuid_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, + last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, + last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS 
`$window_id_1`, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2, + last_value(uuid_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_2, + last_value(`$session_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_2`, + last_value(`$window_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_2` + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, + e.uuid AS uuid, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, + if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, + if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, + if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, + if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1`, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + if(ifNull(equals(step_2, 1), 0), uuid, NULL) AS uuid_2, + if(ifNull(equals(step_2, 1), 0), e.`$session_id`, NULL) AS `$session_id_2`, + if(ifNull(equals(step_2, 1), 0), e.`$window_id`, NULL) AS `$window_id_2` + FROM events AS e + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, + person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 2) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0)) + WHERE ifNull(equals(entrance_period_start, toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), 0) + GROUP BY aggregation_target, + entrance_period_start) + WHERE ifNull(greaterOrEquals(steps_completed, 2), 0) + ORDER BY aggregation_target ASC) AS source + INNER JOIN + (SELECT person.id AS id + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) + ORDER BY persons.id ASC + LIMIT 101 + OFFSET 0 SETTINGS 
readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelTrendsActors.test_funnel_trend_persons_returns_recordings.1 + ''' + SELECT DISTINCT session_replay_events.session_id AS session_id + FROM session_replay_events + WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, ['s1b'])) + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 + ''' +# --- +# name: TestFunnelTrendsActors.test_funnel_trend_persons_with_drop_off + ''' + SELECT persons.id, + persons.id AS id, + source.matching_events AS matching_events + FROM + (SELECT aggregation_target AS actor_id, + final_matching_events AS matching_events + FROM + (SELECT aggregation_target AS aggregation_target, + toStartOfDay(timestamp) AS entrance_period_start, + max(steps) AS steps_completed, + groupArray(10)(step_0_matching_event) AS step_0_matching_events, + groupArray(10)(step_1_matching_event) AS step_1_matching_events, + groupArray(10)(step_2_matching_event) AS step_2_matching_events, + groupArray(10)(final_matching_event) AS final_matching_events + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + latest_1 AS latest_1, + uuid_1 AS uuid_1, + `$session_id_1` AS `$session_id_1`, + `$window_id_1` AS `$window_id_1`, + step_2 AS step_2, + latest_2 AS latest_2, + uuid_2 AS uuid_2, + `$session_id_2` AS `$session_id_2`, + `$window_id_2` AS `$window_id_2`, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time, + tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, + tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, + tuple(latest_2, uuid_2, `$session_id_2`, `$window_id_2`) AS step_2_matching_event, + if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, if(isNull(latest_2), step_1_matching_event, step_2_matching_event))) AS final_matching_event + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + 
`$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + latest_1 AS latest_1, + uuid_1 AS uuid_1, + `$session_id_1` AS `$session_id_1`, + `$window_id_1` AS `$window_id_1`, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2, + last_value(uuid_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_2, + last_value(`$session_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_2`, + last_value(`$window_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_2` + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + latest_1 AS latest_1, + uuid_1 AS uuid_1, + `$session_id_1` AS `$session_id_1`, + `$window_id_1` AS `$window_id_1`, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, uuid_2) AS uuid_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, `$session_id_2`) AS `$session_id_2`, + if(ifNull(less(latest_2, latest_1), 0), NULL, `$window_id_2`) AS `$window_id_2` + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + last_value(uuid_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, + last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, + last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1`, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2, + last_value(uuid_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_2, + last_value(`$session_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_2`, + last_value(`$window_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_2` + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, + e.uuid AS uuid, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, + if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, + if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, + if(equals(e.event, 
'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, + if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, + if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1`, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + if(ifNull(equals(step_2, 1), 0), uuid, NULL) AS uuid_2, + if(ifNull(equals(step_2, 1), 0), e.`$session_id`, NULL) AS `$session_id_2`, + if(ifNull(equals(step_2, 1), 0), e.`$window_id`, NULL) AS `$window_id_2` + FROM events AS e + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, + person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 2) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0)) + WHERE ifNull(equals(entrance_period_start, toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), 0) + GROUP BY aggregation_target, + entrance_period_start) + WHERE and(ifNull(greaterOrEquals(steps_completed, 1), 0), ifNull(less(steps_completed, 3), 0)) + ORDER BY aggregation_target ASC) AS source + INNER JOIN + (SELECT person.id AS id + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) + ORDER BY persons.id ASC + LIMIT 101 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelTrendsActors.test_funnel_trend_persons_with_drop_off.1 + ''' + SELECT DISTINCT session_replay_events.session_id AS session_id + FROM session_replay_events + WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, ['s1a'])) + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 + ''' +# --- +# name: TestFunnelTrendsActors.test_funnel_trend_persons_with_no_to_step + ''' + SELECT persons.id, + persons.id AS id, + source.matching_events AS matching_events + FROM + (SELECT 
aggregation_target AS actor_id, + final_matching_events AS matching_events + FROM + (SELECT aggregation_target AS aggregation_target, + toStartOfDay(timestamp) AS entrance_period_start, + max(steps) AS steps_completed, + groupArray(10)(step_0_matching_event) AS step_0_matching_events, + groupArray(10)(step_1_matching_event) AS step_1_matching_events, + groupArray(10)(step_2_matching_event) AS step_2_matching_events, + groupArray(10)(final_matching_event) AS final_matching_events + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + latest_1 AS latest_1, + uuid_1 AS uuid_1, + `$session_id_1` AS `$session_id_1`, + `$window_id_1` AS `$window_id_1`, + step_2 AS step_2, + latest_2 AS latest_2, + uuid_2 AS uuid_2, + `$session_id_2` AS `$session_id_2`, + `$window_id_2` AS `$window_id_2`, + if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), 2, 1)) AS steps, + if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(toTimeZone(latest_0, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time, + if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(toTimeZone(latest_1, 'UTC'), toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time, + tuple(latest_0, uuid_0, `$session_id_0`, `$window_id_0`) AS step_0_matching_event, + tuple(latest_1, uuid_1, `$session_id_1`, `$window_id_1`) AS step_1_matching_event, + tuple(latest_2, uuid_2, `$session_id_2`, `$window_id_2`) AS step_2_matching_event, + if(isNull(latest_0), tuple(NULL, NULL, NULL, NULL), if(isNull(latest_1), step_0_matching_event, if(isNull(latest_2), step_1_matching_event, step_2_matching_event))) AS final_matching_event + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + latest_1 AS latest_1, + uuid_1 AS uuid_1, + `$session_id_1` AS `$session_id_1`, + `$window_id_1` AS `$window_id_1`, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2, + last_value(uuid_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_2, + last_value(`$session_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_2`, + last_value(`$window_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_2` + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + latest_1 AS latest_1, + uuid_1 AS uuid_1, + `$session_id_1` 
AS `$session_id_1`, + `$window_id_1` AS `$window_id_1`, + step_2 AS step_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, uuid_2) AS uuid_2, + if(ifNull(less(latest_2, latest_1), 0), NULL, `$session_id_2`) AS `$session_id_2`, + if(ifNull(less(latest_2, latest_1), 0), NULL, `$window_id_2`) AS `$window_id_2` + FROM + (SELECT aggregation_target AS aggregation_target, + timestamp AS timestamp, + step_0 AS step_0, + latest_0 AS latest_0, + uuid_0 AS uuid_0, + `$session_id_0` AS `$session_id_0`, + `$window_id_0` AS `$window_id_0`, + step_1 AS step_1, + min(latest_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1, + last_value(uuid_1) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_1, + last_value(`$session_id_1`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_1`, + last_value(`$window_id_1`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_1`, + step_2 AS step_2, + min(latest_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2, + last_value(uuid_2) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS uuid_2, + last_value(`$session_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$session_id_2`, + last_value(`$window_id_2`) OVER (PARTITION BY aggregation_target + ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS `$window_id_2` + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, + e.uuid AS uuid, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0, + if(ifNull(equals(step_0, 1), 0), uuid, NULL) AS uuid_0, + if(ifNull(equals(step_0, 1), 0), e.`$session_id`, NULL) AS `$session_id_0`, + if(ifNull(equals(step_0, 1), 0), e.`$window_id`, NULL) AS `$window_id_0`, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1, + if(ifNull(equals(step_1, 1), 0), uuid, NULL) AS uuid_1, + if(ifNull(equals(step_1, 1), 0), e.`$session_id`, NULL) AS `$session_id_1`, + if(ifNull(equals(step_1, 1), 0), e.`$window_id`, NULL) AS `$window_id_1`, + if(equals(e.event, 'step three'), 1, 0) AS step_2, + if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2, + if(ifNull(equals(step_2, 1), 0), uuid, NULL) AS uuid_2, + if(ifNull(equals(step_2, 1), 0), e.`$session_id`, NULL) AS `$session_id_2`, + if(ifNull(equals(step_2, 1), 0), e.`$window_id`, NULL) AS `$window_id_2` + FROM events AS e + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, + person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 2) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS 
e__override ON equals(e.distinct_id, e__override.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))) + WHERE ifNull(equals(step_0, 1), 0)) + WHERE ifNull(equals(entrance_period_start, toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), 0) + GROUP BY aggregation_target, + entrance_period_start) + WHERE ifNull(greaterOrEquals(steps_completed, 3), 0) + ORDER BY aggregation_target ASC) AS source + INNER JOIN + (SELECT person.id AS id + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) + ORDER BY persons.id ASC + LIMIT 101 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelTrendsActors.test_funnel_trend_persons_with_no_to_step.1 + ''' + SELECT DISTINCT session_replay_events.session_id AS session_id + FROM session_replay_events + WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, ['s1c'])) + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 + ''' +# --- diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors_udf.ambr new file mode 100644 index 0000000000000..297cbec2c218e --- /dev/null +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors_udf.ambr @@ -0,0 +1,229 @@ +# serializer version: 1 +# name: TestFunnelTrendsActorsUDF.test_funnel_trend_persons_returns_recordings + ''' + SELECT persons.id, + persons.id AS id, + source.matching_events AS matching_events + FROM + (SELECT aggregation_target AS actor_id, + matching_events AS matching_events + FROM + (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toUInt64(toDateTime(toStartOfDay(timestamp), 'UTC')), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, + arrayJoin(aggregate_funnel_array_trends_v2(0, 2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.5), 1), 0), ifNull(equals(x.5, x_before.5), isNull(x.5) + and isNull(x_before.5)), ifNull(equals(x.5, x_after.5), isNull(x.5) + and isNull(x_after.5)), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and 
isNull(x_after.4)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, + toTimeZone(toDateTime(toUInt64(af_tuple.1), 'UTC'), 'UTC') AS entrance_period_start, + af_tuple.2 AS success_bool, + af_tuple.3 AS breakdown, + groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, + mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, + [user_events_map[af_tuple.4]] AS matching_events, + aggregation_target AS aggregation_target + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, + e.uuid AS uuid, + e.`$session_id` AS `$session_id`, + e.`$window_id` AS `$window_id`, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2 + FROM events AS e + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, + person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 2) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0)))) + GROUP BY aggregation_target SETTINGS date_time_output_format='iso', + date_time_input_format='best_effort') + WHERE and(ifNull(equals(success_bool, 1), 0), ifNull(equals(entrance_period_start, toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), 0)) + ORDER BY aggregation_target ASC) AS source + INNER JOIN + (SELECT person.id AS id + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) + ORDER BY persons.id ASC + LIMIT 101 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelTrendsActorsUDF.test_funnel_trend_persons_returns_recordings.1 + ''' + SELECT DISTINCT session_replay_events.session_id AS session_id + FROM session_replay_events + WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, ['s1b'])) + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + 
format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 + ''' +# --- +# name: TestFunnelTrendsActorsUDF.test_funnel_trend_persons_with_drop_off + ''' + SELECT persons.id, + persons.id AS id, + source.matching_events AS matching_events + FROM + (SELECT aggregation_target AS actor_id, + matching_events AS matching_events + FROM + (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toUInt64(toDateTime(toStartOfDay(timestamp), 'UTC')), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, + arrayJoin(aggregate_funnel_array_trends_v2(0, 3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.5), 1), 0), ifNull(equals(x.5, x_before.5), isNull(x.5) + and isNull(x_before.5)), ifNull(equals(x.5, x_after.5), isNull(x.5) + and isNull(x_after.5)), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, + toTimeZone(toDateTime(toUInt64(af_tuple.1), 'UTC'), 'UTC') AS entrance_period_start, + af_tuple.2 AS success_bool, + af_tuple.3 AS breakdown, + groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, + mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, + [user_events_map[af_tuple.4]] AS matching_events, + aggregation_target AS aggregation_target + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, + e.uuid AS uuid, + e.`$session_id` AS `$session_id`, + e.`$window_id` AS `$window_id`, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2 + FROM events AS e + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, + person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 2) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0)))) + GROUP BY aggregation_target SETTINGS date_time_output_format='iso', + date_time_input_format='best_effort') + WHERE and(ifNull(notEquals(success_bool, 1), 1), ifNull(equals(entrance_period_start, toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), 0)) + ORDER BY aggregation_target ASC) AS source + INNER JOIN + (SELECT person.id AS id + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING 
and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) + ORDER BY persons.id ASC + LIMIT 101 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelTrendsActorsUDF.test_funnel_trend_persons_with_drop_off.1 + ''' + SELECT DISTINCT session_replay_events.session_id AS session_id + FROM session_replay_events + WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, ['s1a'])) + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 + ''' +# --- +# name: TestFunnelTrendsActorsUDF.test_funnel_trend_persons_with_no_to_step + ''' + SELECT persons.id, + persons.id AS id, + source.matching_events AS matching_events + FROM + (SELECT aggregation_target AS actor_id, + matching_events AS matching_events + FROM + (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toUInt64(toDateTime(toStartOfDay(timestamp), 'UTC')), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, + arrayJoin(aggregate_funnel_array_trends_v2(0, 3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.5), 1), 0), ifNull(equals(x.5, x_before.5), isNull(x.5) + and isNull(x_before.5)), ifNull(equals(x.5, x_after.5), isNull(x.5) + and isNull(x_after.5)), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, + toTimeZone(toDateTime(toUInt64(af_tuple.1), 'UTC'), 'UTC') AS entrance_period_start, + af_tuple.2 AS success_bool, + af_tuple.3 AS breakdown, + groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, + mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, + [user_events_map[af_tuple.4]] AS matching_events, + aggregation_target AS aggregation_target + FROM + (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, + if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, + e.uuid AS uuid, + e.`$session_id` AS `$session_id`, + e.`$window_id` AS `$window_id`, + if(equals(e.event, 'step one'), 1, 0) AS step_0, + if(equals(e.event, 'step two'), 1, 0) AS step_1, + if(equals(e.event, 'step three'), 1, 0) AS step_2 + FROM events AS e + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, + person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE 
equals(person_distinct_id_overrides.team_id, 2) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) + WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2021-05-07 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('step one', 'step three', 'step two'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0)))) + GROUP BY aggregation_target SETTINGS date_time_output_format='iso', + date_time_input_format='best_effort') + WHERE and(ifNull(equals(success_bool, 1), 0), ifNull(equals(entrance_period_start, toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), 0)) + ORDER BY aggregation_target ASC) AS source + INNER JOIN + (SELECT person.id AS id + FROM person + WHERE equals(person.team_id, 2) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) + ORDER BY persons.id ASC + LIMIT 101 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=1 + ''' +# --- +# name: TestFunnelTrendsActorsUDF.test_funnel_trend_persons_with_no_to_step.1 + ''' + SELECT DISTINCT session_replay_events.session_id AS session_id + FROM session_replay_events + WHERE and(equals(session_replay_events.team_id, 2), ifNull(greaterOrEquals(toTimeZone(session_replay_events.min_first_timestamp, 'UTC'), minus(toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC'), toIntervalDay(21))), 0), in(session_replay_events.session_id, ['s1c'])) + LIMIT 100 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 + ''' +# --- diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_udf.ambr index 043f02570c02f..7583494134bf7 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_udf.ambr @@ -7,14 +7,20 @@ if(ifNull(greater(reached_from_step_count, 0), 0), round(multiply(divide(reached_to_step_count, reached_from_step_count), 100), 2), 0) AS conversion_rate, data.breakdown AS prop FROM - (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toUInt64(toDateTime(toStartOfDay(timestamp), 'UTC')), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_trends_v0(0, 3, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), 
toUInt64(toDateTime(toStartOfDay(timestamp), 'UTC')), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, + arrayJoin(aggregate_funnel_array_trends_v2(0, 3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.5), 1), 0), ifNull(equals(x.5, x_before.5), isNull(x.5) + and isNull(x_before.5)), ifNull(equals(x.5, x_after.5), isNull(x.5) + and isNull(x_after.5)), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, toTimeZone(toDateTime(toUInt64(af_tuple.1), 'UTC'), 'UTC') AS entrance_period_start, af_tuple.2 AS success_bool, - af_tuple.3 AS breakdown + af_tuple.3 AS breakdown, + aggregation_target AS aggregation_target FROM (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, + e.uuid AS uuid, if(equals(e.event, 'step one'), 1, 0) AS step_0, if(equals(e.event, 'step two'), 1, 0) AS step_1, if(equals(e.event, 'step three'), 1, 0) AS step_2 @@ -53,14 +59,20 @@ if(ifNull(greater(reached_from_step_count, 0), 0), round(multiply(divide(reached_to_step_count, reached_from_step_count), 100), 2), 0) AS conversion_rate, data.breakdown AS prop FROM - (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toUInt64(toDateTime(toStartOfDay(timestamp), 'US/Pacific')), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_trends_v0(0, 3, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toUInt64(toDateTime(toStartOfDay(timestamp), 'US/Pacific')), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, + arrayJoin(aggregate_funnel_array_trends_v2(0, 3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.5), 1), 0), ifNull(equals(x.5, x_before.5), isNull(x.5) + and isNull(x_before.5)), ifNull(equals(x.5, x_after.5), isNull(x.5) + and isNull(x_after.5)), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, toTimeZone(toDateTime(toUInt64(af_tuple.1), 'US/Pacific'), 'US/Pacific') AS entrance_period_start, af_tuple.2 AS success_bool, - af_tuple.3 AS breakdown + af_tuple.3 AS breakdown, + aggregation_target AS aggregation_target FROM (SELECT toTimeZone(e.timestamp, 'US/Pacific') AS timestamp, if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, + e.uuid AS uuid, if(equals(e.event, 'step one'), 1, 0) AS step_0, if(equals(e.event, 'step two'), 1, 0) AS step_1, if(equals(e.event, 'step three'), 1, 0) AS step_2 @@ -99,14 +111,20 @@ if(ifNull(greater(reached_from_step_count, 0), 0), round(multiply(divide(reached_to_step_count, 
reached_from_step_count), 100), 2), 0) AS conversion_rate, data.breakdown AS prop FROM - (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toUInt64(toDateTime(toStartOfWeek(timestamp, 0), 'UTC')), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_trends_v0(0, 3, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toUInt64(toDateTime(toStartOfWeek(timestamp, 0), 'UTC')), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, + arrayJoin(aggregate_funnel_array_trends_v2(0, 3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.5), 1), 0), ifNull(equals(x.5, x_before.5), isNull(x.5) + and isNull(x_before.5)), ifNull(equals(x.5, x_after.5), isNull(x.5) + and isNull(x_after.5)), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, toTimeZone(toDateTime(toUInt64(af_tuple.1), 'UTC'), 'UTC') AS entrance_period_start, af_tuple.2 AS success_bool, - af_tuple.3 AS breakdown + af_tuple.3 AS breakdown, + aggregation_target AS aggregation_target FROM (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, + e.uuid AS uuid, if(equals(e.event, 'step one'), 1, 0) AS step_0, if(equals(e.event, 'step two'), 1, 0) AS step_1, if(equals(e.event, 'step three'), 1, 0) AS step_2 diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr index d7da60dfba80d..2d23231c6e378 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr @@ -20,7 +20,11 @@ breakdown AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(3, 15, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(3, 15, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -69,7 +73,11 @@ (SELECT aggregation_target AS actor_id FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, 
[], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(3, 15, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(3, 15, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -146,7 +154,11 @@ breakdown AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(3, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -221,7 +233,11 @@ breakdown AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -283,7 +299,11 @@ breakdown AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(3, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(3, 1209600, 'first_touch', 
'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -343,7 +363,11 @@ (SELECT aggregation_target AS actor_id FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(3, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -411,7 +435,11 @@ (SELECT aggregation_target AS actor_id FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(3, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -479,7 +507,11 @@ (SELECT aggregation_target AS actor_id FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(3, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(3, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and 
isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -562,7 +594,11 @@ breakdown AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -619,7 +655,11 @@ breakdown AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', [[]], arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -673,7 +713,11 @@ if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, 
arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -742,7 +786,11 @@ if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'step_1', 'ordered', groupUniqArray(prop), events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'step_1', 'ordered', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -818,7 +866,11 @@ if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, - arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_array_v2(2, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -892,7 +944,11 @@ if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_v0(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_v2(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS 
step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -969,7 +1025,11 @@ if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_v0(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_v2(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, @@ -1046,7 +1106,11 @@ if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop FROM (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array, - arrayJoin(aggregate_funnel_v0(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), events_array)) AS af_tuple, + arrayJoin(aggregate_funnel_v2(3, 1209600, 'first_touch', 'ordered', groupUniqArray(prop), arrayFilter((x, x_before, x_after) -> not(and(ifNull(lessOrEquals(length(x.4), 1), 0), ifNull(equals(x.4, x_before.4), isNull(x.4) + and isNull(x_before.4)), ifNull(equals(x.4, x_after.4), isNull(x.4) + and isNull(x_after.4)), ifNull(equals(x.3, x_before.3), isNull(x.3) + and isNull(x_before.3)), ifNull(equals(x.3, x_after.3), isNull(x.3) + and isNull(x_after.3)), ifNull(greater(x.1, x_before.1), 0), ifNull(less(x.1, x_after.1), 0))), events_array, arrayRotateRight(events_array, 1), arrayRotateLeft(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_correlation.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_correlation.py index 44b92a5579b4a..8608bc75b59a7 100644 --- a/posthog/hogql_queries/insights/funnels/test/test_funnel_correlation.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_correlation.py @@ -1306,11 +1306,6 @@ def test_correlation_with_multiple_properties(self): timestamp="2020-01-04T14:00:00Z", ) - result, _ = self._get_events_for_filters( - filters, - funnelCorrelationType=FunnelCorrelationResultsType.PROPERTIES, - funnelCorrelationNames=["$browser", "$nice"], - ) result, _ = self._get_events_for_filters( filters, funnelCorrelationType=FunnelCorrelationResultsType.PROPERTIES, diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_correlation_actors.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_correlation_actors.py index 594d075b426d5..6fb67421cafa2 100644 --- a/posthog/hogql_queries/insights/funnels/test/test_funnel_correlation_actors.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_correlation_actors.py @@ -1,5 +1,6 @@ from typing import 
Any, Optional, cast from datetime import datetime, timedelta +from unittest import skip from uuid import UUID from django.utils import timezone @@ -7,7 +8,6 @@ from posthog.constants import INSIGHT_FUNNELS from posthog.hogql_queries.actors_query_runner import ActorsQueryRunner -from posthog.hogql_queries.insights.funnels.funnels_query_runner import FunnelsQueryRunner from posthog.hogql_queries.legacy_compatibility.filter_to_query import filter_to_query from posthog.models.team.team import Team from posthog.schema import ( @@ -499,6 +499,7 @@ def test_funnel_correlation_on_properties_with_recordings(self): @snapshot_clickhouse_queries @freeze_time("2021-01-02 00:00:00.000Z") + @skip("Works locally and works after you tmate onto github actions and run it, but fails in CI") def test_strict_funnel_correlation_with_recordings(self): # First use that successfully completes the strict funnel p1 = _create_person(distinct_ids=["user_1"], team=self.team, properties={"foo": "bar"}) @@ -577,8 +578,6 @@ def test_strict_funnel_correlation_with_recordings(self): {"id": "insight analyzed", "order": 1}, ], } - query = cast(FunnelsQuery, filter_to_query(filters)) - results = FunnelsQueryRunner(query=query, team=self.team).calculate().results results = get_actors( filters, diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_strict_udf.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_strict_udf.py index 6b6eb7d0f06db..5cc745e382e3f 100644 --- a/posthog/hogql_queries/insights/funnels/test/test_funnel_strict_udf.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_strict_udf.py @@ -7,7 +7,6 @@ from posthog.constants import INSIGHT_FUNNELS, FunnelOrderType from posthog.hogql.constants import HogQLGlobalSettings, MAX_BYTES_BEFORE_EXTERNAL_GROUP_BY from posthog.hogql.query import execute_hogql_query -from posthog.hogql_queries.insights.funnels.funnel_udf import udf_event_array_filter from posthog.hogql_queries.insights.funnels.funnels_query_runner import FunnelsQueryRunner from posthog.hogql_queries.insights.funnels.test.test_funnel_strict import ( BaseTestFunnelStrictStepsBreakdown, @@ -22,7 +21,6 @@ BreakdownFilter, FunnelsFilter, BreakdownAttributionType, - StepOrderValue, ) from posthog.test.base import _create_person, _create_event @@ -58,7 +56,7 @@ def test_redundant_event_filtering_strict_funnel(self): runner = FunnelsQueryRunner(query=query, team=self.team) inner_aggregation_query = runner.funnel_class._inner_aggregation_query() inner_aggregation_query.select.append( - parse_expr(f"{udf_event_array_filter(StepOrderValue.STRICT)} AS filtered_array") + parse_expr(f"{runner.funnel_class.udf_event_array_filter()} AS filtered_array") ) inner_aggregation_query.having = None response = execute_hogql_query( @@ -71,8 +69,8 @@ def test_redundant_event_filtering_strict_funnel(self): allow_experimental_analyzer=True, ), ) - # Make sure the events have been condensed down to one - self.assertEqual(1, len(response.results[0][-1])) + # Make sure the events have been condensed down to two + self.assertEqual(2, len(response.results[0][-1])) def test_different_prop_val_in_strict_filter(self): funnels_query = FunnelsQuery( diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_persons.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_actors.py similarity index 97% rename from posthog/hogql_queries/insights/funnels/test/test_funnel_trends_persons.py rename to posthog/hogql_queries/insights/funnels/test/test_funnel_trends_actors.py index 
f112fb6c84216..84c7d5bcd5885 100644 --- a/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_persons.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_actors.py @@ -31,7 +31,9 @@ @freeze_time("2021-05-01") -class TestFunnelTrendsPersons(ClickhouseTestMixin, APIBaseTest): +class BaseTestFunnelTrendsActors(ClickhouseTestMixin, APIBaseTest): + __test__ = False + @snapshot_clickhouse_queries def test_funnel_trend_persons_returns_recordings(self): persons = journeys_for( @@ -169,3 +171,7 @@ def test_funnel_trend_persons_with_drop_off(self): [next(iter(results[0][2]))["session_id"]], ["s1a"], ) + + +class TestFunnelTrendsActors(BaseTestFunnelTrendsActors): + __test__ = True diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_actors_udf.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_actors_udf.py new file mode 100644 index 0000000000000..1d6fef8766290 --- /dev/null +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_actors_udf.py @@ -0,0 +1,12 @@ +from unittest.mock import Mock, patch + + +from posthog.hogql_queries.insights.funnels.test.test_funnel_trends_actors import BaseTestFunnelTrendsActors + + +@patch( + "posthoganalytics.feature_enabled", + new=Mock(side_effect=lambda key, *args, **kwargs: key == "insight-funnels-use-udf-trends"), +) +class TestFunnelTrendsActorsUDF(BaseTestFunnelTrendsActors): + __test__ = True diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_udf.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_udf.py index d341f7c272be3..1b4f3e487003a 100644 --- a/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_udf.py +++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_udf.py @@ -2,7 +2,10 @@ from typing import cast from unittest.mock import patch, Mock +from hogql_parser import parse_expr from posthog.constants import INSIGHT_FUNNELS, TRENDS_LINEAR, FunnelOrderType +from posthog.hogql.constants import HogQLGlobalSettings, MAX_BYTES_BEFORE_EXTERNAL_GROUP_BY +from posthog.hogql.query import execute_hogql_query from posthog.hogql_queries.insights.funnels.funnels_query_runner import FunnelsQueryRunner from posthog.hogql_queries.insights.funnels.test.test_funnel_trends import BaseTestFunnelTrends from posthog.hogql_queries.legacy_compatibility.filter_to_query import filter_to_query @@ -27,6 +30,52 @@ class TestFunnelTrendsUDF(BaseTestFunnelTrends): __test__ = True + def test_redundant_event_filtering(self): + filters = { + "insight": INSIGHT_FUNNELS, + "date_from": "-14d", + "funnel_viz_type": "trends", + "interval": "day", + "events": [ + {"id": "$pageview", "order": 1}, + {"id": "insight viewed", "order": 2}, + ], + } + + _create_person( + distinct_ids=["many_other_events"], + team_id=self.team.pk, + properties={"test": "okay"}, + ) + now = datetime.datetime.now() + for i in range(10): + _create_event( + team=self.team, + event="$pageview", + distinct_id="many_other_events", + timestamp=now - datetime.timedelta(days=11 + i), + ) + + query = cast(FunnelsQuery, filter_to_query(filters)) + runner = FunnelsQueryRunner(query=query, team=self.team) + inner_aggregation_query = runner.funnel_class._inner_aggregation_query() + inner_aggregation_query.select.append( + parse_expr(f"{runner.funnel_class.udf_event_array_filter()} AS filtered_array") + ) + inner_aggregation_query.having = None + response = execute_hogql_query( + query_type="FunnelsQuery", + query=inner_aggregation_query, + team=self.team, + 
settings=HogQLGlobalSettings( + # Make sure funnel queries never OOM + max_bytes_before_external_group_by=MAX_BYTES_BEFORE_EXTERNAL_GROUP_BY, + allow_experimental_analyzer=True, + ), + ) + # Make sure the events have been condensed down to two + self.assertEqual(2, len(response.results[0][-1])) + def test_assert_udf_flag_is_working(self): filters = { "insight": INSIGHT_FUNNELS, diff --git a/posthog/hogql_queries/insights/funnels/utils.py b/posthog/hogql_queries/insights/funnels/utils.py index b071bcd7a1d86..e0de04359f6c2 100644 --- a/posthog/hogql_queries/insights/funnels/utils.py +++ b/posthog/hogql_queries/insights/funnels/utils.py @@ -45,16 +45,23 @@ def get_funnel_actor_class(funnelsFilter: FunnelsFilter, use_udf=False): FunnelStrictActors, FunnelUnorderedActors, FunnelTrendsActors, + FunnelTrendsUDF, ) - if funnelsFilter.funnelVizType == FunnelVizType.TRENDS: - return FunnelTrendsActors if funnelsFilter.funnelOrderType == StepOrderValue.UNORDERED: return FunnelUnorderedActors + + if funnelsFilter.funnelVizType == FunnelVizType.TRENDS: + if use_udf: + return FunnelTrendsUDF + return FunnelTrendsActors + if use_udf: return FunnelUDF + if funnelsFilter.funnelOrderType == StepOrderValue.STRICT: return FunnelStrictActors + return FunnelActors diff --git a/posthog/hogql_queries/insights/lifecycle_query_runner.py b/posthog/hogql_queries/insights/lifecycle_query_runner.py index b56632e9de4b5..1aaf3db5d01c7 100644 --- a/posthog/hogql_queries/insights/lifecycle_query_runner.py +++ b/posthog/hogql_queries/insights/lifecycle_query_runner.py @@ -181,7 +181,7 @@ def calculate(self) -> LifecycleQueryResponse: action_object = {} label = "{} - {}".format("", val[2]) if isinstance(self.query.series[0], ActionsNode): - action = Action.objects.get(pk=int(self.query.series[0].id), team=self.team) + action = Action.objects.get(pk=int(self.query.series[0].id), team__project_id=self.team.project_id) label = "{} - {}".format(action.name, val[2]) action_object = { "id": str(action.pk), @@ -248,7 +248,7 @@ def event_filter(self) -> ast.Expr: with self.timings.measure("series_filters"): for serie in self.query.series or []: if isinstance(serie, ActionsNode): - action = Action.objects.get(pk=int(serie.id), team=self.team) + action = Action.objects.get(pk=int(serie.id), team__project_id=self.team.project_id) event_filters.append(action_to_expr(action)) elif isinstance(serie, EventsNode): if serie.event is not None: diff --git a/posthog/hogql_queries/insights/retention_query_runner.py b/posthog/hogql_queries/insights/retention_query_runner.py index 27db6819148e1..9f96ef654199b 100644 --- a/posthog/hogql_queries/insights/retention_query_runner.py +++ b/posthog/hogql_queries/insights/retention_query_runner.py @@ -87,7 +87,7 @@ def filter_timestamp(self) -> ast.Expr: def _get_events_for_entity(self, entity: RetentionEntity) -> list[str | None]: if entity.type == EntityType.ACTIONS and entity.id: - action = Action.objects.get(pk=int(entity.id)) + action = Action.objects.get(pk=int(entity.id), team__project_id=self.team.project_id) return action.get_step_events() return [entity.id] if isinstance(entity.id, str) else [None] diff --git a/posthog/hogql_queries/insights/stickiness_query_runner.py b/posthog/hogql_queries/insights/stickiness_query_runner.py index 4fd7b096d006d..f0fc017165e8d 100644 --- a/posthog/hogql_queries/insights/stickiness_query_runner.py +++ b/posthog/hogql_queries/insights/stickiness_query_runner.py @@ -276,7 +276,7 @@ def where_clause(self, series_with_extra: SeriesWithExtras) -> ast.Expr: ) elif 
isinstance(series, ActionsNode): try: - action = Action.objects.get(pk=int(series.id), team=self.team) + action = Action.objects.get(pk=int(series.id), team__project_id=self.team.project_id) filters.append(action_to_expr(action)) except Action.DoesNotExist: # If an action doesn't exist, we want to return no events @@ -331,7 +331,7 @@ def series_event(self, series: EventsNode | ActionsNode | DataWarehouseNode) -> if isinstance(series, ActionsNode): # TODO: Can we load the Action in more efficiently? - action = Action.objects.get(pk=int(series.id), team=self.team) + action = Action.objects.get(pk=int(series.id), team__project_id=self.team.project_id) return action.name def intervals_num(self): diff --git a/posthog/hogql_queries/insights/trends/test/test_trends.py b/posthog/hogql_queries/insights/trends/test/test_trends.py index 7f7977f406bfb..18e0e2b267fda 100644 --- a/posthog/hogql_queries/insights/trends/test/test_trends.py +++ b/posthog/hogql_queries/insights/trends/test/test_trends.py @@ -3850,6 +3850,21 @@ def test_action_filtering(self): self.assertEntityResponseEqual(action_response, event_response) + def test_action_filtering_for_action_in_different_env_of_project(self): + sign_up_action, person = self._create_events() + other_team_in_project = Team.objects.create(organization=self.organization, project=self.project) + sign_up_action.team = other_team_in_project + sign_up_action.save() + + action_response = self._run( + Filter(team=self.team, data={"actions": [{"id": sign_up_action.id}]}), + self.team, + ) + event_response = self._run(Filter(team=self.team, data={"events": [{"id": "sign up"}]}), self.team) + self.assertEqual(len(action_response), 1) + + self.assertEntityResponseEqual(action_response, event_response) + @also_test_with_person_on_events_v2 @snapshot_clickhouse_queries def test_action_filtering_with_cohort(self): diff --git a/posthog/hogql_queries/insights/trends/trends_actors_query_builder.py b/posthog/hogql_queries/insights/trends/trends_actors_query_builder.py index 1ce6a5b593f28..84f97da6e0c40 100644 --- a/posthog/hogql_queries/insights/trends/trends_actors_query_builder.py +++ b/posthog/hogql_queries/insights/trends/trends_actors_query_builder.py @@ -293,7 +293,7 @@ def _event_or_action_where_expr(self) -> ast.Expr | None: if isinstance(self.entity, ActionsNode): # Actions try: - action = Action.objects.get(pk=int(self.entity.id), team=self.team) + action = Action.objects.get(pk=int(self.entity.id), team__project_id=self.team.project_id) return action_to_expr(action) except Action.DoesNotExist: # If an action doesn't exist, we want to return no events diff --git a/posthog/hogql_queries/insights/trends/trends_query_builder.py b/posthog/hogql_queries/insights/trends/trends_query_builder.py index 7a93be2d548b0..ee1165c41c624 100644 --- a/posthog/hogql_queries/insights/trends/trends_query_builder.py +++ b/posthog/hogql_queries/insights/trends/trends_query_builder.py @@ -739,7 +739,7 @@ def _event_or_action_where_expr(self) -> ast.Expr | None: # Actions if isinstance(self.series, ActionsNode): try: - action = Action.objects.get(pk=int(self.series.id), team=self.team) + action = Action.objects.get(pk=int(self.series.id), team__project_id=self.team.project_id) return action_to_expr(action) except Action.DoesNotExist: # If an action doesn't exist, we want to return no events diff --git a/posthog/hogql_queries/insights/trends/trends_query_runner.py b/posthog/hogql_queries/insights/trends/trends_query_runner.py index 4d7e6ec6198ff..8c490a58684ed 100644 --- 
a/posthog/hogql_queries/insights/trends/trends_query_runner.py +++ b/posthog/hogql_queries/insights/trends/trends_query_runner.py @@ -641,7 +641,7 @@ def series_event(self, series: Union[EventsNode, ActionsNode, DataWarehouseNode] return series.event if isinstance(series, ActionsNode): # TODO: Can we load the Action in more efficiently? - action = Action.objects.get(pk=int(series.id), team=self.team) + action = Action.objects.get(pk=int(series.id), team__project_id=self.team.project_id) return action.name if isinstance(series, DataWarehouseNode): diff --git a/posthog/hogql_queries/query_runner.py b/posthog/hogql_queries/query_runner.py index 8d62fabf66c82..664430cc7da04 100644 --- a/posthog/hogql_queries/query_runner.py +++ b/posthog/hogql_queries/query_runner.py @@ -33,6 +33,7 @@ FunnelsQuery, HogQLQuery, HogQLQueryModifiers, + HogQLVariable, InsightActorsQuery, InsightActorsQueryOptions, LifecycleQuery, @@ -365,10 +366,10 @@ def get_query_runner( limit_context=limit_context, ) - if kind == "ExperimentFunnelQuery": - from .experiments.experiment_funnel_query_runner import ExperimentFunnelQueryRunner + if kind == "ExperimentFunnelsQuery": + from .experiments.experiment_funnels_query_runner import ExperimentFunnelsQueryRunner - return ExperimentFunnelQueryRunner( + return ExperimentFunnelsQueryRunner( query=query, team=team, timings=timings, @@ -376,10 +377,10 @@ def get_query_runner( limit_context=limit_context, ) - if kind == "ExperimentTrendQuery": - from .experiments.experiment_trend_query_runner import ExperimentTrendQueryRunner + if kind == "ExperimentTrendsQuery": + from .experiments.experiment_trends_query_runner import ExperimentTrendsQueryRunner - return ExperimentTrendQueryRunner( + return ExperimentTrendsQueryRunner( query=query, team=team, timings=timings, @@ -721,6 +722,20 @@ def _is_stale(self, last_refresh: Optional[datetime], lazy: bool = False) -> boo def _refresh_frequency(self) -> timedelta: return timedelta(minutes=1) + def apply_variable_overrides(self, variable_overrides: list[HogQLVariable]): + """Irreversably update self.query with provided variable overrides.""" + if not hasattr(self.query, "variables") or not self.query.kind == "HogQLQuery" or len(variable_overrides) == 0: + return + + assert isinstance(self.query, HogQLQuery) + + if not self.query.variables: + return + + for variable in variable_overrides: + if self.query.variables.get(variable.variableId): + self.query.variables[variable.variableId] = variable + def apply_dashboard_filters(self, dashboard_filter: DashboardFilter): """Irreversably update self.query with provided dashboard filters.""" if not hasattr(self.query, "properties") or not hasattr(self.query, "dateRange"): diff --git a/posthog/hogql_queries/test/__snapshots__/test_error_tracking_query_runner.ambr b/posthog/hogql_queries/test/__snapshots__/test_error_tracking_query_runner.ambr index 14ffed468c757..9a973fc39730b 100644 --- a/posthog/hogql_queries/test/__snapshots__/test_error_tracking_query_runner.ambr +++ b/posthog/hogql_queries/test/__snapshots__/test_error_tracking_query_runner.ambr @@ -205,7 +205,7 @@ FROM "posthog_errortrackinggroup" WHERE ("posthog_errortrackinggroup"."team_id" = 2 AND "posthog_errortrackinggroup"."fingerprint" = (ARRAY['SyntaxError', - 'Cannot use ''in'' operator to search for ''wireframes'' in ‹�” ýf�ì½é–"¹’0ø*Lö¹SY A�Ξ÷ԝf + 'Cannot use ''in'' operator to search for ''wireframes'' in ‹�” ýf�ì½é–"¹’0ø*Lö¹SY A�Ξ÷ԝf ˆ�Ø'])::text[]) ''' # --- @@ -506,7 +506,7 @@ WHERE equals(person.team_id, 2) GROUP BY person.id HAVING 
and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 2), equals(events.event, '$exception'), and(less(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-01-11 00:00:00.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-01-10 00:00:00.000000', 6, 'UTC')), ifNull(notILike(events__person.properties___email, '%@posthog.com%'), 1)), or(ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_list'), ''), 'null'), '^"|"$', '')), lower('databasenot')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_stack_trace_raw'), ''), 'null'), '^"|"$', '')), lower('databasenot')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_type'), ''), 'null'), '^"|"$', '')), lower('databasenot')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_message'), ''), 'null'), '^"|"$', '')), lower('databasenot')), 0), 0))) + WHERE and(equals(events.team_id, 2), equals(events.event, '$exception'), and(less(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-01-11 00:00:00.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-01-10 00:00:00.000000', 6, 'UTC')), ifNull(notILike(events__person.properties___email, '%@posthog.com%'), 1)), or(ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_list'), ''), 'null'), '^"|"$', '')), lower('databasenot')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_type'), ''), 'null'), '^"|"$', '')), lower('databasenot')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_message'), ''), 'null'), '^"|"$', '')), lower('databasenot')), 0), 0))) GROUP BY fingerprint LIMIT 101 OFFSET 0 SETTINGS readonly=2, @@ -546,7 +546,7 @@ WHERE equals(person.team_id, 2) GROUP BY person.id HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 2), equals(events.event, '$exception'), ifNull(notILike(events__person.properties___email, '%@posthog.com%'), 1), and(or(ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_list'), ''), 'null'), '^"|"$', '')), lower('databasenotfoundX')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_stack_trace_raw'), ''), 'null'), '^"|"$', '')), lower('databasenotfoundX')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_type'), ''), 'null'), '^"|"$', '')), 
lower('databasenotfoundX')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_message'), ''), 'null'), '^"|"$', '')), lower('databasenotfoundX')), 0), 0)), or(ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_list'), ''), 'null'), '^"|"$', '')), lower('clickhouse/client/execute.py')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_stack_trace_raw'), ''), 'null'), '^"|"$', '')), lower('clickhouse/client/execute.py')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_type'), ''), 'null'), '^"|"$', '')), lower('clickhouse/client/execute.py')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_message'), ''), 'null'), '^"|"$', '')), lower('clickhouse/client/execute.py')), 0), 0)))) + WHERE and(equals(events.team_id, 2), equals(events.event, '$exception'), ifNull(notILike(events__person.properties___email, '%@posthog.com%'), 1), and(or(ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_list'), ''), 'null'), '^"|"$', '')), lower('databasenotfoundX')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_type'), ''), 'null'), '^"|"$', '')), lower('databasenotfoundX')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_message'), ''), 'null'), '^"|"$', '')), lower('databasenotfoundX')), 0), 0)), or(ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_list'), ''), 'null'), '^"|"$', '')), lower('clickhouse/client/execute.py')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_type'), ''), 'null'), '^"|"$', '')), lower('clickhouse/client/execute.py')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_message'), ''), 'null'), '^"|"$', '')), lower('clickhouse/client/execute.py')), 0), 0)))) GROUP BY fingerprint LIMIT 101 OFFSET 0 SETTINGS readonly=2, @@ -569,7 +569,7 @@ any(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_type'), ''), 'null'), '^"|"$', '')) AS exception_type, JSONExtract(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_fingerprint'), ''), 'null'), '^"|"$', ''), '[]'), 'Array(String)') AS fingerprint FROM events - WHERE and(equals(events.team_id, 2), equals(events.event, '$exception'), and(less(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-11 00:00:00.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-10 00:00:00.000000', 6, 'UTC'))), or(ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_list'), ''), 'null'), '^"|"$', '')), lower('wireframe')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_stack_trace_raw'), ''), 'null'), '^"|"$', '')), lower('wireframe')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_type'), ''), 'null'), '^"|"$', '')), lower('wireframe')), 0), 0), 
ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_message'), ''), 'null'), '^"|"$', '')), lower('wireframe')), 0), 0))) + WHERE and(equals(events.team_id, 2), equals(events.event, '$exception'), and(less(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-11 00:00:00.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-10 00:00:00.000000', 6, 'UTC'))), or(ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_list'), ''), 'null'), '^"|"$', '')), lower('wireframe')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_type'), ''), 'null'), '^"|"$', '')), lower('wireframe')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_message'), ''), 'null'), '^"|"$', '')), lower('wireframe')), 0), 0))) GROUP BY fingerprint LIMIT 101 OFFSET 0 SETTINGS readonly=2, @@ -592,7 +592,7 @@ any(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_type'), ''), 'null'), '^"|"$', '')) AS exception_type, JSONExtract(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_fingerprint'), ''), 'null'), '^"|"$', ''), '[]'), 'Array(String)') AS fingerprint FROM events - WHERE and(equals(events.team_id, 2), equals(events.event, '$exception'), and(less(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-11 00:00:00.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-10 00:00:00.000000', 6, 'UTC'))), or(ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_list'), ''), 'null'), '^"|"$', '')), lower('f\0ì½é')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_stack_trace_raw'), ''), 'null'), '^"|"$', '')), lower('f\0ì½é')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_type'), ''), 'null'), '^"|"$', '')), lower('f\0ì½é')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_message'), ''), 'null'), '^"|"$', '')), lower('f\0ì½é')), 0), 0))) + WHERE and(equals(events.team_id, 2), equals(events.event, '$exception'), and(less(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-11 00:00:00.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-10 00:00:00.000000', 6, 'UTC'))), or(ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_list'), ''), 'null'), '^"|"$', '')), lower('f\0ì½é')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_type'), ''), 'null'), '^"|"$', '')), lower('f\0ì½é')), 0), 0), ifNull(greater(position(lower(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_message'), ''), 'null'), '^"|"$', '')), lower('f\0ì½é')), 0), 0))) GROUP BY fingerprint LIMIT 101 OFFSET 0 SETTINGS readonly=2, diff --git a/posthog/hogql_queries/web_analytics/web_overview.py b/posthog/hogql_queries/web_analytics/web_overview.py index 31d0bb43d337a..022f918e0913e 100644 --- a/posthog/hogql_queries/web_analytics/web_overview.py +++ b/posthog/hogql_queries/web_analytics/web_overview.py @@ -100,7 +100,7 @@ def 
session_properties(self) -> ast.Expr: @cached_property def conversion_goal_expr(self) -> ast.Expr: if isinstance(self.query.conversionGoal, ActionConversionGoal): - action = Action.objects.get(pk=self.query.conversionGoal.actionId) + action = Action.objects.get(pk=self.query.conversionGoal.actionId, team__project_id=self.team.project_id) return action_to_expr(action) elif isinstance(self.query.conversionGoal, CustomEventConversionGoal): return ast.CompareOperation( diff --git a/posthog/management/commands/delete_persons_with_no_distinct_ids.py b/posthog/management/commands/delete_persons_with_no_distinct_ids.py new file mode 100644 index 0000000000000..80e52f6e1bde6 --- /dev/null +++ b/posthog/management/commands/delete_persons_with_no_distinct_ids.py @@ -0,0 +1,70 @@ +from django.core.management.base import BaseCommand, CommandError +from django.db import connection + + +class Command(BaseCommand): + help = "Delete person rows that have no associated persondistinctid rows, by team" + + def add_arguments(self, parser): + parser.add_argument("--team-id", default=None, type=int, help="Team ID to migrate from (on this instance)") + parser.add_argument("--dry-run", action="store_false", help="Dry run (default: true)") + + def handle(self, **options): + team_id = options["team_id"] + dry_run = options["dry_run"] + + if not team_id: + raise CommandError("source Team ID is required") + + print("Deleting persons with no distinct ids for team", team_id) # noqa: T201 + + if dry_run: + delete_persons_without_distinct_ids_raw_sql_dry_run(team_id) + else: + delete_persons_without_distinct_ids_raw_sql(team_id) + + +def delete_persons_without_distinct_ids_raw_sql(team_id): + with connection.cursor() as cursor: + cursor.execute( + """ + WITH persons_to_delete AS ( + SELECT p.id + FROM posthog_person p + LEFT JOIN posthog_persondistinctid pd ON p.id = pd.person_id AND p.team_id = pd.team_id + WHERE p.team_id = %s AND pd.id IS NULL + ) + DELETE FROM posthog_person + WHERE id IN (SELECT id FROM persons_to_delete) + RETURNING id; + """, + [team_id], + ) + + deleted_ids = cursor.fetchall() + deleted_count = len(deleted_ids) + + print(f"Deleted {deleted_count} Person objects with no PersonDistinctIds for team {team_id}.") # noqa: T201 + return deleted_count + + +def delete_persons_without_distinct_ids_raw_sql_dry_run(team_id): + with connection.cursor() as cursor: + cursor.execute( + """ + WITH persons_to_delete AS ( + SELECT p.id + FROM posthog_person p + LEFT JOIN posthog_persondistinctid pd ON p.id = pd.person_id AND p.team_id = pd.team_id + WHERE p.team_id = %s AND pd.id IS NULL + ) + SELECT COUNT(*) FROM persons_to_delete; + """, + [team_id], + ) + + deleted_count = cursor.fetchone() + deleted_count = deleted_count[0] if deleted_count else 0 + + print(f"Would have deleted {deleted_count} Person objects with no PersonDistinctIds for team {team_id}.") # noqa: T201 + return deleted_count diff --git a/posthog/management/commands/test_migrations_are_safe.py b/posthog/management/commands/test_migrations_are_safe.py index 41ef0df6f90db..6c7d832e97112 100644 --- a/posthog/management/commands/test_migrations_are_safe.py +++ b/posthog/management/commands/test_migrations_are_safe.py @@ -67,13 +67,18 @@ def validate_migration_sql(sql) -> bool: f"\n\n\033[91mFound a DROP TABLE command. This could lead to unsafe states for the app. 
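For reference, a minimal way to exercise the new delete_persons_with_no_distinct_ids command from a Django shell or test, assuming a configured Django environment; the team id is illustrative:

    from django.core.management import call_command

    # Dry run is the default (dry_run comes back True from add_arguments above),
    # so this only counts the person rows that would be removed for the team.
    call_command("delete_persons_with_no_distinct_ids", team_id=2)

    # Passing dry_run=False runs the DELETE ... RETURNING id statement for real.
    call_command("delete_persons_with_no_distinct_ids", team_id=2, dry_run=False)
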
Please avoid dropping tables.\nSource: `{operation_sql}`" ) return True - if "CONSTRAINT" in operation_sql and ( - "-- existing-table-constraint-ignore" not in operation_sql + if ( + "CONSTRAINT" in operation_sql + # Ignore for new foreign key columns that are nullable, as their foreign key constraint does not lock + and not re.match(r"ADD COLUMN .+ NULL CONSTRAINT", operation_sql) + and "-- existing-table-constraint-ignore" not in operation_sql and " NOT VALID" not in operation_sql + and " VALIDATE CONSTRAINT " + not in operation_sql # VALIDATE CONSTRAINT is a different, non-locking operation and ( table_being_altered not in tables_created_so_far - or _get_table("ALTER TABLE", operation_sql) not in new_tables - ) # Ignore for brand-new tables + or _get_table("ALTER TABLE", operation_sql) not in new_tables # Ignore for brand-new tables + ) ): print( f"\n\n\033[91mFound a CONSTRAINT command without NOT VALID. This locks tables which causes downtime. " diff --git a/posthog/migrations/0488_alter_user_is_active.py b/posthog/migrations/0488_alter_user_is_active.py new file mode 100644 index 0000000000000..ca3fde9eeecd0 --- /dev/null +++ b/posthog/migrations/0488_alter_user_is_active.py @@ -0,0 +1,19 @@ +# Generated by Django 4.2.15 on 2024-10-14 18:21 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0487_team_survey_config"), + ] + + operations = [ + migrations.AlterField( + model_name="user", + name="is_active", + field=models.BooleanField( + default=True, help_text="Unselect this to temporarily disable an account.", verbose_name="active" + ), + ), + ] diff --git a/posthog/migrations/0489_alter_integration_kind.py b/posthog/migrations/0489_alter_integration_kind.py new file mode 100644 index 0000000000000..94b9c06391db8 --- /dev/null +++ b/posthog/migrations/0489_alter_integration_kind.py @@ -0,0 +1,27 @@ +# Generated by Django 4.2.15 on 2024-10-15 10:30 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0488_alter_user_is_active"), + ] + + operations = [ + migrations.AlterField( + model_name="integration", + name="kind", + field=models.CharField( + choices=[ + ("slack", "Slack"), + ("salesforce", "Salesforce"), + ("hubspot", "Hubspot"), + ("google-pubsub", "Google Pubsub"), + ("google-cloud-storage", "Google Cloud Storage"), + ("google-ads", "Google Ads"), + ], + max_length=20, + ), + ), + ] diff --git a/posthog/migrations/0490_dashboard_variables.py b/posthog/migrations/0490_dashboard_variables.py new file mode 100644 index 0000000000000..dd85f89405e41 --- /dev/null +++ b/posthog/migrations/0490_dashboard_variables.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.15 on 2024-10-16 15:06 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0489_alter_integration_kind"), + ] + + operations = [ + migrations.AddField( + model_name="dashboard", + name="variables", + field=models.JSONField(blank=True, default=dict, null=True), + ), + ] diff --git a/posthog/migrations/0491_alertconfiguration_snoozed_until_and_more.py b/posthog/migrations/0491_alertconfiguration_snoozed_until_and_more.py new file mode 100644 index 0000000000000..d8fa097c43b32 --- /dev/null +++ b/posthog/migrations/0491_alertconfiguration_snoozed_until_and_more.py @@ -0,0 +1,46 @@ +# Generated by Django 4.2.15 on 2024-10-17 09:21 + +from django.db import migrations, models +import posthog.schema + + +class 
Migration(migrations.Migration): + dependencies = [ + ("posthog", "0490_dashboard_variables"), + ] + + operations = [ + migrations.AddField( + model_name="alertconfiguration", + name="snoozed_until", + field=models.DateTimeField(blank=True, null=True), + ), + migrations.AlterField( + model_name="alertcheck", + name="state", + field=models.CharField( + choices=[ + (posthog.schema.AlertState["FIRING"], posthog.schema.AlertState["FIRING"]), + (posthog.schema.AlertState["NOT_FIRING"], posthog.schema.AlertState["NOT_FIRING"]), + (posthog.schema.AlertState["ERRORED"], posthog.schema.AlertState["ERRORED"]), + (posthog.schema.AlertState["SNOOZED"], posthog.schema.AlertState["SNOOZED"]), + ], + default=posthog.schema.AlertState["NOT_FIRING"], + max_length=10, + ), + ), + migrations.AlterField( + model_name="alertconfiguration", + name="state", + field=models.CharField( + choices=[ + (posthog.schema.AlertState["FIRING"], posthog.schema.AlertState["FIRING"]), + (posthog.schema.AlertState["NOT_FIRING"], posthog.schema.AlertState["NOT_FIRING"]), + (posthog.schema.AlertState["ERRORED"], posthog.schema.AlertState["ERRORED"]), + (posthog.schema.AlertState["SNOOZED"], posthog.schema.AlertState["SNOOZED"]), + ], + default=posthog.schema.AlertState["NOT_FIRING"], + max_length=10, + ), + ), + ] diff --git a/posthog/migrations/0492_team_session_recording_url_trigger_config.py b/posthog/migrations/0492_team_session_recording_url_trigger_config.py new file mode 100644 index 0000000000000..05ec513b2edec --- /dev/null +++ b/posthog/migrations/0492_team_session_recording_url_trigger_config.py @@ -0,0 +1,20 @@ +# Generated by Django 4.2.15 on 2024-10-14 08:09 + +import django.contrib.postgres.fields +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0491_alertconfiguration_snoozed_until_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="team", + name="session_recording_url_trigger_config", + field=django.contrib.postgres.fields.ArrayField( + base_field=models.JSONField(blank=True, null=True), blank=True, default=list, null=True, size=None + ), + ), + ] diff --git a/posthog/migrations/0493_insightvariable_values.py b/posthog/migrations/0493_insightvariable_values.py new file mode 100644 index 0000000000000..0e278ca593ff2 --- /dev/null +++ b/posthog/migrations/0493_insightvariable_values.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.15 on 2024-10-17 10:44 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0492_team_session_recording_url_trigger_config"), + ] + + operations = [ + migrations.AddField( + model_name="insightvariable", + name="values", + field=models.JSONField(blank=True, null=True), + ), + ] diff --git a/posthog/migrations/0494_team_project_non_null.py b/posthog/migrations/0494_team_project_non_null.py new file mode 100644 index 0000000000000..9faa99d860a5a --- /dev/null +++ b/posthog/migrations/0494_team_project_non_null.py @@ -0,0 +1,34 @@ +# Generated by Django 4.2.15 on 2024-10-15 13:04 + +from django.db import migrations, models +import django.db.models.deletion +from django.contrib.postgres.operations import ValidateConstraint + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0493_insightvariable_values"), + ] + + operations = [ + migrations.SeparateDatabaseAndState( + state_operations=[ + # The only difference is lack of null=True + migrations.AlterField( + model_name="team", + name="project", + 
field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="teams", + related_query_name="team", + to="posthog.project", + ), + ) + ], + database_operations=[ + # Finishing the job started in 0445_require_team_project_id_not_valid + # This is safe in both Cloud regions, as project_id is not null for any posthog_team record + ValidateConstraint(model_name="team", name="project_id_is_not_null") + ], + ) + ] diff --git a/posthog/models/activity_logging/activity_log.py b/posthog/models/activity_logging/activity_log.py index 3b8489d897645..7cf9595e64983 100644 --- a/posthog/models/activity_logging/activity_log.py +++ b/posthog/models/activity_logging/activity_log.py @@ -29,6 +29,7 @@ "Insight", "Plugin", "PluginConfig", + "HogFunction", "DataManagement", "EventDefinition", "PropertyDefinition", @@ -149,6 +150,12 @@ class Meta: ] +field_with_masked_contents: dict[ActivityScope, list[str]] = { + "HogFunction": [ + "encrypted_inputs", + ], +} + field_exclusions: dict[ActivityScope, list[str]] = { "Cohort": [ "version", @@ -158,6 +165,10 @@ class Meta: "last_calculation", "errors_calculating", ], + "HogFunction": [ + "bytecode", + "icon_url", + ], "Notebook": [ "text_content", ], @@ -284,35 +295,47 @@ def changes_between( if previous is not None: fields = current._meta.get_fields() if current is not None else [] excluded_fields = field_exclusions.get(model_type, []) + common_field_exclusions - filtered_fields = [f.name for f in fields if f.name not in excluded_fields] + masked_fields = field_with_masked_contents.get(model_type, []) + filtered_fields = [f for f in fields if f.name not in excluded_fields] + filtered_field_names = [f.name for f in filtered_fields] for field in filtered_fields: - left = safely_get_field_value(previous, field) - right = safely_get_field_value(current, field) + field_name = field.name + left = safely_get_field_value(previous, field_name) + right = safely_get_field_value(current, field_name) - if field == "tagged_items": - field = "tags" # Or the UI needs to be coupled to this internal backend naming. + if field_name == "tagged_items": + field_name = "tags" # Or the UI needs to be coupled to this internal backend naming. - if field == "dashboards" and "dashboard_tiles" in filtered_fields: + if field_name == "dashboards" and "dashboard_tiles" in filtered_field_names: # Only process dashboard_tiles when it is present. It supersedes dashboards. continue - if model_type == "Insight" and field == "dashboard_tiles": + if model_type == "Insight" and field_name == "dashboard_tiles": # The API exposes this as dashboards and that's what the activity describers expect. 
- field = "dashboards" + field_name = "dashboards" + + # if is a django model field, check the empty_values list + left_is_none = left is None or (hasattr(field, "empty_values") and left in field.empty_values) + right_is_none = right is None or (hasattr(field, "empty_values") and right in field.empty_values) + + left_value = "masked" if field_name in masked_fields else left + right_value = "masked" if field_name in masked_fields else right - if left is None and right is not None: - changes.append(Change(type=model_type, field=field, action="created", after=right)) - elif right is None and left is not None: - changes.append(Change(type=model_type, field=field, action="deleted", before=left)) + if left_is_none and right_is_none: + pass # could be {} vs None + elif left_is_none and not right_is_none: + changes.append(Change(type=model_type, field=field_name, action="created", after=right_value)) + elif right_is_none and not left_is_none: + changes.append(Change(type=model_type, field=field_name, action="deleted", before=left_value)) elif left != right: changes.append( Change( type=model_type, - field=field, + field=field_name, action="changed", - before=left, - after=right, + before=left_value, + after=right_value, ) ) diff --git a/posthog/models/alert.py b/posthog/models/alert.py index 8db059a992232..d00425327fd48 100644 --- a/posthog/models/alert.py +++ b/posthog/models/alert.py @@ -1,38 +1,24 @@ from datetime import datetime, UTC, timedelta -from typing import Any, Optional, cast -from dateutil.relativedelta import relativedelta from django.db import models from django.core.exceptions import ValidationError +import pydantic from posthog.hogql_queries.legacy_compatibility.flagged_conversion_manager import conversion_to_query_based from posthog.models.insight import Insight from posthog.models.utils import UUIDModel, CreatedMetaFields -from posthog.schema import AlertCondition, InsightThreshold, AlertState, AlertCalculationInterval +from posthog.schema import InsightThreshold, AlertState, AlertCalculationInterval ALERT_STATE_CHOICES = [ (AlertState.FIRING, AlertState.FIRING), (AlertState.NOT_FIRING, AlertState.NOT_FIRING), (AlertState.ERRORED, AlertState.ERRORED), + (AlertState.SNOOZED, AlertState.SNOOZED), ] -def alert_calculation_interval_to_relativedelta(alert_calculation_interval: AlertCalculationInterval) -> relativedelta: - match alert_calculation_interval: - case AlertCalculationInterval.HOURLY: - return relativedelta(hours=1) - case AlertCalculationInterval.DAILY: - return relativedelta(days=1) - case AlertCalculationInterval.WEEKLY: - return relativedelta(weeks=1) - case AlertCalculationInterval.MONTHLY: - return relativedelta(months=1) - case _: - raise ValueError(f"Invalid alert calculation interval: {alert_calculation_interval}") - - def are_alerts_supported_for_insight(insight: Insight) -> bool: with conversion_to_query_based(insight): query = insight.query @@ -43,32 +29,6 @@ def are_alerts_supported_for_insight(insight: Insight) -> bool: return True -class ConditionValidator: - def __init__(self, threshold: Optional[InsightThreshold], condition: AlertCondition): - self.threshold = threshold - self.condition = condition - - def validate(self, calculated_value: float) -> list[str]: - validators: Any = [ - self.validate_absolute_threshold, - ] - breaches = [] - for validator in validators: - breaches += validator(calculated_value) - return breaches - - def validate_absolute_threshold(self, calculated_value: float) -> list[str]: - if not self.threshold or not 
self.threshold.absoluteThreshold: - return [] - - absolute_threshold = self.threshold.absoluteThreshold - if absolute_threshold.lower is not None and calculated_value < absolute_threshold.lower: - return [f"The trend value ({calculated_value}) is below the lower threshold ({absolute_threshold.lower})"] - if absolute_threshold.upper is not None and calculated_value > absolute_threshold.upper: - return [f"The trend value ({calculated_value}) is above the upper threshold ({absolute_threshold.upper})"] - return [] - - class Alert(models.Model): """ @deprecated("AlertConfiguration should be used instead.") @@ -95,11 +55,15 @@ class Threshold(CreatedMetaFields, UUIDModel): configuration = models.JSONField(default=dict) def clean(self): - config = InsightThreshold.model_validate(self.configuration) - if not config or not config.absoluteThreshold: + try: + config = InsightThreshold.model_validate(self.configuration) + except pydantic.ValidationError as e: + raise ValidationError(f"Invalid threshold configuration: {e}") + + if not config or not config.bounds: return - if config.absoluteThreshold.lower is not None and config.absoluteThreshold.upper is not None: - if config.absoluteThreshold.lower > config.absoluteThreshold.upper: + if config.bounds.lower is not None and config.bounds.upper is not None: + if config.bounds.lower > config.bounds.upper: raise ValidationError("Lower threshold must be less than upper threshold") @@ -145,7 +109,10 @@ class AlertConfiguration(CreatedMetaFields, UUIDModel): last_notified_at = models.DateTimeField(null=True, blank=True) last_checked_at = models.DateTimeField(null=True, blank=True) + # UTC time for when next alert check is due next_check_at = models.DateTimeField(null=True, blank=True) + # UTC time until when we shouldn't check alert/notify user + snoozed_until = models.DateTimeField(null=True, blank=True) def __str__(self): return f"{self.name} (Team: {self.team})" @@ -159,75 +126,6 @@ def save(self, *args, **kwargs): super().save(*args, **kwargs) - def evaluate_condition(self, calculated_value) -> list[str]: - threshold = InsightThreshold.model_validate(self.threshold.configuration) if self.threshold else None - condition = AlertCondition.model_validate(self.condition) - validator = ConditionValidator(threshold=threshold, condition=condition) - return validator.validate(calculated_value) - - def add_check( - self, *, aggregated_value: Optional[float], error: Optional[dict] = None - ) -> tuple["AlertCheck", list[str], Optional[dict], bool]: - """ - Add a new AlertCheck, managing state transitions and cool down. 
- - Args: - aggregated_value: result of insight calculation compressed to one number to compare against threshold - error: any error raised while calculating insight value, if present then set state as errored - """ - - targets_notified: dict[str, list[str]] = {} - breaches = [] - notify = False - - if not error: - try: - breaches = self.evaluate_condition(aggregated_value) if aggregated_value is not None else [] - except Exception as err: - # error checking the condition - error = { - "message": f"Error checking alert condition {str(err)}", - } - - if error: - # If the alert is not already errored, notify user - if self.state != AlertState.ERRORED: - self.state = AlertState.ERRORED - notify = True - elif breaches: - # If the alert is not already firing, notify user - if self.state != AlertState.FIRING: - self.state = AlertState.FIRING - notify = True - else: - self.state = AlertState.NOT_FIRING # Set the Alert to not firing if the threshold is no longer met - # TODO: Optionally send a resolved notification when alert goes from firing to not_firing? - - now = datetime.now(UTC) - self.last_checked_at = datetime.now(UTC) - - # IMPORTANT: update next_check_at according to interval - # ensure we don't recheck alert until the next interval is due - self.next_check_at = (self.next_check_at or now) + alert_calculation_interval_to_relativedelta( - cast(AlertCalculationInterval, self.calculation_interval) - ) - - if notify: - self.last_notified_at = now - targets_notified = {"users": list(self.subscribed_users.all().values_list("email", flat=True))} - - alert_check = AlertCheck.objects.create( - alert_configuration=self, - calculated_value=aggregated_value, - condition=self.condition, - targets_notified=targets_notified, - state=self.state, - error=error, - ) - - self.save() - return alert_check, breaches, error, notify - class AlertSubscription(CreatedMetaFields, UUIDModel): user = models.ForeignKey( diff --git a/posthog/models/cohort/util.py b/posthog/models/cohort/util.py index b6eeac84a8395..1d18d632bd030 100644 --- a/posthog/models/cohort/util.py +++ b/posthog/models/cohort/util.py @@ -7,7 +7,7 @@ from django.conf import settings from django.utils import timezone from rest_framework.exceptions import ValidationError - +from posthog.queries.util import PersonPropertiesMode from posthog.clickhouse.client.connection import Workload from posthog.clickhouse.query_tagging import tag_queries from posthog.client import sync_execute @@ -65,6 +65,7 @@ def format_person_query(cohort: Cohort, index: int, hogql_context: HogQLContext) ), cohort.team, cohort_pk=cohort.pk, + persons_on_events_mode=cohort.team.person_on_events_mode, ) query, params = query_builder.get_query() @@ -151,6 +152,7 @@ def get_entity_query( team_id: int, group_idx: Union[int, str], hogql_context: HogQLContext, + person_properties_mode: Optional[PersonPropertiesMode] = None, ) -> tuple[str, dict[str, str]]: if event_id: return f"event = %({f'event_{group_idx}'})s", {f"event_{group_idx}": event_id} @@ -161,6 +163,9 @@ def get_entity_query( action=action, prepend="_{}_action".format(group_idx), hogql_context=hogql_context, + person_properties_mode=person_properties_mode + if person_properties_mode + else PersonPropertiesMode.USING_SUBQUERY, ) return action_filter_query, action_params else: diff --git a/posthog/models/dashboard.py b/posthog/models/dashboard.py index b765c6154f815..83fcdf3484051 100644 --- a/posthog/models/dashboard.py +++ b/posthog/models/dashboard.py @@ -49,6 +49,7 @@ class PrivilegeLevel(models.IntegerChoices): deleted 
= models.BooleanField(default=False) last_accessed_at = models.DateTimeField(blank=True, null=True) filters = models.JSONField(default=dict) + variables = models.JSONField(default=dict, null=True, blank=True) creation_mode = models.CharField(max_length=16, default="default", choices=CreationMode.choices) restriction_level = models.PositiveSmallIntegerField( default=RestrictionLevel.EVERYONE_IN_PROJECT_CAN_EDIT, diff --git a/posthog/models/filters/test/__snapshots__/test_filter.ambr b/posthog/models/filters/test/__snapshots__/test_filter.ambr index cd2bf85143d05..c9a734ef96c8c 100644 --- a/posthog/models/filters/test/__snapshots__/test_filter.ambr +++ b/posthog/models/filters/test/__snapshots__/test_filter.ambr @@ -25,6 +25,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -90,6 +91,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -155,6 +157,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -220,6 +223,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -285,6 +289,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", diff --git a/posthog/models/hog_functions/hog_function.py b/posthog/models/hog_functions/hog_function.py index 3435b4e05c640..d9922a83d47f8 100644 --- a/posthog/models/hog_functions/hog_function.py +++ b/posthog/models/hog_functions/hog_function.py @@ -69,6 +69,11 @@ def filter_action_ids(self) -> list[int]: _status: Optional[dict] = None + @property + def type(self) -> str: + # Used in activity logs + return "destination" + @property def status(self) -> dict: if not self.enabled: diff --git a/posthog/models/insight.py b/posthog/models/insight.py index d32c2d1c31ec9..c8e5b0fbc7636 100644 --- a/posthog/models/insight.py +++ b/posthog/models/insight.py @@ -196,9 +196,17 @@ def dashboard_filters( return self.filters def get_effective_query( - self, *, dashboard: Optional[Dashboard], dashboard_filters_override: Optional[dict] = None + self, + *, + dashboard: Optional[Dashboard], + dashboard_filters_override: Optional[dict] = None, + 
dashboard_variables_override: Optional[dict[str, dict]] = None, ) -> Optional[dict]: from posthog.hogql_queries.apply_dashboard_filters import apply_dashboard_filters_to_dict + from posthog.hogql_queries.apply_dashboard_filters import apply_dashboard_variables_to_dict + + if self.query and dashboard_variables_override: + self.query = apply_dashboard_variables_to_dict(self.query, dashboard_variables_override or {}, self.team) if not (dashboard or dashboard_filters_override) or not self.query: return self.query diff --git a/posthog/models/insight_variable.py b/posthog/models/insight_variable.py index fc023ca1732c8..8c9acdb606874 100644 --- a/posthog/models/insight_variable.py +++ b/posthog/models/insight_variable.py @@ -15,5 +15,6 @@ class Type(models.TextChoices): code_name = models.CharField(max_length=400, null=True, blank=True) type = models.CharField(max_length=128, choices=Type.choices) default_value = models.JSONField(null=True, blank=True) + values = models.JSONField(null=True, blank=True) __repr__ = sane_repr("id") diff --git a/posthog/models/integration.py b/posthog/models/integration.py index fe5c0947899ee..6b5effb9a83e3 100644 --- a/posthog/models/integration.py +++ b/posthog/models/integration.py @@ -45,6 +45,7 @@ class IntegrationKind(models.TextChoices): HUBSPOT = "hubspot" GOOGLE_PUBSUB = "google-pubsub" GOOGLE_CLOUD_STORAGE = "google-cloud-storage" + GOOGLE_ADS = "google-ads" team = models.ForeignKey("Team", on_delete=models.CASCADE) @@ -107,10 +108,11 @@ class OauthConfig: name_path: str token_info_url: Optional[str] = None token_info_config_fields: Optional[list[str]] = None + additional_authorize_params: Optional[dict[str, str]] = None class OauthIntegration: - supported_kinds = ["slack", "salesforce", "hubspot"] + supported_kinds = ["slack", "salesforce", "hubspot", "google-ads"] integration: Integration def __init__(self, integration: Integration) -> None: @@ -168,6 +170,23 @@ def oauth_config_for_kind(cls, kind: str) -> OauthConfig: id_path="hub_id", name_path="hub_domain", ) + elif kind == "google-ads": + if not settings.SOCIAL_AUTH_GOOGLE_OAUTH2_KEY or not settings.SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET: + raise NotImplementedError("Google Ads app not configured") + + return OauthConfig( + authorize_url="https://accounts.google.com/o/oauth2/v2/auth", + # forces the consent screen, otherwise we won't receive a refresh token + additional_authorize_params={"access_type": "offline", "prompt": "consent"}, + token_info_url="https://openidconnect.googleapis.com/v1/userinfo", + token_info_config_fields=["sub", "email"], + token_url="https://oauth2.googleapis.com/token", + client_id=settings.SOCIAL_AUTH_GOOGLE_OAUTH2_KEY, + client_secret=settings.SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET, + scope="https://www.googleapis.com/auth/adwords email", + id_path="sub", + name_path="email", + ) raise NotImplementedError(f"Oauth config for kind {kind} not implemented") @@ -186,6 +205,7 @@ def authorize_url(cls, kind: str, next="") -> str: "redirect_uri": cls.redirect_uri(kind), "response_type": "code", "state": urlencode({"next": next}), + **(oauth_config.additional_authorize_params or {}), } return f"{oauth_config.authorize_url}?{urlencode(query_params)}" diff --git a/posthog/models/team/team.py b/posthog/models/team/team.py index 30d2cb546892c..21954626ea5b0 100644 --- a/posthog/models/team/team.py +++ b/posthog/models/team/team.py @@ -200,11 +200,7 @@ class Meta: related_query_name="team", ) project = models.ForeignKey( - "posthog.Project", - on_delete=models.CASCADE, - related_name="teams", - 
related_query_name="team", - null=True, + "posthog.Project", on_delete=models.CASCADE, related_name="teams", related_query_name="team" ) api_token = models.CharField( max_length=200, @@ -246,6 +242,9 @@ class Meta: ) session_recording_linked_flag = models.JSONField(null=True, blank=True) session_recording_network_payload_capture_config = models.JSONField(null=True, blank=True) + session_recording_url_trigger_config = ArrayField( + models.JSONField(null=True, blank=True), default=list, blank=True, null=True + ) session_replay_config = models.JSONField(null=True, blank=True) survey_config = models.JSONField(null=True, blank=True) capture_console_log_opt_in = models.BooleanField(null=True, blank=True, default=True) @@ -283,7 +282,7 @@ class Meta: # during feature releases. extra_settings = models.JSONField(null=True, blank=True) - # Project level default HogQL query modifiers + # Environment-level default HogQL query modifiers modifiers = models.JSONField(null=True, blank=True) # This is meant to be used as a stopgap until https://github.com/PostHog/meta/pull/39 gets implemented diff --git a/posthog/models/test/test_integration_model.py b/posthog/models/test/test_integration_model.py index b1802df4ebefe..456f085d9c2e9 100644 --- a/posthog/models/test/test_integration_model.py +++ b/posthog/models/test/test_integration_model.py @@ -89,6 +89,8 @@ class TestOauthIntegrationModel(BaseTest): "SALESFORCE_CONSUMER_SECRET": "salesforce-client-secret", "HUBSPOT_APP_CLIENT_ID": "hubspot-client-id", "HUBSPOT_APP_CLIENT_SECRET": "hubspot-client-secret", + "SOCIAL_AUTH_GOOGLE_OAUTH2_KEY": "google-client-id", + "SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET": "google-client-secret", } def create_integration( @@ -113,6 +115,14 @@ def test_authorize_url(self): == "https://login.salesforce.com/services/oauth2/authorize?client_id=salesforce-client-id&scope=full+refresh_token&redirect_uri=https%3A%2F%2Flocalhost%3A8000%2Fintegrations%2Fsalesforce%2Fcallback&response_type=code&state=next%3D%252Fprojects%252Ftest" ) + def test_authorize_url_with_additional_authorize_params(self): + with self.settings(**self.mock_settings): + url = OauthIntegration.authorize_url("google-ads", next="/projects/test") + assert ( + url + == "https://accounts.google.com/o/oauth2/v2/auth?client_id=google-client-id&scope=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fadwords+email&redirect_uri=https%3A%2F%2Flocalhost%3A8000%2Fintegrations%2Fgoogle-ads%2Fcallback&response_type=code&state=next%3D%252Fprojects%252Ftest&access_type=offline&prompt=consent" + ) + @patch("posthog.models.integration.requests.post") def test_integration_from_oauth_response(self, mock_post): with self.settings(**self.mock_settings): diff --git a/posthog/models/user.py b/posthog/models/user.py index 6819d6282d9f4..3b25009931c7c 100644 --- a/posthog/models/user.py +++ b/posthog/models/user.py @@ -149,6 +149,11 @@ class User(AbstractUser, UUIDClassicModel): requested_password_reset_at = models.DateTimeField(null=True, blank=True) has_seen_product_intro_for = models.JSONField(null=True, blank=True) strapi_id = models.PositiveSmallIntegerField(null=True, blank=True) + is_active = models.BooleanField( + _("active"), + default=True, + help_text=_("Unselect this to temporarily disable an account."), + ) # Preferences / configuration options diff --git a/posthog/queries/app_metrics/test/__snapshots__/test_historical_exports.ambr b/posthog/queries/app_metrics/test/__snapshots__/test_historical_exports.ambr index bb5655daf5e6f..9332bd30f12a1 100644 --- 
a/posthog/queries/app_metrics/test/__snapshots__/test_historical_exports.ambr +++ b/posthog/queries/app_metrics/test/__snapshots__/test_historical_exports.ambr @@ -83,7 +83,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -96,6 +95,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -139,7 +139,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -152,6 +151,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -195,7 +195,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -208,6 +207,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -251,7 +251,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -264,6 +263,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", diff --git a/posthog/queries/foss_cohort_query.py b/posthog/queries/foss_cohort_query.py index d4925856afd94..a7e020158872e 100644 --- a/posthog/queries/foss_cohort_query.py +++ b/posthog/queries/foss_cohort_query.py @@ -613,6 +613,9 @@ def _get_entity( self._team_id, f"{prepend}_entity_{idx}", self._filter.hogql_context, + person_properties_mode=PersonPropertiesMode.DIRECT_ON_EVENTS + if self._person_on_events_mode != PersonsOnEventsMode.DISABLED + else None, ) elif event[0] == "events": self._add_event(str(event[1])) diff --git a/posthog/schema.py b/posthog/schema.py index f588282aa5554..1ba777cd25b26 100644 --- a/posthog/schema.py +++ b/posthog/schema.py @@ -50,17 +50,17 @@ class AlertCalculationInterval(StrEnum): MONTHLY = "monthly" -class AlertCondition(BaseModel): - pass - model_config = ConfigDict( - extra="forbid", - ) +class AlertConditionType(StrEnum): + ABSOLUTE_VALUE = "absolute_value" + RELATIVE_INCREASE = "relative_increase" + RELATIVE_DECREASE = "relative_decrease" class AlertState(StrEnum): FIRING = "Firing" NOT_FIRING = "Not firing" ERRORED = "Errored" + SNOOZED = "Snoozed" class Kind(StrEnum): @@ -517,7 +517,15 @@ class EventsQueryPersonColumn(BaseModel): uuid: str -class ExperimentVariantFunnelResult(BaseModel): +class ExperimentSignificanceCode(StrEnum): + SIGNIFICANT = "significant" + NOT_ENOUGH_EXPOSURE = 
"not_enough_exposure" + LOW_WIN_PROBABILITY = "low_win_probability" + HIGH_LOSS = "high_loss" + HIGH_P_VALUE = "high_p_value" + + +class ExperimentVariantFunnelsBaseStats(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -526,7 +534,7 @@ class ExperimentVariantFunnelResult(BaseModel): success_count: float -class ExperimentVariantTrendResult(BaseModel): +class ExperimentVariantTrendsBaseStats(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -789,7 +797,12 @@ class InsightNodeKind(StrEnum): LIFECYCLE_QUERY = "LifecycleQuery" -class InsightsThresholdAbsolute(BaseModel): +class InsightThresholdType(StrEnum): + ABSOLUTE = "absolute" + PERCENTAGE = "percentage" + + +class InsightsThresholdBounds(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -862,8 +875,8 @@ class NodeKind(StrEnum): WEB_STATS_TABLE_QUERY = "WebStatsTableQuery" WEB_EXTERNAL_CLICKS_TABLE_QUERY = "WebExternalClicksTableQuery" WEB_GOALS_QUERY = "WebGoalsQuery" - EXPERIMENT_FUNNEL_QUERY = "ExperimentFunnelQuery" - EXPERIMENT_TREND_QUERY = "ExperimentTrendQuery" + EXPERIMENT_FUNNELS_QUERY = "ExperimentFunnelsQuery" + EXPERIMENT_TRENDS_QUERY = "ExperimentTrendsQuery" DATABASE_SCHEMA_QUERY = "DatabaseSchemaQuery" SUGGESTED_QUESTIONS_QUERY = "SuggestedQuestionsQuery" TEAM_TAXONOMY_QUERY = "TeamTaxonomyQuery" @@ -1014,38 +1027,6 @@ class QueryResponseAlternative7(BaseModel): warnings: list[HogQLNotice] -class QueryResponseAlternative16(BaseModel): - model_config = ConfigDict( - extra="forbid", - ) - insight: Literal["FUNNELS"] = "FUNNELS" - results: dict[str, ExperimentVariantFunnelResult] - - -class QueryResponseAlternative17(BaseModel): - model_config = ConfigDict( - extra="forbid", - ) - insight: Literal["TRENDS"] = "TRENDS" - results: dict[str, ExperimentVariantTrendResult] - - -class QueryResponseAlternative28(BaseModel): - model_config = ConfigDict( - extra="forbid", - ) - insight: Literal["FUNNELS"] = "FUNNELS" - results: dict[str, ExperimentVariantFunnelResult] - - -class QueryResponseAlternative29(BaseModel): - model_config = ConfigDict( - extra="forbid", - ) - insight: Literal["TRENDS"] = "TRENDS" - results: dict[str, ExperimentVariantTrendResult] - - class QueryResponseAlternative38(BaseModel): model_config = ConfigDict( extra="forbid", @@ -1720,6 +1701,13 @@ class ActorsQueryResponse(BaseModel): types: list[str] +class AlertCondition(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + type: AlertConditionType + + class Breakdown(BaseModel): model_config = ConfigDict( extra="forbid", @@ -1926,27 +1914,7 @@ class CachedEventsQueryResponse(BaseModel): types: list[str] -class CachedExperimentFunnelQueryResponse(BaseModel): - model_config = ConfigDict( - extra="forbid", - ) - cache_key: str - cache_target_age: Optional[AwareDatetime] = None - calculation_trigger: Optional[str] = Field( - default=None, description="What triggered the calculation of the query, leave empty if user/immediate" - ) - insight: Literal["FUNNELS"] = "FUNNELS" - is_cached: bool - last_refresh: AwareDatetime - next_allowed_client_refresh: AwareDatetime - query_status: Optional[QueryStatus] = Field( - default=None, description="Query status indicates whether next to the provided data, a query is still running." 
- ) - results: dict[str, ExperimentVariantFunnelResult] - timezone: str - - -class CachedExperimentTrendQueryResponse(BaseModel): +class CachedExperimentTrendsQueryResponse(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -1955,15 +1923,20 @@ class CachedExperimentTrendQueryResponse(BaseModel): calculation_trigger: Optional[str] = Field( default=None, description="What triggered the calculation of the query, leave empty if user/immediate" ) - insight: Literal["TRENDS"] = "TRENDS" + credible_intervals: dict[str, list[float]] + insight: TrendsQueryResponse is_cached: bool last_refresh: AwareDatetime next_allowed_client_refresh: AwareDatetime + p_value: float + probability: dict[str, float] query_status: Optional[QueryStatus] = Field( default=None, description="Query status indicates whether next to the provided data, a query is still running." ) - results: dict[str, ExperimentVariantTrendResult] + significance_code: ExperimentSignificanceCode + significant: bool timezone: str + variants: list[ExperimentVariantTrendsBaseStats] class CachedFunnelCorrelationResponse(BaseModel): @@ -2690,20 +2663,17 @@ class Response9(BaseModel): ) -class Response10(BaseModel): - model_config = ConfigDict( - extra="forbid", - ) - insight: Literal["FUNNELS"] = "FUNNELS" - results: dict[str, ExperimentVariantFunnelResult] - - class Response11(BaseModel): model_config = ConfigDict( extra="forbid", ) - insight: Literal["TRENDS"] = "TRENDS" - results: dict[str, ExperimentVariantTrendResult] + credible_intervals: dict[str, list[float]] + insight: TrendsQueryResponse + p_value: float + probability: dict[str, float] + significance_code: ExperimentSignificanceCode + significant: bool + variants: list[ExperimentVariantTrendsBaseStats] class DataWarehousePersonPropertyFilter(BaseModel): @@ -2856,20 +2826,17 @@ class EventsQueryResponse(BaseModel): types: list[str] -class ExperimentFunnelQueryResponse(BaseModel): - model_config = ConfigDict( - extra="forbid", - ) - insight: Literal["FUNNELS"] = "FUNNELS" - results: dict[str, ExperimentVariantFunnelResult] - - -class ExperimentTrendQueryResponse(BaseModel): +class ExperimentTrendsQueryResponse(BaseModel): model_config = ConfigDict( extra="forbid", ) - insight: Literal["TRENDS"] = "TRENDS" - results: dict[str, ExperimentVariantTrendResult] + credible_intervals: dict[str, list[float]] + insight: TrendsQueryResponse + p_value: float + probability: dict[str, float] + significance_code: ExperimentSignificanceCode + significant: bool + variants: list[ExperimentVariantTrendsBaseStats] class BreakdownFilter1(BaseModel): @@ -3079,7 +3046,8 @@ class InsightThreshold(BaseModel): model_config = ConfigDict( extra="forbid", ) - absoluteThreshold: Optional[InsightsThresholdAbsolute] = None + bounds: Optional[InsightsThresholdBounds] = None + type: InsightThresholdType class LifecycleFilter(BaseModel): @@ -3461,6 +3429,32 @@ class QueryResponseAlternative15(BaseModel): ) +class QueryResponseAlternative16(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + credible_intervals: dict[str, list[float]] + expected_loss: float + insight: FunnelsQueryResponse + probability: dict[str, float] + significance_code: ExperimentSignificanceCode + significant: bool + variants: list[ExperimentVariantFunnelsBaseStats] + + +class QueryResponseAlternative17(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + credible_intervals: dict[str, list[float]] + insight: TrendsQueryResponse + p_value: float + probability: dict[str, float] + significance_code: 
ExperimentSignificanceCode + significant: bool + variants: list[ExperimentVariantTrendsBaseStats] + + class QueryResponseAlternative18(BaseModel): model_config = ConfigDict( extra="forbid", @@ -3697,6 +3691,32 @@ class QueryResponseAlternative27(BaseModel): ) +class QueryResponseAlternative28(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + credible_intervals: dict[str, list[float]] + expected_loss: float + insight: FunnelsQueryResponse + probability: dict[str, float] + significance_code: ExperimentSignificanceCode + significant: bool + variants: list[ExperimentVariantFunnelsBaseStats] + + +class QueryResponseAlternative29(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + credible_intervals: dict[str, list[float]] + insight: TrendsQueryResponse + p_value: float + probability: dict[str, float] + significance_code: ExperimentSignificanceCode + significant: bool + variants: list[ExperimentVariantTrendsBaseStats] + + class QueryResponseAlternative30(BaseModel): model_config = ConfigDict( extra="forbid", @@ -4133,6 +4153,31 @@ class AnyResponseType( ] +class CachedExperimentFunnelsQueryResponse(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + cache_key: str + cache_target_age: Optional[AwareDatetime] = None + calculation_trigger: Optional[str] = Field( + default=None, description="What triggered the calculation of the query, leave empty if user/immediate" + ) + credible_intervals: dict[str, list[float]] + expected_loss: float + insight: FunnelsQueryResponse + is_cached: bool + last_refresh: AwareDatetime + next_allowed_client_refresh: AwareDatetime + probability: dict[str, float] + query_status: Optional[QueryStatus] = Field( + default=None, description="Query status indicates whether next to the provided data, a query is still running." 
+ ) + significance_code: ExperimentSignificanceCode + significant: bool + timezone: str + variants: list[ExperimentVariantFunnelsBaseStats] + + class CachedHogQLQueryResponse(BaseModel): model_config = ConfigDict( extra="forbid", @@ -4284,6 +4329,19 @@ class Response2(BaseModel): types: Optional[list] = Field(default=None, description="Types of returned columns") +class Response10(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + credible_intervals: dict[str, list[float]] + expected_loss: float + insight: FunnelsQueryResponse + probability: dict[str, float] + significance_code: ExperimentSignificanceCode + significant: bool + variants: list[ExperimentVariantFunnelsBaseStats] + + class DataWarehouseNode(BaseModel): model_config = ConfigDict( extra="forbid", @@ -4520,6 +4578,19 @@ class EventsNode(BaseModel): response: Optional[dict[str, Any]] = None +class ExperimentFunnelsQueryResponse(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + credible_intervals: dict[str, list[float]] + expected_loss: float + insight: FunnelsQueryResponse + probability: dict[str, float] + significance_code: ExperimentSignificanceCode + significant: bool + variants: list[ExperimentVariantFunnelsBaseStats] + + class FunnelExclusionActionsNode(BaseModel): model_config = ConfigDict( extra="forbid", @@ -5430,18 +5501,18 @@ class EventsQuery(BaseModel): where: Optional[list[str]] = Field(default=None, description="HogQL filters to apply on returned data") -class ExperimentTrendQuery(BaseModel): +class ExperimentTrendsQuery(BaseModel): model_config = ConfigDict( extra="forbid", ) count_query: TrendsQuery experiment_id: int exposure_query: Optional[TrendsQuery] = None - kind: Literal["ExperimentTrendQuery"] = "ExperimentTrendQuery" + kind: Literal["ExperimentTrendsQuery"] = "ExperimentTrendsQuery" modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) - response: Optional[ExperimentTrendQueryResponse] = None + response: Optional[ExperimentTrendsQueryResponse] = None class FunnelsQuery(BaseModel): @@ -5855,16 +5926,16 @@ class DatabaseSchemaQueryResponse(BaseModel): ] -class ExperimentFunnelQuery(BaseModel): +class ExperimentFunnelsQuery(BaseModel): model_config = ConfigDict( extra="forbid", ) experiment_id: int - kind: Literal["ExperimentFunnelQuery"] = "ExperimentFunnelQuery" + kind: Literal["ExperimentFunnelsQuery"] = "ExperimentFunnelsQuery" modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) - response: Optional[ExperimentFunnelQueryResponse] = None + response: Optional[ExperimentFunnelsQueryResponse] = None source: FunnelsQuery @@ -6184,8 +6255,8 @@ class DataTableNode(BaseModel): WebGoalsQuery, SessionAttributionExplorerQuery, ErrorTrackingQuery, - ExperimentFunnelQuery, - ExperimentTrendQuery, + ExperimentFunnelsQuery, + ExperimentTrendsQuery, ] = Field(..., description="Source of the events") @@ -6224,8 +6295,8 @@ class HogQLAutocomplete(BaseModel): WebGoalsQuery, SessionAttributionExplorerQuery, ErrorTrackingQuery, - ExperimentFunnelQuery, - ExperimentTrendQuery, + ExperimentFunnelsQuery, + ExperimentTrendsQuery, ] ] = Field(default=None, description="Query in whose context to validate.") startPosition: int = Field(..., description="Start position of the editor word") @@ -6268,8 +6339,8 @@ class HogQLMetadata(BaseModel): WebGoalsQuery, SessionAttributionExplorerQuery, ErrorTrackingQuery, - ExperimentFunnelQuery, - ExperimentTrendQuery, + 
ExperimentFunnelsQuery, + ExperimentTrendsQuery, ] ] = Field( default=None, @@ -6310,8 +6381,8 @@ class QueryRequest(BaseModel): WebGoalsQuery, SessionAttributionExplorerQuery, ErrorTrackingQuery, - ExperimentFunnelQuery, - ExperimentTrendQuery, + ExperimentFunnelsQuery, + ExperimentTrendsQuery, DataVisualizationNode, DataTableNode, SavedInsightNode, @@ -6349,6 +6420,7 @@ class QueryRequest(BaseModel): " `query_status` response field." ), ) + variables_override: Optional[dict[str, dict[str, Any]]] = None class QuerySchemaRoot( @@ -6374,8 +6446,8 @@ class QuerySchemaRoot( WebGoalsQuery, SessionAttributionExplorerQuery, ErrorTrackingQuery, - ExperimentFunnelQuery, - ExperimentTrendQuery, + ExperimentFunnelsQuery, + ExperimentTrendsQuery, DataVisualizationNode, DataTableNode, SavedInsightNode, @@ -6413,8 +6485,8 @@ class QuerySchemaRoot( WebGoalsQuery, SessionAttributionExplorerQuery, ErrorTrackingQuery, - ExperimentFunnelQuery, - ExperimentTrendQuery, + ExperimentFunnelsQuery, + ExperimentTrendsQuery, DataVisualizationNode, DataTableNode, SavedInsightNode, diff --git a/posthog/session_recordings/queries/session_recording_list_from_filters.py b/posthog/session_recordings/queries/session_recording_list_from_filters.py index 354e4232cee84..38e06c4e4837d 100644 --- a/posthog/session_recordings/queries/session_recording_list_from_filters.py +++ b/posthog/session_recordings/queries/session_recording_list_from_filters.py @@ -2,6 +2,8 @@ from typing import Any, NamedTuple, cast, Optional, Union from datetime import datetime, timedelta +import posthoganalytics + from posthog.hogql import ast from posthog.hogql.ast import CompareOperation from posthog.hogql.parser import parse_select @@ -33,12 +35,22 @@ def is_group_property(p: Property) -> bool: return p.type == "group" +def is_cohort_property(p: Property) -> bool: + return "cohort" in p.type + + class SessionRecordingQueryResult(NamedTuple): results: list has_more_recording: bool timings: list[QueryTiming] | None = None +class UnexpectedQueryProperties(Exception): + def __init__(self, remaining_properties: PropertyGroup | None): + self.remaining_properties = remaining_properties + super().__init__(f"Unexpected properties in query: {remaining_properties}") + + class SessionRecordingListFromFilters: SESSION_RECORDINGS_DEFAULT_LIMIT = 50 @@ -224,11 +236,19 @@ def _where_predicates(self) -> Union[ast.And, ast.Or]: ) ) - remaining_properties = self._strip_person_and_event_properties(self._filter.property_groups) - if remaining_properties: - logger.info( - "session_replay_query_builder has unhandled properties", unhandled_properties=remaining_properties + cohort_subquery = CohortPropertyGroupsSubQuery(self._team, self._filter, self.ttl_days).get_query() + if cohort_subquery: + optional_exprs.append( + ast.CompareOperation( + op=ast.CompareOperationOp.In, + left=ast.Field(chain=["s", "distinct_id"]), + right=cohort_subquery, + ) ) + + remaining_properties = self._strip_person_and_event_and_cohort_properties(self._filter.property_groups) + if remaining_properties: + posthoganalytics.capture_exception(UnexpectedQueryProperties(remaining_properties)) optional_exprs.append(property_to_expr(remaining_properties, team=self._team, scope="replay")) if self._filter.console_log_filters.values: @@ -267,11 +287,14 @@ def _where_predicates(self) -> Union[ast.And, ast.Or]: def _having_predicates(self) -> ast.Expr: return property_to_expr(self._filter.having_predicates, team=self._team, scope="replay") - def _strip_person_and_event_properties(self, property_group: 
PropertyGroup) -> PropertyGroup | None: + def _strip_person_and_event_and_cohort_properties(self, property_group: PropertyGroup) -> PropertyGroup | None: property_groups_to_keep = [ g for g in property_group.flat - if not is_event_property(g) and not is_person_property(g) and not is_group_property(g) + if not is_event_property(g) + and not is_person_property(g) + and not is_group_property(g) + and not is_cohort_property(g) ] return ( @@ -334,6 +357,44 @@ def _where_predicates(self) -> ast.Expr: ) +class CohortPropertyGroupsSubQuery: + _team: Team + _filter: SessionRecordingsFilter + _ttl_days: int + + raw_cohort_to_distinct_id = """ + select distinct_id + from person_distinct_ids + where {cohort_predicate} + """ + + def __init__(self, team: Team, filter: SessionRecordingsFilter, ttl_days: int): + self._team = team + self._filter = filter + self._ttl_days = ttl_days + + def get_query(self) -> ast.SelectQuery | ast.SelectUnionQuery | None: + if self.cohort_properties: + return parse_select( + self.raw_cohort_to_distinct_id, + {"cohort_predicate": property_to_expr(self.cohort_properties, team=self._team, scope="replay")}, + ) + + return None + + @cached_property + def cohort_properties(self) -> PropertyGroup | None: + cohort_property_groups = [g for g in self._filter.property_groups.flat if is_cohort_property(g)] + return ( + PropertyGroup( + type=self._filter.property_operand, + values=cohort_property_groups, + ) + if cohort_property_groups + else None + ) + + class PersonsIdCompareOperation: _team: Team _filter: SessionRecordingsFilter diff --git a/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_filters.ambr b/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_filters.ambr index 72161f92f2223..a0fc5699f9ee7 100644 --- a/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_filters.ambr +++ b/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_filters.ambr @@ -3161,17 +3161,18 @@ sum(s.console_error_count) AS console_error_count, ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-08-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing FROM session_replay_events AS s - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS s__pdi ON equals(s.distinct_id, s__pdi.distinct_id) - WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-07-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-21 20:00:00.000000', 6, 'UTC')), 0), ifNull(in(s__pdi.person_id, - (SELECT cohortpeople.person_id AS person_id - FROM cohortpeople - WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 2), equals(cohortpeople.version, 0)))), 0)) + WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-07-31 20:00:00.000000', 6, 'UTC')), 0), 
ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-21 20:00:00.000000', 6, 'UTC')), 0), in(s.distinct_id, + (SELECT person_distinct_ids.distinct_id AS distinct_id + FROM + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS person_distinct_ids + WHERE ifNull(in(person_distinct_ids.person_id, + (SELECT cohortpeople.person_id AS person_id + FROM cohortpeople + WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 2), equals(cohortpeople.version, 0)))), 0)))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -3224,22 +3225,23 @@ sum(s.console_error_count) AS console_error_count, ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-08-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing FROM session_replay_events AS s - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS s__pdi ON equals(s.distinct_id, s__pdi.distinct_id) WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-07-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-21 20:00:00.000000', 6, 'UTC')), 0), and(in(s.session_id, (SELECT events.`$session_id` AS session_id FROM events WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-07-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-08-13 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-08-21 20:00:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), 1)) GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['$pageview']))), ifNull(in(s__pdi.person_id, - (SELECT cohortpeople.person_id AS person_id - FROM cohortpeople - WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 2), equals(cohortpeople.version, 0)))), 0))) + HAVING hasAll(groupUniqArray(events.event), ['$pageview']))), in(s.distinct_id, + (SELECT person_distinct_ids.distinct_id AS distinct_id + FROM + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS 
optimize_aggregation_in_order=1) AS person_distinct_ids + WHERE ifNull(in(person_distinct_ids.person_id, + (SELECT cohortpeople.person_id AS person_id + FROM cohortpeople + WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 2), equals(cohortpeople.version, 0)))), 0))))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -3272,22 +3274,23 @@ sum(s.console_error_count) AS console_error_count, ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-08-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing FROM session_replay_events AS s - INNER JOIN - (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, - person_distinct_id2.distinct_id AS distinct_id - FROM person_distinct_id2 - WHERE equals(person_distinct_id2.team_id, 2) - GROUP BY person_distinct_id2.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS s__pdi ON equals(s.distinct_id, s__pdi.distinct_id) WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-07-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-21 20:00:00.000000', 6, 'UTC')), 0), and(in(s.session_id, (SELECT events.`$session_id` AS session_id FROM events WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-07-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-08-13 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-08-21 20:00:00.000000', 6, 'UTC')), and(equals(events.event, 'custom_event'), 1)) GROUP BY events.`$session_id` - HAVING hasAll(groupUniqArray(events.event), ['custom_event']))), ifNull(in(s__pdi.person_id, - (SELECT cohortpeople.person_id AS person_id - FROM cohortpeople - WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 2), equals(cohortpeople.version, 0)))), 0))) + HAVING hasAll(groupUniqArray(events.event), ['custom_event']))), in(s.distinct_id, + (SELECT person_distinct_ids.distinct_id AS distinct_id + FROM + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS person_distinct_ids + WHERE ifNull(in(person_distinct_ids.person_id, + (SELECT cohortpeople.person_id AS person_id + FROM cohortpeople + WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 2), equals(cohortpeople.version, 0)))), 0))))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -3407,6 +3410,190 @@ max_bytes_before_external_group_by=0 ''' # --- +# name: TestSessionRecordingsListFromFilters.test_filter_with_static_and_dynamic_cohort_properties + ''' + + SELECT count(DISTINCT person_id) + FROM person_static_cohort + WHERE team_id = 2 + AND cohort_id = 2 + ''' +# --- +# name: 
TestSessionRecordingsListFromFilters.test_filter_with_static_and_dynamic_cohort_properties.1 + ''' + + SELECT count(DISTINCT person_id) + FROM cohortpeople + WHERE team_id = 2 + AND cohort_id = 2 + AND version = NULL + ''' +# --- +# name: TestSessionRecordingsListFromFilters.test_filter_with_static_and_dynamic_cohort_properties.2 + ''' + /* cohort_calculation: */ + SELECT count(DISTINCT person_id) + FROM cohortpeople + WHERE team_id = 2 + AND cohort_id = 2 + AND version = 0 + ''' +# --- +# name: TestSessionRecordingsListFromFilters.test_filter_with_static_and_dynamic_cohort_properties.3 + ''' + + SELECT count(DISTINCT person_id) + FROM cohortpeople + WHERE team_id = 2 + AND cohort_id = 2 + AND version = NULL + ''' +# --- +# name: TestSessionRecordingsListFromFilters.test_filter_with_static_and_dynamic_cohort_properties.4 + ''' + /* cohort_calculation: */ + SELECT count(DISTINCT person_id) + FROM cohortpeople + WHERE team_id = 2 + AND cohort_id = 2 + AND version = 0 + ''' +# --- +# name: TestSessionRecordingsListFromFilters.test_filter_with_static_and_dynamic_cohort_properties.5 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-08-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-07-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-21 20:00:00.000000', 6, 'UTC')), 0), in(s.distinct_id, + (SELECT person_distinct_ids.distinct_id AS distinct_id + FROM + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS person_distinct_ids + WHERE ifNull(in(person_distinct_ids.person_id, + (SELECT person_static_cohort.person_id AS person_id + FROM person_static_cohort + WHERE and(equals(person_static_cohort.team_id, 2), equals(person_static_cohort.cohort_id, 2)))), 0)))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 + ''' +# --- +# name: TestSessionRecordingsListFromFilters.test_filter_with_static_and_dynamic_cohort_properties.6 + ''' + SELECT 
s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-08-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-07-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-21 20:00:00.000000', 6, 'UTC')), 0), in(s.distinct_id, + (SELECT person_distinct_ids.distinct_id AS distinct_id + FROM + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS person_distinct_ids + WHERE ifNull(in(person_distinct_ids.person_id, + (SELECT cohortpeople.person_id AS person_id + FROM cohortpeople + WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 2), equals(cohortpeople.version, 0)))), 0)))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 + ''' +# --- +# name: TestSessionRecordingsListFromFilters.test_filter_with_static_and_dynamic_cohort_properties.7 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-08-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-07-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-14 
00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-21 20:00:00.000000', 6, 'UTC')), 0), in(s.distinct_id, + (SELECT person_distinct_ids.distinct_id AS distinct_id + FROM + (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE equals(person_distinct_id2.team_id, 2) + GROUP BY person_distinct_id2.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS person_distinct_ids + WHERE and(ifNull(in(person_distinct_ids.person_id, + (SELECT cohortpeople.person_id AS person_id + FROM cohortpeople + WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 2), equals(cohortpeople.version, 0)))), 0), ifNull(in(person_distinct_ids.person_id, + (SELECT person_static_cohort.person_id AS person_id + FROM person_static_cohort + WHERE and(equals(person_static_cohort.team_id, 2), equals(person_static_cohort.cohort_id, 2)))), 0))))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 + ''' +# --- # name: TestSessionRecordingsListFromFilters.test_multiple_event_filters ''' SELECT s.session_id AS session_id, diff --git a/posthog/session_recordings/queries/test/test_session_recording_list_from_filters.py b/posthog/session_recordings/queries/test/test_session_recording_list_from_filters.py index eec4a40f74b75..e18cb0941d27a 100644 --- a/posthog/session_recordings/queries/test/test_session_recording_list_from_filters.py +++ b/posthog/session_recordings/queries/test/test_session_recording_list_from_filters.py @@ -2161,15 +2161,141 @@ def test_filter_with_cohort_properties(self): { "key": "id", "value": cohort.pk, - "operator": None, + "operator": "in", "type": "cohort", } ] } ) - assert len(session_recordings) == 1 - assert session_recordings[0]["session_id"] == session_id_two + assert [x["session_id"] for x in session_recordings] == [session_id_two] + + @snapshot_clickhouse_queries + @also_test_with_materialized_columns(person_properties=["$some_prop"]) + def test_filter_with_static_and_dynamic_cohort_properties(self): + with self.settings(USE_PRECALCULATED_CH_COHORT_PEOPLE=True): + with freeze_time("2021-08-21T20:00:00.000Z"): + user_one = "test_filter_with_cohort_properties-user-in-static-cohort" + user_two = "test_filter_with_cohort_properties-user2-in-dynamic-cohort" + user_three = "test_filter_with_cohort_properties-user3-in-both-cohort" + + session_id_one = ( + f"in-static-cohort-test_filter_with_static_and_dynamic_cohort_properties-1-{str(uuid4())}" + ) + session_id_two = ( + f"in-dynamic-cohort-test_filter_with_static_and_dynamic_cohort_properties-2-{str(uuid4())}" + ) + session_id_three = ( + f"in-both-cohort-test_filter_with_static_and_dynamic_cohort_properties-3-{str(uuid4())}" + ) + + Person.objects.create(team=self.team, distinct_ids=[user_one], properties={"email": "in@static.cohort"}) + Person.objects.create( + team=self.team, + distinct_ids=[user_two], + properties={"email": "in@dynamic.cohort", "$some_prop": "some_val"}, + ) + Person.objects.create( + team=self.team, + distinct_ids=[user_three], + properties={"email": "in@both.cohorts", "$some_prop": 
"some_val"}, + ) + + dynamic_cohort = Cohort.objects.create( + team=self.team, + name="cohort1", + groups=[ + { + "properties": [ + { + "key": "$some_prop", + "value": "some_val", + "type": "person", + } + ] + } + ], + ) + + static_cohort = Cohort.objects.create(team=self.team, name="a static cohort", groups=[], is_static=True) + static_cohort.insert_users_by_list([user_one, user_three]) + + dynamic_cohort.calculate_people_ch(pending_version=0) + static_cohort.calculate_people_ch(pending_version=0) + + replay_summaries = [ + (user_one, session_id_one), + (user_two, session_id_two), + (user_three, session_id_three), + ] + for distinct_id, session_id in replay_summaries: + produce_replay_summary( + distinct_id=distinct_id, + session_id=session_id, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + produce_replay_summary( + distinct_id=distinct_id, + session_id=session_id, + first_timestamp=self.an_hour_ago + relativedelta(seconds=30), + team_id=self.team.id, + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "properties": [ + { + "key": "id", + "value": static_cohort.pk, + "operator": "in", + "type": "cohort", + }, + ] + } + ) + + assert sorted([x["session_id"] for x in session_recordings]) == sorted( + [session_id_one, session_id_three] + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "properties": [ + { + "key": "id", + "value": dynamic_cohort.pk, + "operator": "in", + "type": "cohort", + }, + ] + } + ) + + assert sorted([x["session_id"] for x in session_recordings]) == sorted( + [session_id_two, session_id_three] + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "properties": [ + { + "key": "id", + "value": dynamic_cohort.pk, + "operator": "in", + "type": "cohort", + }, + { + "key": "id", + "value": static_cohort.pk, + "operator": "in", + "type": "cohort", + }, + ] + } + ) + + assert sorted([x["session_id"] for x in session_recordings]) == [session_id_three] @snapshot_clickhouse_queries @also_test_with_materialized_columns(person_properties=["$some_prop"]) @@ -2251,7 +2377,7 @@ def test_filter_with_events_and_cohorts(self): { "key": "id", "value": cohort.pk, - "operator": None, + "operator": "in", "type": "cohort", } ], @@ -2274,7 +2400,7 @@ def test_filter_with_events_and_cohorts(self): { "key": "id", "value": cohort.pk, - "operator": None, + "operator": "in", "type": "cohort", } ], @@ -2289,8 +2415,7 @@ def test_filter_with_events_and_cohorts(self): } ) - assert len(session_recordings) == 1 - assert session_recordings[0]["session_id"] == session_id_two + assert [x["session_id"] for x in session_recordings] == [session_id_two] @snapshot_clickhouse_queries @also_test_with_materialized_columns(["$current_url"]) diff --git a/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr b/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr index cf5c8dd3c7dde..550009ec55f1c 100644 --- a/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr +++ b/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr @@ -25,6 +25,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -90,6 +91,7 @@ 
"posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -155,6 +157,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -220,6 +223,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -285,6 +289,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -408,7 +413,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -420,6 +424,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -458,6 +463,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -548,6 +554,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -668,7 +675,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -681,6 +687,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -793,7 +800,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -806,6 +812,7 @@ "posthog_user"."requested_password_reset_at", 
"posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -917,6 +924,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1014,6 +1022,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1079,6 +1088,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1144,6 +1154,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1209,6 +1220,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1274,6 +1286,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1339,6 +1352,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1386,7 +1400,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -1398,6 +1411,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -1436,6 +1450,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", 
+ "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1490,7 +1505,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -1503,6 +1517,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -1633,6 +1648,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1718,7 +1734,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -1730,6 +1745,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -1768,6 +1784,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -1932,7 +1949,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -1945,6 +1961,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -2057,7 +2074,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -2070,6 +2086,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -2229,6 +2246,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -2314,7 +2332,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", 
"posthog_user"."current_organization_id", @@ -2326,6 +2343,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -2364,6 +2382,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -2513,6 +2532,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -2574,7 +2594,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -2587,6 +2606,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -2699,7 +2719,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -2712,6 +2731,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -2865,6 +2885,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -2969,7 +2990,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -2981,6 +3001,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -3019,6 +3040,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -3164,7 +3186,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", 
"posthog_user"."current_organization_id", @@ -3177,6 +3198,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -3308,7 +3330,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -3321,6 +3342,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -3476,6 +3498,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -3561,7 +3584,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -3573,6 +3595,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -3593,7 +3616,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -3605,6 +3627,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -3643,6 +3666,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -3788,7 +3812,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -3801,6 +3824,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -3917,6 +3941,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -3971,7 +3996,6 @@ "posthog_user"."first_name", 
"posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -3984,6 +4008,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -4141,6 +4166,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -4226,7 +4252,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -4238,6 +4263,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -4276,6 +4302,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -4453,7 +4480,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -4466,6 +4492,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -4578,7 +4605,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -4591,6 +4617,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -4837,7 +4864,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -4850,6 +4876,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -4962,7 +4989,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -4975,6 +5001,7 @@ "posthog_user"."requested_password_reset_at", 
"posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -5142,6 +5169,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -5227,7 +5255,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -5239,6 +5266,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -5277,6 +5305,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -5434,7 +5463,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -5447,6 +5475,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -5559,7 +5588,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -5572,6 +5600,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -5715,6 +5744,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -5821,7 +5851,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -5833,6 +5862,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -5871,6 +5901,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", 
"posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -6016,7 +6047,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -6029,6 +6059,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -6108,7 +6139,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -6121,6 +6151,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -6214,7 +6245,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -6227,6 +6257,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -6372,6 +6403,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -6476,7 +6508,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -6488,6 +6519,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -6526,6 +6558,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -6671,7 +6704,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -6684,6 +6716,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ 
-6808,7 +6841,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -6821,6 +6853,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -6968,6 +7001,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -7053,7 +7087,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -7065,6 +7098,7 @@ "posthog_user"."is_email_verified", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -7103,6 +7137,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -7269,7 +7304,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -7282,6 +7316,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -7394,7 +7429,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -7407,6 +7441,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", diff --git a/posthog/settings/temporal.py b/posthog/settings/temporal.py index b73a7a0b6af83..dcab7bfb9a58a 100644 --- a/posthog/settings/temporal.py +++ b/posthog/settings/temporal.py @@ -17,6 +17,7 @@ BATCH_EXPORT_BIGQUERY_UPLOAD_CHUNK_SIZE_BYTES: int = 1024 * 1024 * 100 # 100MB BATCH_EXPORT_HTTP_UPLOAD_CHUNK_SIZE_BYTES: int = 1024 * 1024 * 50 # 50MB BATCH_EXPORT_HTTP_BATCH_SIZE: int = 5000 +BATCH_EXPORT_BUFFER_QUEUE_MAX_SIZE_BYTES: int = 1024 * 1024 * 300 # 300MB UNCONSTRAINED_TIMESTAMP_TEAM_IDS: list[str] = get_list(os.getenv("UNCONSTRAINED_TIMESTAMP_TEAM_IDS", "")) ASYNC_ARROW_STREAMING_TEAM_IDS: list[str] = get_list(os.getenv("ASYNC_ARROW_STREAMING_TEAM_IDS", "")) diff --git a/posthog/settings/web.py b/posthog/settings/web.py index 
92ec31382fe45..6e32dff2e6138 100644 --- a/posthog/settings/web.py +++ b/posthog/settings/web.py @@ -2,8 +2,8 @@ import os from datetime import timedelta -from corsheaders.defaults import default_headers import structlog +from corsheaders.defaults import default_headers from posthog.settings.base_variables import BASE_DIR, DEBUG, TEST from posthog.settings.utils import get_from_env, get_list, str_to_bool @@ -41,6 +41,9 @@ DECIDE_BILLING_SAMPLING_RATE = get_from_env("DECIDE_BILLING_SAMPLING_RATE", 0.1, type_cast=float) DECIDE_BILLING_ANALYTICS_TOKEN = get_from_env("DECIDE_BILLING_ANALYTICS_TOKEN", None, type_cast=str, optional=True) +# temporary, used for safe rollout of defaulting people into anonymous events / process_persons: identified_only +DEFAULT_IDENTIFIED_ONLY_TEAM_ID_MIN: int = get_from_env("DEFAULT_IDENTIFIED_ONLY_TEAM_ID_MIN", 1000000, type_cast=int) + # Decide regular request analytics # Takes 3 possible formats, all separated by commas: # A number: "2" diff --git a/posthog/tasks/alerts/checks.py b/posthog/tasks/alerts/checks.py index 7af02e97a2ccd..d6f8c020f1d7e 100644 --- a/posthog/tasks/alerts/checks.py +++ b/posthog/tasks/alerts/checks.py @@ -1,17 +1,18 @@ +import math +import time +import traceback + from datetime import datetime, timedelta, UTC -from typing import Optional, cast +from typing import cast from dateutil.relativedelta import relativedelta from celery import shared_task from celery.canvas import chain +from django.conf import settings from django.db import transaction -from django.utils import timezone import structlog from sentry_sdk import capture_exception -from posthog.api.services.query import ExecutionMode -from posthog.caching.calculate_results import calculate_for_query_based_insight -from posthog.email import EmailMessage from posthog.errors import CHQueryErrorTooManySimultaneousQueries from posthog.hogql_queries.legacy_compatibility.flagged_conversion_manager import ( conversion_to_query_based, @@ -21,40 +22,36 @@ from posthog.tasks.utils import CeleryQueue from posthog.schema import ( TrendsQuery, - IntervalType, - ChartDisplayType, - NodeKind, AlertCalculationInterval, AlertState, - TrendsAlertConfig, ) from posthog.utils import get_from_dict_or_attr -from posthog.caching.fetch_from_cache import InsightResult -from posthog.clickhouse.client.limit import limit_concurrency from prometheus_client import Counter, Gauge from django.db.models import Q, F -from typing import TypedDict, NotRequired from collections import defaultdict +from posthog.tasks.alerts.utils import ( + AlertEvaluationResult, + calculation_interval_to_order, + send_notifications_for_errors, + send_notifications_for_breaches, + WRAPPER_NODE_KINDS, + alert_calculation_interval_to_relativedelta, +) +from posthog.tasks.alerts.trends import check_trends_alert -# TODO: move the TrendResult UI type to schema.ts and use that instead -class TrendResult(TypedDict): - action: dict - actions: list[dict] - count: int - data: list[float] - days: list[str] - dates: list[str] - label: str - labels: list[str] - breakdown_value: str | int | list[str] - aggregated_value: NotRequired[float] - status: str | None - compare_label: str | None - compare: bool - persons_urls: list[dict] - persons: dict - filter: dict +logger = structlog.get_logger(__name__) + + +class AlertCheckException(Exception): + """ + Required for custom exceptions to pass stack trace to sentry. + Subclassing through other ways doesn't transfer the traceback. 
+ https://stackoverflow.com/a/69963663/5540417 + """ + + def __init__(self, err: Exception): + self.__traceback__ = err.__traceback__ HOURLY_ALERTS_BACKLOG_GAUGE = Gauge( @@ -78,28 +75,9 @@ class TrendResult(TypedDict): ) -logger = structlog.get_logger(__name__) - - -WRAPPER_NODE_KINDS = [NodeKind.DATA_TABLE_NODE, NodeKind.DATA_VISUALIZATION_NODE, NodeKind.INSIGHT_VIZ_NODE] - -NON_TIME_SERIES_DISPLAY_TYPES = { - ChartDisplayType.BOLD_NUMBER, - ChartDisplayType.ACTIONS_PIE, - ChartDisplayType.ACTIONS_BAR_VALUE, - ChartDisplayType.ACTIONS_TABLE, - ChartDisplayType.WORLD_MAP, -} - - -def calculation_interval_to_order(interval: AlertCalculationInterval | None) -> int: - match interval: - case AlertCalculationInterval.HOURLY: - return 0 - case AlertCalculationInterval.DAILY: - return 1 - case _: - return 2 +@shared_task(ignore_result=True) +def checks_cleanup_task() -> None: + AlertCheck.clean_up_old_checks() @shared_task( @@ -136,6 +114,9 @@ def alerts_backlog_task() -> None: DAILY_ALERTS_BACKLOG_GAUGE.set(daily_alerts_breaching_sla) + # sleeping 30s for prometheus to pick up the metrics sent during task + time.sleep(30) + @shared_task( ignore_result=True, @@ -145,48 +126,18 @@ def check_alerts_task() -> None: """ This runs every 2min to check for alerts that are due to recalculate """ - check_alerts() - - -@shared_task( - ignore_result=True, - queue=CeleryQueue.ALERTS.value, - autoretry_for=(CHQueryErrorTooManySimultaneousQueries,), - retry_backoff=1, - retry_backoff_max=10, - max_retries=3, - expires=60 * 60, -) -@limit_concurrency(5) # Max 5 concurrent alert checks -def check_alert_task(alert_id: str) -> None: - try: - check_alert(alert_id) - except Exception as err: - ALERT_CHECK_ERROR_COUNTER.inc() - capture_exception(Exception(f"Error checking alert, user wasn't notified: {err}")) - raise - - -@shared_task(ignore_result=True) -def checks_cleanup_task() -> None: - AlertCheck.clean_up_old_checks() - - -def check_alerts() -> None: now = datetime.now(UTC) # Use a fixed expiration time since tasks in the chain are executed sequentially expire_after = now + timedelta(minutes=30) - # find all alerts with the provided interval that are due to be calculated (next_check_at is null or less than now) + # find all alerts with the provided interval that are due to be calculated + # (next_check_at is null or less than now) and it's not snoozed alerts = ( AlertConfiguration.objects.filter( Q(enabled=True, is_calculating=False, next_check_at__lte=now) - | Q( - enabled=True, - is_calculating=False, - next_check_at__isnull=True, - ) + | Q(enabled=True, is_calculating=False, next_check_at__isnull=True) ) + .filter(Q(snoozed_until__isnull=True) | Q(snoozed_until__lt=now)) .order_by(F("next_check_at").asc(nulls_first=True)) .only("id", "team", "calculation_interval") ) @@ -207,7 +158,23 @@ def check_alerts() -> None: chain(*(check_alert_task.si(str(alert_id)).set(expires=expire_after) for alert_id in alert_ids))() +@shared_task( + ignore_result=True, + queue=CeleryQueue.ALERTS.value, + autoretry_for=(CHQueryErrorTooManySimultaneousQueries,), + retry_backoff=1, + retry_backoff_max=10, + max_retries=3, + expires=60 * 60, +) +# @limit_concurrency(5) Concurrency controlled by CeleryQueue.ALERTS for now +def check_alert_task(alert_id: str) -> None: + check_alert(alert_id) + + def check_alert(alert_id: str) -> None: + task_start_time = time.time() + try: alert = AlertConfiguration.objects.get(id=alert_id, enabled=True) except AlertConfiguration.DoesNotExist: @@ -230,12 +197,35 @@ def check_alert(alert_id: str) -> 
None: ) return + if alert.snoozed_until: + if alert.snoozed_until > now: + logger.warning( + "Alert has been snoozed so skipping checking it now", + alert=alert, + ) + return + else: + # not snoozed (anymore) so clear snoozed_until + alert.snoozed_until = None + alert.state = AlertState.NOT_FIRING + alert.is_calculating = True alert.save() try: - check_alert_atomically(alert) - except Exception: + check_alert_and_notify_atomically(alert) + except Exception as err: + ALERT_CHECK_ERROR_COUNTER.inc() + + logger.exception(AlertCheckException(err)) + capture_exception( + AlertCheckException(err), + tags={ + "alert_configuration_id": alert_id, + }, + ) + + # raise again so alert check is retried depending on error type raise finally: # Get all updates with alert checks @@ -243,186 +233,133 @@ def check_alert(alert_id: str) -> None: alert.is_calculating = False alert.save() + # only in PROD + if not settings.DEBUG and not settings.TEST: + task_duration = time.time() - task_start_time + + # Ensure task runs at least 40s + # for prometheus to pick up the metrics sent during task + time_left_to_run = 40 - math.floor(task_duration) + time.sleep(time_left_to_run) + @transaction.atomic -def check_alert_atomically(alert: AlertConfiguration) -> None: +def check_alert_and_notify_atomically(alert: AlertConfiguration) -> None: """ - Alert check only gets updated when we successfully - 1. Compute the aggregated value for the insight for the interval - 2. Compare the aggregated value with the threshold - 3. Send notifications if breaches are found + Computes insight results, checks alert for breaches and notifies user. + Only commits updates to alert state if all of the above complete successfully. + TODO: Later separate notification mechanism from alert checking mechanism (when we move to CDP) + so we can retry notification without re-computing insight. """ ALERT_COMPUTED_COUNTER.inc() + value = breaches = error = None - insight = alert.insight - aggregated_value: Optional[float] = None - error: Optional[dict] = None - + # 1. 
Evaluate insight and get alert value try: - with conversion_to_query_based(insight): - query = insight.query - kind = get_from_dict_or_attr(query, "kind") - - if kind in WRAPPER_NODE_KINDS: - query = get_from_dict_or_attr(query, "source") - kind = get_from_dict_or_attr(query, "kind") - - if kind == "TrendsQuery": - query = TrendsQuery.model_validate(query) - - filters_override = _calculate_date_range_override_for_alert(query) - - calculation_result = calculate_for_query_based_insight( - insight, - execution_mode=ExecutionMode.RECENT_CACHE_CALCULATE_BLOCKING_IF_STALE, - user=None, - filters_override=filters_override, - ) - else: - raise NotImplementedError(f"Alerts for {query.kind} are not supported yet") - - if not calculation_result.result: - raise RuntimeError(f"No results for alert {alert.id}") - - aggregated_value = _aggregate_insight_result_value(alert, query, calculation_result) + alert_evaluation_result = check_alert_for_insight(alert) + value = alert_evaluation_result.value + breaches = alert_evaluation_result.breaches except CHQueryErrorTooManySimultaneousQueries: - # error on our side, need to make sure to retry the alert check + # error on our side so we raise + # as celery task can be retried according to config raise except Exception as err: - # error possibly on user's config side - # notify user that alert check errored - error_message = f"AlertCheckError: error computing aggregate value for insight, alert_id = {alert.id}" - logger.exception(error_message) + capture_exception(AlertCheckException(err)) + # error can be on user side (incorrectly configured insight/alert) + # we won't retry and set alert to errored state + error = {"message": str(err), "traceback": traceback.format_exc()} - event_id = capture_exception( - Exception(error_message), - {"alert_id": alert.id, "query": str(query), "message": str(err)}, - ) + # 2. Check alert value against threshold + alert_check = add_alert_check(alert, value, breaches, error) - error = { - "sentry_event_id": event_id, - "message": f"{error_message}: {str(err)}", - } - - try: - # Lock alert to prevent concurrent state changes - alert = AlertConfiguration.objects.select_for_update().get(id=alert.id, enabled=True) - check, breaches, error, notify = alert.add_check(aggregated_value=aggregated_value, error=error) - except Exception as err: - error_message = f"AlertCheckError: error comparing insight value with threshold for alert_id = {alert.id}" - logger.exception(error_message) - - event_id = capture_exception( - Exception(error_message), - {"alert_id": alert.id, "query": str(query), "message": str(err)}, - ) - raise - - if not notify: - # no need to notify users + # 3. 
Notify users if needed + if not alert_check.targets_notified: return try: - match check.state: + match alert_check.state: case AlertState.NOT_FIRING: - logger.info("Check state is %s", check.state, alert_id=alert.id) + logger.info("Check state is %s", alert_check.state, alert_id=alert.id) case AlertState.ERRORED: - if error: - _send_notifications_for_errors(alert, error) + send_notifications_for_errors(alert, alert_check.error) case AlertState.FIRING: - _send_notifications_for_breaches(alert, breaches) + assert breaches is not None + send_notifications_for_breaches(alert, breaches) except Exception as err: error_message = f"AlertCheckError: error sending notifications for alert_id = {alert.id}" logger.exception(error_message) - event_id = capture_exception( + capture_exception( Exception(error_message), - {"alert_id": alert.id, "query": str(query), "message": str(err)}, + {"alert_id": alert.id, "message": str(err)}, ) + + # don't want alert state to be updated (so that it's retried as next_check_at won't be updated) + # so we raise again as @transaction.atomic decorator won't commit db updates + # TODO: later should have a way just to retry notification mechanism raise -def _calculate_date_range_override_for_alert(query: TrendsQuery) -> Optional[dict]: - if query.trendsFilter and query.trendsFilter.display in NON_TIME_SERIES_DISPLAY_TYPES: - # for single value insights, need to recompute with full time range - return None - - match query.interval: - case IntervalType.DAY: - date_from = "-1d" - case IntervalType.WEEK: - date_from = "-1w" - case IntervalType.MONTH: - date_from = "-1m" - case _: - date_from = "-1h" - - return {"date_from": date_from} - - -def _aggregate_insight_result_value(alert: AlertConfiguration, query: TrendsQuery, results: InsightResult) -> float: - if "type" in alert.config and alert.config["type"] == "TrendsAlertConfig": - alert_config = TrendsAlertConfig.model_validate(alert.config) - series_index = alert_config.series_index - result = cast(list[TrendResult], results.result)[series_index] - - if query.trendsFilter and query.trendsFilter.display in NON_TIME_SERIES_DISPLAY_TYPES: - return result["aggregated_value"] - - return result["data"][-1] - - raise ValueError(f"Unsupported alert config type: {alert_config.type}") - - -def _send_notifications_for_breaches(alert: AlertConfiguration, breaches: list[str]) -> None: - subject = f"PostHog alert {alert.name} is firing" - campaign_key = f"alert-firing-notification-{alert.id}-{timezone.now().timestamp()}" - insight_url = f"/project/{alert.team.pk}/insights/{alert.insight.short_id}?alert_id={alert.id}" - alert_url = f"{insight_url}/alerts/{alert.id}" - message = EmailMessage( - campaign_key=campaign_key, - subject=subject, - template_name="alert_check_firing", - template_context={ - "match_descriptions": breaches, - "insight_url": insight_url, - "insight_name": alert.insight.name, - "alert_url": alert_url, - "alert_name": alert.name, - }, +def check_alert_for_insight(alert: AlertConfiguration) -> AlertEvaluationResult: + """ + Matches insight type with alert checking logic + """ + insight = alert.insight + + with conversion_to_query_based(insight): + query = insight.query + kind = get_from_dict_or_attr(query, "kind") + + if kind in WRAPPER_NODE_KINDS: + query = get_from_dict_or_attr(query, "source") + kind = get_from_dict_or_attr(query, "kind") + + match kind: + case "TrendsQuery": + query = TrendsQuery.model_validate(query) + return check_trends_alert(alert, insight, query) + case _: + raise 
NotImplementedError(f"AlertCheckError: Alerts for {query.kind} are not supported yet") + + +def add_alert_check( + alert: AlertConfiguration, value: float | None, breaches: list[str] | None, error: dict | None +) -> AlertCheck: + notify = False + targets_notified = {} + + if error: + alert.state = AlertState.ERRORED + notify = True + elif breaches: + alert.state = AlertState.FIRING + notify = True + else: + alert.state = AlertState.NOT_FIRING # Set the Alert to not firing if the threshold is no longer met + # TODO: Optionally send a resolved notification when alert goes from firing to not_firing? + + now = datetime.now(UTC) + alert.last_checked_at = datetime.now(UTC) + + # IMPORTANT: update next_check_at according to interval + # ensure we don't recheck alert until the next interval is due + alert.next_check_at = (alert.next_check_at or now) + alert_calculation_interval_to_relativedelta( + cast(AlertCalculationInterval, alert.calculation_interval) ) - targets = alert.subscribed_users.all().values_list("email", flat=True) - if not targets: - raise RuntimeError(f"no targets configured for the alert {alert.id}") - for target in targets: - message.add_recipient(email=target) - - logger.info(f"Send notifications about {len(breaches)} anomalies", alert_id=alert.id) - message.send() - - -def _send_notifications_for_errors(alert: AlertConfiguration, error: dict) -> None: - subject = f"PostHog alert {alert.name} check failed to evaluate" - campaign_key = f"alert-firing-notification-{alert.id}-{timezone.now().timestamp()}" - insight_url = f"/project/{alert.team.pk}/insights/{alert.insight.short_id}?alert_id={alert.id}" - alert_url = f"{insight_url}/alerts/{alert.id}" - message = EmailMessage( - campaign_key=campaign_key, - subject=subject, - template_name="alert_check_firing", - template_context={ - "match_descriptions": error, - "insight_url": insight_url, - "insight_name": alert.insight.name, - "alert_url": alert_url, - "alert_name": alert.name, - }, + + if notify: + alert.last_notified_at = now + targets_notified = {"users": list(alert.subscribed_users.all().values_list("email", flat=True))} + + alert_check = AlertCheck.objects.create( + alert_configuration=alert, + calculated_value=value, + condition=alert.condition, + targets_notified=targets_notified, + state=alert.state, + error=error, ) - targets = alert.subscribed_users.all().values_list("email", flat=True) - if not targets: - raise RuntimeError(f"no targets configured for the alert {alert.id}") - for target in targets: - message.add_recipient(email=target) - - logger.info(f"Send notifications about alert checking error", alert_id=alert.id) - message.send() + + alert.save() + + return alert_check diff --git a/posthog/tasks/alerts/test/test_alert_checks.py b/posthog/tasks/alerts/test/test_alert_checks.py index e14c48359aac3..7e90f4293bc41 100644 --- a/posthog/tasks/alerts/test/test_alert_checks.py +++ b/posthog/tasks/alerts/test/test_alert_checks.py @@ -5,7 +5,8 @@ from posthog.models.alert import AlertCheck from posthog.models.instance_setting import set_instance_setting -from posthog.tasks.alerts.checks import _send_notifications_for_breaches, check_alert +from posthog.tasks.alerts.utils import send_notifications_for_breaches +from posthog.tasks.alerts.checks import check_alert from posthog.test.base import APIBaseTest, _create_event, flush_persons_and_events, ClickhouseDestroyTablesMixin from posthog.api.test.dashboards import DashboardAPI from posthog.schema import ChartDisplayType, EventsNode, TrendsQuery, TrendsFilter, AlertState @@ 
-14,8 +15,8 @@ @freeze_time("2024-06-02T08:55:00.000Z") -@patch("posthog.tasks.alerts.checks._send_notifications_for_errors") -@patch("posthog.tasks.alerts.checks._send_notifications_for_breaches") +@patch("posthog.tasks.alerts.checks.send_notifications_for_errors") +@patch("posthog.tasks.alerts.checks.send_notifications_for_breaches") class TestAlertChecks(APIBaseTest, ClickhouseDestroyTablesMixin): def setUp(self) -> None: super().setUp() @@ -52,14 +53,15 @@ def setUp(self) -> None: "type": "TrendsAlertConfig", "series_index": 0, }, - "threshold": {"configuration": {"absoluteThreshold": {}}}, + "condition": {"type": "absolute_value"}, + "threshold": {"configuration": {"type": "absolute", "bounds": {}}}, }, ).json() def set_thresholds(self, lower: Optional[int] = None, upper: Optional[int] = None) -> None: self.client.patch( f"/api/projects/{self.team.id}/alerts/{self.alert['id']}", - data={"threshold": {"configuration": {"absoluteThreshold": {"lower": lower, "upper": upper}}}}, + data={"threshold": {"configuration": {"type": "absolute", "bounds": {"lower": lower, "upper": upper}}}}, ) def get_breach_description(self, mock_send_notifications_for_breaches: MagicMock, call_index: int) -> list[str]: @@ -97,7 +99,7 @@ def test_alert_is_triggered_for_values_above_higher_threshold( anomalies_descriptions = self.get_breach_description(mock_send_notifications_for_breaches, call_index=0) assert len(anomalies_descriptions) == 1 - assert "The trend value (1) is above the upper threshold (0.0)" in anomalies_descriptions[0] + assert "The insight value for previous day is (1) more than upper threshold (0.0)" in anomalies_descriptions[0] def test_alert_is_not_triggered_for_events_beyond_interval( self, mock_send_notifications_for_breaches: MagicMock, mock_send_errors: MagicMock @@ -125,7 +127,7 @@ def test_alert_is_triggered_for_value_below_lower_threshold( assert mock_send_notifications_for_breaches.call_count == 1 anomalies = self.get_breach_description(mock_send_notifications_for_breaches, call_index=0) - assert "The trend value (0) is below the lower threshold (1.0)" in anomalies + assert "The insight value for previous day is (0) less than lower threshold (1.0)" in anomalies def test_alert_triggers_but_does_not_send_notification_during_firing( self, mock_send_notifications_for_breaches: MagicMock, mock_send_errors: MagicMock @@ -225,7 +227,7 @@ def test_send_error_while_calculating( self, _mock_send_notifications_for_breaches: MagicMock, mock_send_notifications_for_errors: MagicMock ) -> None: with patch( - "posthog.tasks.alerts.checks.calculate_for_query_based_insight" + "posthog.tasks.alerts.trends.calculate_for_query_based_insight" ) as mock_calculate_for_query_based_insight: mock_calculate_for_query_based_insight.side_effect = Exception("Some error") @@ -238,7 +240,6 @@ def test_send_error_while_calculating( ) error_message = latest_alert_check.error["message"] - assert "AlertCheckError: error computing aggregate value for insight" in error_message assert "Some error" in error_message def test_error_while_calculating_on_alert_in_firing_state( @@ -254,7 +255,7 @@ def test_error_while_calculating_on_alert_in_firing_state( assert latest_alert_check.error is None with patch( - "posthog.tasks.alerts.checks.calculate_for_query_based_insight" + "posthog.tasks.alerts.trends.calculate_for_query_based_insight" ) as mock_calculate_for_query_based_insight: mock_calculate_for_query_based_insight.side_effect = Exception("Some error") @@ -269,7 +270,6 @@ def 
test_error_while_calculating_on_alert_in_firing_state( assert latest_alert_check.state == AlertState.ERRORED error_message = latest_alert_check.error["message"] - assert "AlertCheckError: error computing aggregate value for insight" in error_message assert "Some error" in error_message def test_error_while_calculating_on_alert_in_not_firing_state( @@ -285,7 +285,7 @@ def test_error_while_calculating_on_alert_in_not_firing_state( assert latest_alert_check.error is None with patch( - "posthog.tasks.alerts.checks.calculate_for_query_based_insight" + "posthog.tasks.alerts.trends.calculate_for_query_based_insight" ) as mock_calculate_for_query_based_insight: mock_calculate_for_query_based_insight.side_effect = Exception("Some error") @@ -299,7 +299,6 @@ def test_error_while_calculating_on_alert_in_not_firing_state( ) error_message = latest_alert_check.error["message"] - assert "AlertCheckError: error computing aggregate value for insight" in error_message assert "Some error" in error_message def test_alert_with_insight_with_filter( @@ -316,15 +315,15 @@ def test_alert_with_insight_with_filter( assert mock_send_notifications_for_breaches.call_count == 1 anomalies = self.get_breach_description(mock_send_notifications_for_breaches, call_index=0) - assert "The trend value (0) is below the lower threshold (1.0)" in anomalies + assert "The insight value for previous day is (0) less than lower threshold (1.0)" in anomalies - @patch("posthog.tasks.alerts.checks.EmailMessage") + @patch("posthog.tasks.alerts.utils.EmailMessage") def test_send_emails( self, MockEmailMessage: MagicMock, mock_send_notifications_for_breaches: MagicMock, mock_send_errors: MagicMock ) -> None: mocked_email_messages = mock_email_messages(MockEmailMessage) alert = AlertConfiguration.objects.get(pk=self.alert["id"]) - _send_notifications_for_breaches(alert, ["first anomaly description", "second anomaly description"]) + send_notifications_for_breaches(alert, ["first anomaly description", "second anomaly description"]) assert len(mocked_email_messages) == 1 email = mocked_email_messages[0] diff --git a/posthog/tasks/alerts/test/test_trend_alerts.py b/posthog/tasks/alerts/test/test_trends_absolute_alerts.py similarity index 72% rename from posthog/tasks/alerts/test/test_trend_alerts.py rename to posthog/tasks/alerts/test/test_trends_absolute_alerts.py index a5ff389d59f98..48e4228ad0079 100644 --- a/posthog/tasks/alerts/test/test_trend_alerts.py +++ b/posthog/tasks/alerts/test/test_trends_absolute_alerts.py @@ -1,8 +1,7 @@ from typing import Optional, Any -from unittest.mock import MagicMock, patch +from unittest.mock import ANY, MagicMock, patch import dateutil - from freezegun import freeze_time from posthog.models.alert import AlertCheck @@ -29,10 +28,10 @@ FROZEN_TIME = dateutil.parser.parse("2024-06-02T08:55:00.000Z") -@freeze_time("2024-06-02T08:55:00.000Z") -@patch("posthog.tasks.alerts.checks._send_notifications_for_errors") -@patch("posthog.tasks.alerts.checks._send_notifications_for_breaches") -class TestTimeSeriesTrendsAlerts(APIBaseTest, ClickhouseDestroyTablesMixin): +@freeze_time(FROZEN_TIME) +@patch("posthog.tasks.alerts.checks.send_notifications_for_errors") +@patch("posthog.tasks.alerts.checks.send_notifications_for_breaches") +class TestTimeSeriesTrendsAbsoluteAlerts(APIBaseTest, ClickhouseDestroyTablesMixin): def setUp(self) -> None: super().setUp() @@ -54,8 +53,9 @@ def create_alert( "type": "TrendsAlertConfig", "series_index": series_index, }, + "condition": {"type": "absolute_value"}, "calculation_interval": 
AlertCalculationInterval.DAILY, - "threshold": {"configuration": {"absoluteThreshold": {"lower": lower, "upper": upper}}}, + "threshold": {"configuration": {"type": "absolute", "bounds": {"lower": lower, "upper": upper}}}, }, ).json() @@ -96,7 +96,7 @@ def create_time_series_trend_insight(self, breakdown: Optional[BreakdownFilter] return insight - def test_alert_properties(self, mock_send_breaches: MagicMock, mock_send_errors: MagicMock) -> None: + def test_alert_lower_threshold_breached(self, mock_send_breaches: MagicMock, mock_send_errors: MagicMock) -> None: insight = self.create_time_series_trend_insight() alert = self.create_alert(insight, series_index=0, lower=1) @@ -118,11 +118,15 @@ def test_alert_properties(self, mock_send_breaches: MagicMock, mock_send_errors: assert alert_check.state == AlertState.FIRING assert alert_check.error is None + mock_send_breaches.assert_called_once_with( + ANY, ["The insight value for previous week is (0) less than lower threshold (1.0)"] + ) + def test_trend_high_threshold_breached(self, mock_send_breaches: MagicMock, mock_send_errors: MagicMock) -> None: insight = self.create_time_series_trend_insight() alert = self.create_alert(insight, series_index=0, upper=1) - with freeze_time("2024-06-02T07:55:00.000Z"): + with freeze_time(FROZEN_TIME - dateutil.relativedelta.relativedelta(days=1)): _create_event( team=self.team, event="signed_up", @@ -148,11 +152,15 @@ def test_trend_high_threshold_breached(self, mock_send_breaches: MagicMock, mock assert alert_check.state == AlertState.FIRING assert alert_check.error is None + mock_send_breaches.assert_called_once_with( + ANY, ["The insight value for previous week is (2) more than upper threshold (1.0)"] + ) + def test_trend_no_threshold_breached(self, mock_send_breaches: MagicMock, mock_send_errors: MagicMock) -> None: insight = self.create_time_series_trend_insight() alert = self.create_alert(insight, series_index=0, lower=0, upper=2) - with freeze_time("2024-06-02T07:55:00.000Z"): + with freeze_time(FROZEN_TIME - dateutil.relativedelta.relativedelta(days=1)): _create_event( team=self.team, event="signed_up", @@ -171,45 +179,3 @@ def test_trend_no_threshold_breached(self, mock_send_breaches: MagicMock, mock_s assert alert_check.calculated_value == 1 assert alert_check.state == AlertState.NOT_FIRING assert alert_check.error is None - - # TODO: support breakdowns - def test_trend_with_single_breakdown_threshold_breached( - self, mock_send_breaches: MagicMock, mock_send_errors: MagicMock - ) -> None: - insight = self.create_time_series_trend_insight( - breakdown=BreakdownFilter(breakdown_type="event", breakdown="$browser") - ) - alert = self.create_alert(insight, series_index=0, lower=0, upper=1) - - with freeze_time("2024-06-02T07:55:00.000Z"): - _create_event( - team=self.team, - event="signed_up", - distinct_id="1", - properties={"$browser": "Chrome"}, - ) - _create_event( - team=self.team, - event="signed_up", - distinct_id="2", - properties={"$browser": "Chrome"}, - ) - _create_event( - team=self.team, - event="signed_up", - distinct_id="1", - properties={"$browser": "Firefox"}, - ) - flush_persons_and_events() - - check_alert(alert["id"]) - - updated_alert = AlertConfiguration.objects.get(pk=alert["id"]) - assert updated_alert.state == AlertState.FIRING - assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) - - alert_check = AlertCheck.objects.filter(alert_configuration=alert["id"]).latest("created_at") - # calculated value should only be from browser = Chrome 
- assert alert_check.calculated_value == 2 - assert alert_check.state == AlertState.FIRING - assert alert_check.error is None diff --git a/posthog/tasks/alerts/test/test_trends_relative_alerts.py b/posthog/tasks/alerts/test/test_trends_relative_alerts.py new file mode 100644 index 0000000000000..e6007848e3c92 --- /dev/null +++ b/posthog/tasks/alerts/test/test_trends_relative_alerts.py @@ -0,0 +1,803 @@ +from typing import Optional, Any +from unittest.mock import ANY, MagicMock, patch +import dateutil + + +import dateutil.relativedelta +from freezegun import freeze_time + +from posthog.models.alert import AlertCheck +from posthog.models.instance_setting import set_instance_setting +from posthog.tasks.alerts.checks import check_alert +from posthog.test.base import APIBaseTest, _create_event, flush_persons_and_events, ClickhouseDestroyTablesMixin +from posthog.api.test.dashboards import DashboardAPI +from posthog.schema import ( + ChartDisplayType, + EventsNode, + TrendsQuery, + TrendsFilter, + IntervalType, + InsightDateRange, + EventPropertyFilter, + PropertyOperator, + BaseMathType, + AlertState, + AlertCalculationInterval, + AlertConditionType, + InsightThresholdType, + BreakdownFilter, +) +from posthog.models import AlertConfiguration + +# Tuesday +FROZEN_TIME = dateutil.parser.parse("2024-06-04T08:55:00.000Z") + + +@freeze_time(FROZEN_TIME) +@patch("posthog.tasks.alerts.checks.send_notifications_for_errors") +@patch("posthog.tasks.alerts.checks.send_notifications_for_breaches") +class TestTimeSeriesTrendsRelativeAlerts(APIBaseTest, ClickhouseDestroyTablesMixin): + def setUp(self) -> None: + super().setUp() + + set_instance_setting("EMAIL_HOST", "fake_host") + set_instance_setting("EMAIL_ENABLED", True) + + self.dashboard_api = DashboardAPI(self.client, self.team, self.assertEqual) + + def create_alert( + self, + insight: dict, + series_index: int, + condition_type: AlertConditionType, + threshold_type: InsightThresholdType, + lower: Optional[float] = None, + upper: Optional[float] = None, + ) -> dict: + alert = self.client.post( + f"/api/projects/{self.team.id}/alerts", + data={ + "name": "alert name", + "insight": insight["id"], + "subscribed_users": [self.user.id], + "config": { + "type": "TrendsAlertConfig", + "series_index": series_index, + }, + "condition": {"type": condition_type}, + "calculation_interval": AlertCalculationInterval.DAILY, + "threshold": {"configuration": {"type": threshold_type, "bounds": {"lower": lower, "upper": upper}}}, + }, + ).json() + + return alert + + def create_time_series_trend_insight( + self, interval: IntervalType, breakdown: Optional[BreakdownFilter] = None + ) -> dict[str, Any]: + query_dict = TrendsQuery( + series=[ + EventsNode( + event="signed_up", + math=BaseMathType.TOTAL, + properties=[ + EventPropertyFilter( + key="$browser", + operator=PropertyOperator.EXACT, + value=["Chrome"], + ) + ], + ), + EventsNode( + event="$pageview", + name="Pageview", + math=BaseMathType.TOTAL, + ), + ], + breakdownFilter=breakdown, + trendsFilter=TrendsFilter(display=ChartDisplayType.ACTIONS_LINE_GRAPH), + interval=interval, + dateRange=InsightDateRange(date_from="-8w"), + ).model_dump() + + insight = self.dashboard_api.create_insight( + data={ + "name": "insight", + "query": query_dict, + } + )[1] + + return insight + + def test_alert_properties(self, mock_send_breaches: MagicMock, mock_send_errors: MagicMock) -> None: + insight = self.create_time_series_trend_insight(interval=IntervalType.WEEK) + # alert if sign ups increase by less than 1 + alert = 
self.create_alert( + insight, + series_index=0, + condition_type=AlertConditionType.RELATIVE_INCREASE, + threshold_type=InsightThresholdType.ABSOLUTE, + lower=1, + ) + + assert alert["state"] == AlertState.NOT_FIRING + assert alert["last_checked_at"] is None + assert alert["last_notified_at"] is None + assert alert["next_check_at"] is None + + check_alert(alert["id"]) + + updated_alert = AlertConfiguration.objects.get(pk=alert["id"]) + assert updated_alert.state == AlertState.FIRING + assert updated_alert.last_checked_at == FROZEN_TIME + assert updated_alert.last_notified_at == FROZEN_TIME + assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + alert_check = AlertCheck.objects.filter(alert_configuration=alert["id"]).latest("created_at") + assert alert_check.calculated_value == 0 + assert alert_check.state == AlertState.FIRING + assert alert_check.error is None + + def test_relative_increase_absolute_upper_threshold_breached( + self, mock_send_breaches: MagicMock, mock_send_errors: MagicMock + ) -> None: + insight = self.create_time_series_trend_insight(interval=IntervalType.WEEK) + + # alert if sign ups increase by more than 1 + alert = self.create_alert( + insight, + series_index=0, + condition_type=AlertConditionType.RELATIVE_INCREASE, + threshold_type=InsightThresholdType.ABSOLUTE, + upper=1, + ) + + # FROZEN_TIME is on Tue, insight has weekly interval + # we aggregate our weekly insight numbers to display for Sun (19th May, 26th May, 2nd June) + # Previous to previous interval (last to last week) has 0 events + # add events for previous interval (last week on Sat) + last_sat = FROZEN_TIME - dateutil.relativedelta.relativedelta(days=3) + with freeze_time(last_sat): + _create_event( + team=self.team, + event="signed_up", + distinct_id="1", + properties={"$browser": "Chrome"}, + ) + _create_event( + team=self.team, + event="signed_up", + distinct_id="2", + properties={"$browser": "Chrome"}, + ) + flush_persons_and_events() + + check_alert(alert["id"]) + + updated_alert = AlertConfiguration.objects.get(pk=alert["id"]) + assert updated_alert.state == AlertState.FIRING + assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + alert_check = AlertCheck.objects.filter(alert_configuration=alert["id"]).latest("created_at") + + assert alert_check.calculated_value == 2 + assert alert_check.state == AlertState.FIRING + assert alert_check.error is None + + mock_send_breaches.assert_called_once_with( + ANY, ["The insight value for previous week increased (2) more than upper threshold (1.0)"] + ) + + def test_relative_increase_upper_threshold_breached( + self, mock_send_breaches: MagicMock, mock_send_errors: MagicMock + ) -> None: + insight = self.create_time_series_trend_insight(interval=IntervalType.WEEK) + + # alert if sign ups increase by more than 1 + absolute_alert = self.create_alert( + insight, + series_index=0, + condition_type=AlertConditionType.RELATIVE_INCREASE, + threshold_type=InsightThresholdType.ABSOLUTE, + upper=1, + ) + + # alert if sign ups increase by more than 20% + percentage_alert = self.create_alert( + insight, + series_index=0, + condition_type=AlertConditionType.RELATIVE_INCREASE, + threshold_type=InsightThresholdType.ABSOLUTE, + upper=0.2, + ) + + # FROZEN_TIME is on Tue, insight has weekly interval + # we aggregate our weekly insight numbers to display for Sun (19th May, 26th May, 2nd June) + + # set previous to previous interval (last to last week) to have 1 event + last_to_last_tue 
= FROZEN_TIME - dateutil.relativedelta.relativedelta(weeks=2) + + with freeze_time(last_to_last_tue): + _create_event( + team=self.team, + event="signed_up", + distinct_id="1", + properties={"$browser": "Chrome"}, + ) + flush_persons_and_events() + + # set previous interval to have 2 event + # add events for last week (last Tue) + last_tue = FROZEN_TIME - dateutil.relativedelta.relativedelta(weeks=1) + with freeze_time(last_tue): + _create_event( + team=self.team, + event="signed_up", + distinct_id="2", + properties={"$browser": "Chrome"}, + ) + _create_event( + team=self.team, + event="signed_up", + distinct_id="3", + properties={"$browser": "Chrome"}, + ) + _create_event( + team=self.team, + event="signed_up", + distinct_id="4", + properties={"$browser": "Chrome"}, + ) + flush_persons_and_events() + + # alert should fire as we had *increase* in events of (2 or 200%) week over week + check_alert(absolute_alert["id"]) + + updated_alert = AlertConfiguration.objects.get(pk=absolute_alert["id"]) + assert updated_alert.state == AlertState.FIRING + assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + alert_check = AlertCheck.objects.filter(alert_configuration=absolute_alert["id"]).latest("created_at") + + assert alert_check.calculated_value == 2 + assert alert_check.state == AlertState.FIRING + assert alert_check.error is None + + check_alert(percentage_alert["id"]) + + updated_alert = AlertConfiguration.objects.get(pk=percentage_alert["id"]) + assert updated_alert.state == AlertState.FIRING + assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + alert_check = AlertCheck.objects.filter(alert_configuration=percentage_alert["id"]).latest("created_at") + + assert alert_check.calculated_value == 2 + assert alert_check.state == AlertState.FIRING + assert alert_check.error is None + + def test_relative_increase_lower_threshold_breached_1( + self, mock_send_breaches: MagicMock, mock_send_errors: MagicMock + ) -> None: + insight = self.create_time_series_trend_insight(interval=IntervalType.WEEK) + + # alert if sign ups increase by less than 2 + absolute_alert = self.create_alert( + insight, + series_index=0, + condition_type=AlertConditionType.RELATIVE_INCREASE, + threshold_type=InsightThresholdType.ABSOLUTE, + lower=2, + ) + + # alert if sign ups increase by less than 20 + percentage_alert = self.create_alert( + insight, + series_index=0, + condition_type=AlertConditionType.RELATIVE_INCREASE, + threshold_type=InsightThresholdType.PERCENTAGE, + lower=0.5, # 50% + ) + + # FROZEN_TIME is on Tue, insight has weekly interval + # we aggregate our weekly insight numbers to display for Sun (19th May, 26th May, 2nd June) + + # set previous to previous interval (last to last week) to have 2 events + last_to_last_tue = FROZEN_TIME - dateutil.relativedelta.relativedelta(weeks=2) + + with freeze_time(last_to_last_tue): + _create_event( + team=self.team, + event="signed_up", + distinct_id="1", + properties={"$browser": "Chrome"}, + ) + _create_event( + team=self.team, + event="signed_up", + distinct_id="2", + properties={"$browser": "Chrome"}, + ) + flush_persons_and_events() + + # set previous interval to have 1 event + # add events for last week (last Tue) + last_tue = FROZEN_TIME - dateutil.relativedelta.relativedelta(weeks=1) + with freeze_time(last_tue): + _create_event( + team=self.team, + event="signed_up", + distinct_id="3", + properties={"$browser": "Chrome"}, + ) + flush_persons_and_events() + + # alert should 
fire as overall we had *decrease* in events (-1 or -50%) week over week + # check absolute alert + check_alert(absolute_alert["id"]) + + updated_alert = AlertConfiguration.objects.get(pk=absolute_alert["id"]) + assert updated_alert.state == AlertState.FIRING + assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + alert_check = AlertCheck.objects.filter(alert_configuration=absolute_alert["id"]).latest("created_at") + + assert alert_check.calculated_value == -1 + assert alert_check.state == AlertState.FIRING + assert alert_check.error is None + + mock_send_breaches.assert_called_once_with( + ANY, ["The insight value for previous week increased (-1) less than lower threshold (2.0)"] + ) + + # check percentage alert + check_alert(percentage_alert["id"]) + + updated_alert = AlertConfiguration.objects.get(pk=percentage_alert["id"]) + assert updated_alert.state == AlertState.FIRING + assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + alert_check = AlertCheck.objects.filter(alert_configuration=percentage_alert["id"]).latest("created_at") + + assert alert_check.calculated_value == -0.5 # 50% decrease + assert alert_check.state == AlertState.FIRING + assert alert_check.error is None + + mock_send_breaches.assert_called_with( + ANY, ["The insight value for previous week increased (-50.00%) less than lower threshold (50.00%)"] + ) + + def test_relative_increase_lower_threshold_breached_2( + self, mock_send_breaches: MagicMock, mock_send_errors: MagicMock + ) -> None: + insight = self.create_time_series_trend_insight(interval=IntervalType.WEEK) + + # alert if sign ups increase by less than 2 + absolute_alert = self.create_alert( + insight, + series_index=0, + condition_type=AlertConditionType.RELATIVE_INCREASE, + threshold_type=InsightThresholdType.ABSOLUTE, + lower=2, + ) + + # alert if sign ups increase by less than 110% + percentage_alert = self.create_alert( + insight, + series_index=0, + condition_type=AlertConditionType.RELATIVE_INCREASE, + threshold_type=InsightThresholdType.PERCENTAGE, + lower=1.1, + ) + + # FROZEN_TIME is on Tue, insight has weekly interval + # we aggregate our weekly insight numbers to display for Sun (19th May, 26th May, 2nd June) + + # set previous to previous interval (last to last week) to have 1 event + last_to_last_tue = FROZEN_TIME - dateutil.relativedelta.relativedelta(weeks=2) + + with freeze_time(last_to_last_tue): + _create_event( + team=self.team, + event="signed_up", + distinct_id="1", + properties={"$browser": "Chrome"}, + ) + flush_persons_and_events() + + # set previous interval to have 2 event + # add events for last week (last Tue) + last_tue = FROZEN_TIME - dateutil.relativedelta.relativedelta(weeks=1) + with freeze_time(last_tue): + _create_event( + team=self.team, + event="signed_up", + distinct_id="2", + properties={"$browser": "Chrome"}, + ) + _create_event( + team=self.team, + event="signed_up", + distinct_id="3", + properties={"$browser": "Chrome"}, + ) + flush_persons_and_events() + + # alert should fire as overall we had *increase* in events of just (1 or 100%) week over week + # alert required at least 2 + check_alert(absolute_alert["id"]) + + updated_alert = AlertConfiguration.objects.get(pk=absolute_alert["id"]) + assert updated_alert.state == AlertState.FIRING + assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + alert_check = 
AlertCheck.objects.filter(alert_configuration=absolute_alert["id"]).latest("created_at") + + assert alert_check.calculated_value == 1 + assert alert_check.state == AlertState.FIRING + assert alert_check.error is None + + check_alert(percentage_alert["id"]) + + updated_alert = AlertConfiguration.objects.get(pk=percentage_alert["id"]) + assert updated_alert.state == AlertState.FIRING + assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + alert_check = AlertCheck.objects.filter(alert_configuration=percentage_alert["id"]).latest("created_at") + + assert alert_check.calculated_value == 1 + assert alert_check.state == AlertState.FIRING + assert alert_check.error is None + + def test_relative_decrease_upper_threshold_breached( + self, mock_send_breaches: MagicMock, mock_send_errors: MagicMock + ) -> None: + insight = self.create_time_series_trend_insight(interval=IntervalType.WEEK) + + # alert if sign ups decrease by more than 1 + absolute_alert = self.create_alert( + insight, + series_index=0, + condition_type=AlertConditionType.RELATIVE_DECREASE, + threshold_type=InsightThresholdType.ABSOLUTE, + upper=1, + ) + + # alert if sign ups decrease by more than 20% + percentage_alert = self.create_alert( + insight, + series_index=0, + condition_type=AlertConditionType.RELATIVE_DECREASE, + threshold_type=InsightThresholdType.PERCENTAGE, + upper=0.2, + ) + + # FROZEN_TIME is on Tue, insight has weekly interval + # we aggregate our weekly insight numbers to display for Sun (19th May, 26th May, 2nd June) + + # set previous to previous interval (last to last week) to have 3 event + last_to_last_tue = FROZEN_TIME - dateutil.relativedelta.relativedelta(weeks=2) + + with freeze_time(last_to_last_tue): + _create_event( + team=self.team, + event="signed_up", + distinct_id="1", + properties={"$browser": "Chrome"}, + ) + _create_event( + team=self.team, + event="signed_up", + distinct_id="2", + properties={"$browser": "Chrome"}, + ) + _create_event( + team=self.team, + event="signed_up", + distinct_id="3", + properties={"$browser": "Chrome"}, + ) + flush_persons_and_events() + + # set previous interval to have 1 event + # add events for last week (last Tue) + last_tue = FROZEN_TIME - dateutil.relativedelta.relativedelta(weeks=1) + with freeze_time(last_tue): + _create_event( + team=self.team, + event="signed_up", + distinct_id="4", + properties={"$browser": "Chrome"}, + ) + flush_persons_and_events() + + # alert should fire as we had decrease in events of (2 or 200%) week over week + check_alert(absolute_alert["id"]) + + updated_alert = AlertConfiguration.objects.get(pk=absolute_alert["id"]) + assert updated_alert.state == AlertState.FIRING + assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + alert_check = AlertCheck.objects.filter(alert_configuration=absolute_alert["id"]).latest("created_at") + + assert alert_check.calculated_value == 2 + assert alert_check.state == AlertState.FIRING + assert alert_check.error is None + + mock_send_breaches.assert_called_once_with( + ANY, ["The insight value for previous week decreased (2) more than upper threshold (1.0)"] + ) + + check_alert(percentage_alert["id"]) + + updated_alert = AlertConfiguration.objects.get(pk=percentage_alert["id"]) + assert updated_alert.state == AlertState.FIRING + assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + alert_check = 
AlertCheck.objects.filter(alert_configuration=percentage_alert["id"]).latest("created_at") + + assert alert_check.calculated_value == (2 / 3) + assert alert_check.state == AlertState.FIRING + assert alert_check.error is None + + mock_send_breaches.assert_called_with( + ANY, ["The insight value for previous week decreased (66.67%) more than upper threshold (20.00%)"] + ) + + def test_relative_decrease_lower_threshold_breached( + self, mock_send_breaches: MagicMock, mock_send_errors: MagicMock + ) -> None: + insight = self.create_time_series_trend_insight(interval=IntervalType.WEEK) + + # alert if sign ups decrease by less than 2 + absolute_alert = self.create_alert( + insight, + series_index=0, + condition_type=AlertConditionType.RELATIVE_DECREASE, + threshold_type=InsightThresholdType.ABSOLUTE, + lower=2, + ) + + # alert if sign ups decrease by less than 80% + percentage_alert = self.create_alert( + insight, + series_index=0, + condition_type=AlertConditionType.RELATIVE_DECREASE, + threshold_type=InsightThresholdType.PERCENTAGE, + lower=0.8, + ) + + # FROZEN_TIME is on Tue, insight has weekly interval + # we aggregate our weekly insight numbers to display for Sun (19th May, 26th May, 2nd June) + + # set previous to previous interval (last to last week) to have 2 event + last_to_last_tue = FROZEN_TIME - dateutil.relativedelta.relativedelta(weeks=2) + + with freeze_time(last_to_last_tue): + _create_event( + team=self.team, + event="signed_up", + distinct_id="1", + properties={"$browser": "Chrome"}, + ) + _create_event( + team=self.team, + event="signed_up", + distinct_id="2", + properties={"$browser": "Chrome"}, + ) + flush_persons_and_events() + + # set previous interval to have 1 event + # add events for last week (last Tue) + last_tue = FROZEN_TIME - dateutil.relativedelta.relativedelta(weeks=1) + with freeze_time(last_tue): + _create_event( + team=self.team, + event="signed_up", + distinct_id="4", + properties={"$browser": "Chrome"}, + ) + flush_persons_and_events() + + # alert should fire as we had decrease in events of (1 or 50%) week over week + check_alert(absolute_alert["id"]) + + updated_alert = AlertConfiguration.objects.get(pk=absolute_alert["id"]) + assert updated_alert.state == AlertState.FIRING + assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + alert_check = AlertCheck.objects.filter(alert_configuration=absolute_alert["id"]).latest("created_at") + + assert alert_check.calculated_value == 1 + assert alert_check.state == AlertState.FIRING + assert alert_check.error is None + + mock_send_breaches.assert_called_once_with( + ANY, ["The insight value for previous week decreased (1) less than lower threshold (2.0)"] + ) + + check_alert(percentage_alert["id"]) + + updated_alert = AlertConfiguration.objects.get(pk=percentage_alert["id"]) + assert updated_alert.state == AlertState.FIRING + assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + alert_check = AlertCheck.objects.filter(alert_configuration=percentage_alert["id"]).latest("created_at") + + assert alert_check.calculated_value == 0.5 + assert alert_check.state == AlertState.FIRING + assert alert_check.error is None + + mock_send_breaches.assert_called_with( + ANY, ["The insight value for previous week decreased (50.00%) less than lower threshold (80.00%)"] + ) + + def test_relative_increase_no_threshold_breached( + self, mock_send_breaches: MagicMock, mock_send_errors: MagicMock + ) -> None: + insight = 
self.create_time_series_trend_insight(interval=IntervalType.WEEK) + + # alert if sign ups increase by more than 4 + absolute_alert = self.create_alert( + insight, + series_index=0, + condition_type=AlertConditionType.RELATIVE_INCREASE, + threshold_type=InsightThresholdType.ABSOLUTE, + upper=4, + ) + + # alert if sign ups increase by more than 400% + percentage_alert = self.create_alert( + insight, + series_index=0, + condition_type=AlertConditionType.RELATIVE_INCREASE, + threshold_type=InsightThresholdType.PERCENTAGE, + upper=4, + ) + + # FROZEN_TIME is on Tue, insight has weekly interval + # we aggregate our weekly insight numbers to display for Sun (19th May, 26th May, 2nd June) + + # set previous to previous interval (last to last week) to have 1 event + last_to_last_tue = FROZEN_TIME - dateutil.relativedelta.relativedelta(weeks=2) + + with freeze_time(last_to_last_tue): + _create_event( + team=self.team, + event="signed_up", + distinct_id="1", + properties={"$browser": "Chrome"}, + ) + flush_persons_and_events() + + # set previous interval to have 3 event + # add events for last week (last Tue) + last_tue = FROZEN_TIME - dateutil.relativedelta.relativedelta(weeks=1) + with freeze_time(last_tue): + _create_event( + team=self.team, + event="signed_up", + distinct_id="4", + properties={"$browser": "Chrome"}, + ) + _create_event( + team=self.team, + event="signed_up", + distinct_id="2", + properties={"$browser": "Chrome"}, + ) + _create_event( + team=self.team, + event="signed_up", + distinct_id="3", + properties={"$browser": "Chrome"}, + ) + flush_persons_and_events() + + # alert shouldn't fire as increase was only of 2 or 200% + check_alert(absolute_alert["id"]) + + updated_alert = AlertConfiguration.objects.get(pk=absolute_alert["id"]) + assert updated_alert.state == AlertState.NOT_FIRING + assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + alert_check = AlertCheck.objects.filter(alert_configuration=absolute_alert["id"]).latest("created_at") + assert alert_check.calculated_value == 2 + assert alert_check.state == AlertState.NOT_FIRING + assert alert_check.error is None + + check_alert(percentage_alert["id"]) + + updated_alert = AlertConfiguration.objects.get(pk=percentage_alert["id"]) + assert updated_alert.state == AlertState.NOT_FIRING + assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + alert_check = AlertCheck.objects.filter(alert_configuration=percentage_alert["id"]).latest("created_at") + assert alert_check.calculated_value == 2 + assert alert_check.state == AlertState.NOT_FIRING + assert alert_check.error is None + + def test_relative_decrease_no_threshold_breached( + self, mock_send_breaches: MagicMock, mock_send_errors: MagicMock + ) -> None: + insight = self.create_time_series_trend_insight(interval=IntervalType.WEEK) + + # alert if sign ups increase by more than 4 + absolute_alert = self.create_alert( + insight, + series_index=0, + condition_type=AlertConditionType.RELATIVE_DECREASE, + threshold_type=InsightThresholdType.ABSOLUTE, + upper=4, + ) + + # alert if sign ups decrease by more than 80% + percentage_alert = self.create_alert( + insight, + series_index=0, + condition_type=AlertConditionType.RELATIVE_DECREASE, + threshold_type=InsightThresholdType.PERCENTAGE, + upper=0.8, + ) + + # FROZEN_TIME is on Tue, insight has weekly interval + # we aggregate our weekly insight numbers to display for Sun (19th May, 26th May, 2nd June) + + # set previous to previous interval (last to 
last week) to have 3 events + last_to_last_tue = FROZEN_TIME - dateutil.relativedelta.relativedelta(weeks=2) + + with freeze_time(last_to_last_tue): + _create_event( + team=self.team, + event="signed_up", + distinct_id="1", + properties={"$browser": "Chrome"}, + ) + _create_event( + team=self.team, + event="signed_up", + distinct_id="4", + properties={"$browser": "Chrome"}, + ) + _create_event( + team=self.team, + event="signed_up", + distinct_id="2", + properties={"$browser": "Chrome"}, + ) + flush_persons_and_events() + + # set previous interval to have 1 event + # add events for last week (last Tue) + last_tue = FROZEN_TIME - dateutil.relativedelta.relativedelta(weeks=1) + with freeze_time(last_tue): + _create_event( + team=self.team, + event="signed_up", + distinct_id="3", + properties={"$browser": "Chrome"}, + ) + flush_persons_and_events() + + # alert shouldn't fire as increase was only of 2 or 200% + check_alert(absolute_alert["id"]) + + updated_alert = AlertConfiguration.objects.get(pk=absolute_alert["id"]) + assert updated_alert.state == AlertState.NOT_FIRING + assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + alert_check = AlertCheck.objects.filter(alert_configuration=absolute_alert["id"]).latest("created_at") + assert alert_check.calculated_value == 2 + assert alert_check.state == AlertState.NOT_FIRING + assert alert_check.error is None + + check_alert(percentage_alert["id"]) + + updated_alert = AlertConfiguration.objects.get(pk=percentage_alert["id"]) + assert updated_alert.state == AlertState.NOT_FIRING + assert updated_alert.next_check_at == FROZEN_TIME + dateutil.relativedelta.relativedelta(days=1) + + alert_check = AlertCheck.objects.filter(alert_configuration=percentage_alert["id"]).latest("created_at") + assert alert_check.calculated_value == (2 / 3) + assert alert_check.state == AlertState.NOT_FIRING + assert alert_check.error is None diff --git a/posthog/tasks/alerts/trends.py b/posthog/tasks/alerts/trends.py new file mode 100644 index 0000000000000..437685afbe000 --- /dev/null +++ b/posthog/tasks/alerts/trends.py @@ -0,0 +1,239 @@ +from typing import Optional, cast + +from posthog.api.services.query import ExecutionMode +from posthog.caching.calculate_results import calculate_for_query_based_insight + +from posthog.models import AlertConfiguration, Insight +from posthog.schema import ( + TrendsQuery, + IntervalType, + TrendsAlertConfig, + InsightThreshold, + AlertCondition, + AlertConditionType, + InsightsThresholdBounds, + InsightThresholdType, +) +from posthog.caching.fetch_from_cache import InsightResult +from typing import TypedDict, NotRequired +from posthog.tasks.alerts.utils import ( + AlertEvaluationResult, + NON_TIME_SERIES_DISPLAY_TYPES, +) + + +# TODO: move the TrendResult UI type to schema.ts and use that instead +class TrendResult(TypedDict): + action: dict + actions: list[dict] + count: int + data: list[float] + days: list[str] + dates: list[str] + label: str + labels: list[str] + breakdown_value: str | int | list[str] + aggregated_value: NotRequired[float] + status: str | None + compare_label: str | None + compare: bool + persons_urls: list[dict] + persons: dict + filter: dict + + +def check_trends_alert(alert: AlertConfiguration, insight: Insight, query: TrendsQuery) -> AlertEvaluationResult: + if "type" in alert.config and alert.config["type"] == "TrendsAlertConfig": + config = TrendsAlertConfig.model_validate(alert.config) + else: + ValueError(f"Unsupported alert config type: {alert.config}") + + 
+    condition = AlertCondition.model_validate(alert.condition)
+    threshold = InsightThreshold.model_validate(alert.threshold.configuration) if alert.threshold else None
+
+    if not threshold:
+        return AlertEvaluationResult(value=0, breaches=[])
+
+    match condition.type:
+        case AlertConditionType.ABSOLUTE_VALUE:
+            if threshold.type != InsightThresholdType.ABSOLUTE:
+                raise ValueError(f"Absolute threshold not configured for alert condition ABSOLUTE_VALUE")
+
+            # want value for current interval (last hour, last day, last week, last month)
+            # depending on the alert calculation interval
+            if _is_non_time_series_trend(query):
+                # for non time series, it's an aggregated value for full interval
+                # so we need to compute full insight
+                filters_override = None
+            else:
+                filters_override = _date_range_override_for_intervals(query, last_x_intervals=2)
+
+            calculation_result = calculate_for_query_based_insight(
+                insight,
+                team=alert.team,
+                execution_mode=ExecutionMode.RECENT_CACHE_CALCULATE_BLOCKING_IF_STALE,
+                user=None,
+                filters_override=filters_override,
+            )
+
+            if not calculation_result.result:
+                raise RuntimeError(f"No results found for insight with alert id = {alert.id}")
+
+            prev_interval_value = _pick_interval_value_from_trend_result(config, query, calculation_result, -1)
+            breaches = _validate_bounds(
+                threshold.bounds, prev_interval_value, threshold.type, condition.type, query.interval
+            )
+
+            return AlertEvaluationResult(value=prev_interval_value, breaches=breaches)
+
+        case AlertConditionType.RELATIVE_INCREASE:
+            if _is_non_time_series_trend(query):
+                raise ValueError(f"Relative alerts not supported for non time series trends")
+
+            # to measure relative increase, we can't alert until current interval has completed
+            # as to check increase less than X, we need interval to complete
+            # so we need to compute the trend values for last 3 intervals
+            # and then compare the previous interval with value for the interval before previous
+            filters_overrides = _date_range_override_for_intervals(query, last_x_intervals=3)
+
+            calculation_result = calculate_for_query_based_insight(
+                insight,
+                team=alert.team,
+                execution_mode=ExecutionMode.RECENT_CACHE_CALCULATE_BLOCKING_IF_STALE,
+                user=None,
+                filters_override=filters_overrides,
+            )
+
+            prev_interval_value = _pick_interval_value_from_trend_result(config, query, calculation_result, -1)
+            prev_prev_interval_value = _pick_interval_value_from_trend_result(config, query, calculation_result, -2)
+
+            if threshold.type == InsightThresholdType.ABSOLUTE:
+                increase = prev_interval_value - prev_prev_interval_value
+                breaches = _validate_bounds(threshold.bounds, increase, threshold.type, condition.type, query.interval)
+            elif threshold.type == InsightThresholdType.PERCENTAGE:
+                increase = (prev_interval_value - prev_prev_interval_value) / prev_prev_interval_value
+                breaches = _validate_bounds(threshold.bounds, increase, threshold.type, condition.type, query.interval)
+            else:
+                raise ValueError(
+                    f"Neither relative nor absolute threshold configured for alert condition RELATIVE_INCREASE"
+                )
+
+            return AlertEvaluationResult(value=increase, breaches=breaches)
+
+        case AlertConditionType.RELATIVE_DECREASE:
+            if _is_non_time_series_trend(query):
+                raise ValueError(f"Relative alerts not supported for non time series trends")
+
+            # to measure relative decrease, we can't alert until current interval has completed
+            # as to check decrease more than X, we need interval to complete
+            # so we need to compute the trend values for last 3 intervals
+            # and then compare the previous interval with value for the interval before previous
+            filters_overrides = _date_range_override_for_intervals(query, last_x_intervals=3)
+
+            calculation_result = calculate_for_query_based_insight(
+                insight,
+                team=alert.team,
+                execution_mode=ExecutionMode.RECENT_CACHE_CALCULATE_BLOCKING_IF_STALE,
+                user=None,
+                filters_override=filters_overrides,
+            )
+
+            prev_interval_value = _pick_interval_value_from_trend_result(config, query, calculation_result, -1)
+            prev_prev_interval_value = _pick_interval_value_from_trend_result(config, query, calculation_result, -2)
+
+            if threshold.type == InsightThresholdType.ABSOLUTE:
+                decrease = prev_prev_interval_value - prev_interval_value
+                breaches = _validate_bounds(threshold.bounds, decrease, threshold.type, condition.type, query.interval)
+            elif threshold.type == InsightThresholdType.PERCENTAGE:
+                decrease = (prev_prev_interval_value - prev_interval_value) / prev_prev_interval_value
+                breaches = _validate_bounds(threshold.bounds, decrease, threshold.type, condition.type, query.interval)
+            else:
+                raise ValueError(
+                    f"Neither relative nor absolute threshold configured for alert condition RELATIVE_DECREASE"
+                )
+
+            return AlertEvaluationResult(value=decrease, breaches=breaches)
+
+        case _:
+            raise NotImplementedError(f"Unsupported alert condition type: {condition.type}")
+
+
+def _is_non_time_series_trend(query: TrendsQuery) -> bool:
+    return bool(query.trendsFilter and query.trendsFilter.display in NON_TIME_SERIES_DISPLAY_TYPES)
+
+
+def _date_range_override_for_intervals(query: TrendsQuery, last_x_intervals: int = 1) -> Optional[dict]:
+    """
+    Resulting filter overrides don't set 'date_to' so we always get value for current interval.
+    last_x_intervals controls how many intervals to look back.
+    """
+    assert last_x_intervals > 0
+
+    match query.interval:
+        case IntervalType.DAY:
+            date_from = f"-{last_x_intervals}d"
+        case IntervalType.WEEK:
+            date_from = f"-{last_x_intervals}w"
+        case IntervalType.MONTH:
+            date_from = f"-{last_x_intervals}m"
+        case _:
+            date_from = f"-{last_x_intervals}h"
+
+    return {"date_from": date_from}
+
+
+def _pick_interval_value_from_trend_result(
+    config: TrendsAlertConfig, query: TrendsQuery, results: InsightResult, interval_to_pick: int = 0
+) -> float:
+    """
+    interval_to_pick controls whether to pick value for current (0), last (-1), one before last (-2)...
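+
+    For example (illustrative): with data [10, 12, 15] ordered oldest to newest,
+    interval_to_pick=0 picks 15 (the current, still-running interval) and
+    interval_to_pick=-1 picks 12 (the last completed interval).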
+ """ + assert interval_to_pick <= 0 + + series_index = config.series_index + result = cast(list[TrendResult], results.result)[series_index] + + if _is_non_time_series_trend(query): + # only one value in result + return result["aggregated_value"] + + data = result["data"] + # data is pre sorted in ascending order of timestamps + index_from_back = len(data) - 1 + interval_to_pick + return data[index_from_back] + + +def _validate_bounds( + bounds: InsightsThresholdBounds | None, + calculated_value: float, + threshold_type: InsightThresholdType, + condition_type: AlertConditionType, + interval_type: IntervalType | None, +) -> list[str]: + if not bounds: + return [] + + is_percentage = threshold_type == InsightThresholdType.PERCENTAGE + + formatted_value = f"{calculated_value:.2%}" if is_percentage else calculated_value + + match condition_type: + case AlertConditionType.ABSOLUTE_VALUE: + condition_text = "is" + case AlertConditionType.RELATIVE_INCREASE: + condition_text = "increased" + case AlertConditionType.RELATIVE_DECREASE: + condition_text = "decreased" + + if bounds.lower is not None and calculated_value < bounds.lower: + lower_value = f"{bounds.lower:.2%}" if is_percentage else bounds.lower + return [ + f"The insight value for previous {interval_type or 'interval'} {condition_text} ({formatted_value}) less than lower threshold ({lower_value})" + ] + if bounds.upper is not None and calculated_value > bounds.upper: + upper_value = f"{bounds.upper:.2%}" if is_percentage else bounds.upper + return [ + f"The insight value for previous {interval_type or 'interval'} {condition_text} ({formatted_value}) more than upper threshold ({upper_value})" + ] + + return [] diff --git a/posthog/tasks/alerts/utils.py b/posthog/tasks/alerts/utils.py new file mode 100644 index 0000000000000..06b94cc938089 --- /dev/null +++ b/posthog/tasks/alerts/utils.py @@ -0,0 +1,110 @@ +from dateutil.relativedelta import relativedelta + +from django.utils import timezone +import structlog + +from posthog.email import EmailMessage +from posthog.models import AlertConfiguration +from posthog.schema import ( + ChartDisplayType, + NodeKind, + AlertCalculationInterval, +) +from dataclasses import dataclass + +logger = structlog.get_logger(__name__) + + +@dataclass +class AlertEvaluationResult: + value: float | None + breaches: list[str] | None + + +WRAPPER_NODE_KINDS = [NodeKind.DATA_TABLE_NODE, NodeKind.DATA_VISUALIZATION_NODE, NodeKind.INSIGHT_VIZ_NODE] + +NON_TIME_SERIES_DISPLAY_TYPES = { + ChartDisplayType.BOLD_NUMBER, + ChartDisplayType.ACTIONS_PIE, + ChartDisplayType.ACTIONS_BAR_VALUE, + ChartDisplayType.ACTIONS_TABLE, + ChartDisplayType.WORLD_MAP, +} + + +def calculation_interval_to_order(interval: AlertCalculationInterval | None) -> int: + match interval: + case AlertCalculationInterval.HOURLY: + return 0 + case AlertCalculationInterval.DAILY: + return 1 + case _: + return 2 + + +def alert_calculation_interval_to_relativedelta(alert_calculation_interval: AlertCalculationInterval) -> relativedelta: + match alert_calculation_interval: + case AlertCalculationInterval.HOURLY: + return relativedelta(hours=1) + case AlertCalculationInterval.DAILY: + return relativedelta(days=1) + case AlertCalculationInterval.WEEKLY: + return relativedelta(weeks=1) + case AlertCalculationInterval.MONTHLY: + return relativedelta(months=1) + case _: + raise ValueError(f"Invalid alert calculation interval: {alert_calculation_interval}") + + +def send_notifications_for_breaches(alert: AlertConfiguration, breaches: list[str]) -> None: + subject 
= f"PostHog alert {alert.name} is firing" + campaign_key = f"alert-firing-notification-{alert.id}-{timezone.now().timestamp()}" + insight_url = f"/project/{alert.team.pk}/insights/{alert.insight.short_id}?alert_id={alert.id}" + alert_url = f"{insight_url}/alerts/{alert.id}" + message = EmailMessage( + campaign_key=campaign_key, + subject=subject, + template_name="alert_check_firing", + template_context={ + "match_descriptions": breaches, + "insight_url": insight_url, + "insight_name": alert.insight.name, + "alert_url": alert_url, + "alert_name": alert.name, + }, + ) + targets = alert.subscribed_users.all().values_list("email", flat=True) + if not targets: + raise RuntimeError(f"no targets configured for the alert {alert.id}") + for target in targets: + message.add_recipient(email=target) + + logger.info(f"Send notifications about {len(breaches)} anomalies", alert_id=alert.id) + message.send() + + +def send_notifications_for_errors(alert: AlertConfiguration, error: dict) -> None: + subject = f"PostHog alert {alert.name} check failed to evaluate" + campaign_key = f"alert-firing-notification-{alert.id}-{timezone.now().timestamp()}" + insight_url = f"/project/{alert.team.pk}/insights/{alert.insight.short_id}?alert_id={alert.id}" + alert_url = f"{insight_url}/alerts/{alert.id}" + message = EmailMessage( + campaign_key=campaign_key, + subject=subject, + template_name="alert_check_firing", + template_context={ + "match_descriptions": error, + "insight_url": insight_url, + "insight_name": alert.insight.name, + "alert_url": alert_url, + "alert_name": alert.name, + }, + ) + targets = alert.subscribed_users.all().values_list("email", flat=True) + if not targets: + raise RuntimeError(f"no targets configured for the alert {alert.id}") + for target in targets: + message.add_recipient(email=target) + + logger.info(f"Send notifications about alert checking error", alert_id=alert.id) + message.send() diff --git a/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr b/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr index bae606bc1022e..cc4697339daa4 100644 --- a/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr +++ b/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr @@ -96,6 +96,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -259,7 +260,6 @@ "posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -272,6 +272,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", @@ -359,6 +360,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -428,7 +430,6 @@ 
"posthog_user"."first_name", "posthog_user"."last_name", "posthog_user"."is_staff", - "posthog_user"."is_active", "posthog_user"."date_joined", "posthog_user"."uuid", "posthog_user"."current_organization_id", @@ -441,6 +442,7 @@ "posthog_user"."requested_password_reset_at", "posthog_user"."has_seen_product_intro_for", "posthog_user"."strapi_id", + "posthog_user"."is_active", "posthog_user"."theme_mode", "posthog_user"."partial_notification_settings", "posthog_user"."anonymize_data", diff --git a/posthog/temporal/batch_exports/batch_exports.py b/posthog/temporal/batch_exports/batch_exports.py index 4c1114cfb2cdf..16d4ccdacf0d0 100644 --- a/posthog/temporal/batch_exports/batch_exports.py +++ b/posthog/temporal/batch_exports/batch_exports.py @@ -1,3 +1,5 @@ +import asyncio +import collections import collections.abc import dataclasses import datetime as dt @@ -251,6 +253,135 @@ async def iter_records_from_model_view( yield record_batch +class RecordBatchQueue(asyncio.Queue): + """A queue of pyarrow RecordBatch instances limited by bytes.""" + + def __init__(self, max_size_bytes=0): + super().__init__(maxsize=max_size_bytes) + self._bytes_size = 0 + self._schema_set = asyncio.Event() + self.record_batch_schema = None + # This is set by `asyncio.Queue.__init__` calling `_init` + self._queue: collections.deque + + def _get(self) -> pa.RecordBatch: + """Override parent `_get` to keep track of bytes.""" + item = self._queue.popleft() + self._bytes_size -= item.get_total_buffer_size() + return item + + def _put(self, item: pa.RecordBatch) -> None: + """Override parent `_put` to keep track of bytes.""" + self._bytes_size += item.get_total_buffer_size() + + if not self._schema_set.is_set(): + self.set_schema(item) + + self._queue.append(item) + + def set_schema(self, record_batch: pa.RecordBatch) -> None: + """Used to keep track of schema of events in queue.""" + self.record_batch_schema = record_batch.schema + self._schema_set.set() + + async def get_schema(self) -> pa.Schema: + """Return the schema of events in queue. + + Currently, this is not enforced. It's purely for reporting to users of + the queue what do the record batches look like. It's up to the producer + to ensure all record batches have the same schema. + """ + await self._schema_set.wait() + return self.record_batch_schema + + def qsize(self) -> int: + """Size in bytes of record batches in the queue. + + This is used to determine when the queue is full, so it returns the + number of bytes. + """ + return self._bytes_size + + +def start_produce_batch_export_record_batches( + client: ClickHouseClient, + model_name: str, + is_backfill: bool, + team_id: int, + interval_start: str, + interval_end: str, + fields: list[BatchExportField] | None = None, + destination_default_fields: list[BatchExportField] | None = None, + **parameters, +): + """Start producing batch export record batches from a model query. + + Depending on the model, we issue a query to ClickHouse and initialize a + producer to stream record batches to a queue. Callers can then consume from + this queue as the record batches arrive. The producer runs asynchronously as + a background task, which is returned. 
+ + Returns: + A tuple containing the record batch queue, an event used by the producer + to indicate there is nothing more to produce, and a reference to the + producer task + """ + if fields is None: + if destination_default_fields is None: + fields = default_fields() + else: + fields = destination_default_fields + + if model_name == "persons": + view = SELECT_FROM_PERSONS_VIEW + + else: + if parameters.get("exclude_events", None): + parameters["exclude_events"] = list(parameters["exclude_events"]) + else: + parameters["exclude_events"] = [] + + if parameters.get("include_events", None): + parameters["include_events"] = list(parameters["include_events"]) + else: + parameters["include_events"] = [] + + if str(team_id) in settings.UNCONSTRAINED_TIMESTAMP_TEAM_IDS: + query_template = SELECT_FROM_EVENTS_VIEW_UNBOUNDED + elif is_backfill: + query_template = SELECT_FROM_EVENTS_VIEW_BACKFILL + else: + query_template = SELECT_FROM_EVENTS_VIEW + lookback_days = settings.OVERRIDE_TIMESTAMP_TEAM_IDS.get(team_id, settings.DEFAULT_TIMESTAMP_LOOKBACK_DAYS) + parameters["lookback_days"] = lookback_days + + if "_inserted_at" not in [field["alias"] for field in fields]: + control_fields = [BatchExportField(expression="_inserted_at", alias="_inserted_at")] + else: + control_fields = [] + + query_fields = ",".join(f"{field['expression']} AS {field['alias']}" for field in fields + control_fields) + + view = query_template.substitute(fields=query_fields) + + parameters["team_id"] = team_id + parameters["interval_start"] = dt.datetime.fromisoformat(interval_start).strftime("%Y-%m-%d %H:%M:%S") + parameters["interval_end"] = dt.datetime.fromisoformat(interval_end).strftime("%Y-%m-%d %H:%M:%S") + extra_query_parameters = parameters.pop("extra_query_parameters", {}) or {} + parameters = {**parameters, **extra_query_parameters} + + queue = RecordBatchQueue(max_size_bytes=settings.BATCH_EXPORT_BUFFER_QUEUE_MAX_SIZE_BYTES) + query_id = uuid.uuid4() + done_event = asyncio.Event() + produce_task = asyncio.create_task( + client.aproduce_query_as_arrow_record_batches( + view, queue=queue, done_event=done_event, query_parameters=parameters, query_id=str(query_id) + ) + ) + + return queue, done_event, produce_task + + def iter_records( client: ClickHouseClient, team_id: int, diff --git a/posthog/temporal/batch_exports/bigquery_batch_export.py b/posthog/temporal/batch_exports/bigquery_batch_export.py index 9da8c89e56e53..521c6b1d92f85 100644 --- a/posthog/temporal/batch_exports/bigquery_batch_export.py +++ b/posthog/temporal/batch_exports/bigquery_batch_export.py @@ -3,9 +3,12 @@ import contextlib import dataclasses import datetime as dt +import functools import json +import operator import pyarrow as pa +import structlog from django.conf import settings from google.cloud import bigquery from google.oauth2 import service_account @@ -27,8 +30,8 @@ default_fields, execute_batch_export_insert_activity, get_data_interval, - iter_model_records, start_batch_export_run, + start_produce_batch_export_record_batches, ) from posthog.temporal.batch_exports.metrics import ( get_bytes_exported_metric, @@ -42,18 +45,19 @@ ) from posthog.temporal.batch_exports.utils import ( JsonType, - apeek_first_and_rewind, cast_record_batch_json_columns, set_status_to_running_task, ) from posthog.temporal.common.clickhouse import get_client from posthog.temporal.common.heartbeat import Heartbeater -from posthog.temporal.common.logger import bind_temporal_worker_logger +from posthog.temporal.common.logger import configure_temporal_worker_logger from 
posthog.temporal.common.utils import ( BatchExportHeartbeatDetails, should_resume_from_activity_heartbeat, ) +logger = structlog.get_logger() + def get_bigquery_fields_from_record_schema( record_schema: pa.Schema, known_json_columns: list[str] @@ -72,6 +76,9 @@ def get_bigquery_fields_from_record_schema( bq_schema: list[bigquery.SchemaField] = [] for name in record_schema.names: + if name == "_inserted_at": + continue + pa_field = record_schema.field(name) if pa.types.is_string(pa_field.type) or isinstance(pa_field.type, JsonType): @@ -264,8 +271,13 @@ async def load_parquet_file(self, parquet_file, table, table_schema): schema=table_schema, ) - load_job = self.load_table_from_file(parquet_file, table, job_config=job_config, rewind=True) - return await asyncio.to_thread(load_job.result) + await logger.adebug("Creating BigQuery load job for Parquet file '%s'", parquet_file) + load_job = await asyncio.to_thread( + self.load_table_from_file, parquet_file, table, job_config=job_config, rewind=True + ) + await logger.adebug("Waiting for BigQuery load job for Parquet file '%s'", parquet_file) + result = await asyncio.to_thread(load_job.result) + return result async def load_jsonl_file(self, jsonl_file, table, table_schema): """Execute a COPY FROM query with given connection to copy contents of jsonl_file.""" @@ -274,8 +286,14 @@ async def load_jsonl_file(self, jsonl_file, table, table_schema): schema=table_schema, ) - load_job = self.load_table_from_file(jsonl_file, table, job_config=job_config, rewind=True) - return await asyncio.to_thread(load_job.result) + await logger.adebug("Creating BigQuery load job for JSONL file '%s'", jsonl_file) + load_job = await asyncio.to_thread( + self.load_table_from_file, jsonl_file, table, job_config=job_config, rewind=True + ) + + await logger.adebug("Waiting for BigQuery load job for JSONL file '%s'", jsonl_file) + result = await asyncio.to_thread(load_job.result) + return result @contextlib.contextmanager @@ -327,7 +345,9 @@ def bigquery_default_fields() -> list[BatchExportField]: @activity.defn async def insert_into_bigquery_activity(inputs: BigQueryInsertInputs) -> RecordsCompleted: """Activity streams data from ClickHouse to BigQuery.""" - logger = await bind_temporal_worker_logger(team_id=inputs.team_id, destination="BigQuery") + logger = await configure_temporal_worker_logger( + logger=structlog.get_logger(), team_id=inputs.team_id, destination="BigQuery" + ) await logger.ainfo( "Batch exporting range %s - %s to BigQuery: %s.%s.%s", inputs.data_interval_start, @@ -357,24 +377,52 @@ async def insert_into_bigquery_activity(inputs: BigQueryInsertInputs) -> Records field.name for field in dataclasses.fields(inputs) }: model = inputs.batch_export_model + if model is not None: + model_name = model.name + extra_query_parameters = model.schema["values"] if model.schema is not None else None + fields = model.schema["fields"] if model.schema is not None else None + else: + model_name = "events" + extra_query_parameters = None + fields = None else: model = inputs.batch_export_schema + model_name = "custom" + extra_query_parameters = model["values"] if model is not None else {} + fields = model["fields"] if model is not None else None - records_iterator = iter_model_records( + queue, done_event, produce_task = start_produce_batch_export_record_batches( client=client, - model=model, + model_name=model_name, + is_backfill=inputs.is_backfill, team_id=inputs.team_id, interval_start=data_interval_start, interval_end=inputs.data_interval_end, 
exclude_events=inputs.exclude_events, include_events=inputs.include_events, + fields=fields, destination_default_fields=bigquery_default_fields(), - is_backfill=inputs.is_backfill, + extra_query_parameters=extra_query_parameters, ) - first_record_batch, records_iterator = await apeek_first_and_rewind(records_iterator) - if first_record_batch is None: + get_schema_task = asyncio.create_task(queue.get_schema()) + wait_for_producer_done_task = asyncio.create_task(done_event.wait()) + + await asyncio.wait([get_schema_task, wait_for_producer_done_task], return_when=asyncio.FIRST_COMPLETED) + + # Finishing producing happens sequentially after putting to queue and setting the schema. + # So, either we finished both tasks, or we finished without putting anything in the queue. + if get_schema_task.done(): + # In the first case, we'll land here. + # The schema is available, and the queue is not empty, so we can start the batch export. + record_batch_schema = get_schema_task.result() + elif wait_for_producer_done_task.done(): + # In the second case, we'll land here. + # The schema is not available as the queue is empty. + # Since we finished producing with an empty queue, there is nothing to batch export. return 0 + else: + raise Exception("Unreachable") if inputs.use_json_type is True: json_type = "JSON" @@ -383,8 +431,6 @@ async def insert_into_bigquery_activity(inputs: BigQueryInsertInputs) -> Records json_type = "STRING" json_columns = [] - first_record_batch = cast_record_batch_json_columns(first_record_batch, json_columns=json_columns) - if model is None or (isinstance(model, BatchExportModel) and model.name == "events"): schema = [ bigquery.SchemaField("uuid", "STRING"), @@ -401,9 +447,7 @@ async def insert_into_bigquery_activity(inputs: BigQueryInsertInputs) -> Records bigquery.SchemaField("bq_ingested_timestamp", "TIMESTAMP"), ] else: - column_names = [column for column in first_record_batch.schema.names if column != "_inserted_at"] - record_schema = first_record_batch.select(column_names).schema - schema = get_bigquery_fields_from_record_schema(record_schema, known_json_columns=json_columns) + schema = get_bigquery_fields_from_record_schema(record_batch_schema, known_json_columns=json_columns) rows_exported = get_rows_exported_metric() bytes_exported = get_bytes_exported_metric() @@ -446,41 +490,47 @@ async def flush_to_bigquery( last: bool, error: Exception | None, ): + table = bigquery_stage_table if requires_merge else bigquery_table await logger.adebug( - "Loading %s records of size %s bytes", + "Loading %s records of size %s bytes to BigQuery table '%s'", records_since_last_flush, bytes_since_last_flush, + table, ) - table = bigquery_stage_table if requires_merge else bigquery_table await bq_client.load_jsonl_file(local_results_file, table, schema) + await logger.adebug("Loading to BigQuery table '%s' finished", table) rows_exported.add(records_since_last_flush) bytes_exported.add(bytes_since_last_flush) heartbeater.details = (str(last_inserted_at),) - record_schema = pa.schema( - # NOTE: For some reason, some batches set non-nullable fields as non-nullable, whereas other - # record batches have them as nullable. - # Until we figure it out, we set all fields to nullable. There are some fields we know - # are not nullable, but I'm opting for the more flexible option until we out why schemas differ - # between batches. 
- [ - field.with_nullable(True) - for field in first_record_batch.select([field.name for field in schema]).schema - ] - ) - writer = JSONLBatchExportWriter( - max_bytes=settings.BATCH_EXPORT_BIGQUERY_UPLOAD_CHUNK_SIZE_BYTES, - flush_callable=flush_to_bigquery, - ) + flush_tasks = [] + while not queue.empty() or not done_event.is_set(): + await logger.adebug("Starting record batch writer") + flush_start_event = asyncio.Event() + task = asyncio.create_task( + consume_batch_export_record_batches( + queue, + done_event, + flush_start_event, + flush_to_bigquery, + json_columns, + settings.BATCH_EXPORT_BIGQUERY_UPLOAD_CHUNK_SIZE_BYTES, + ) + ) + + await flush_start_event.wait() - async with writer.open_temporary_file(): - async for record_batch in records_iterator: - record_batch = cast_record_batch_json_columns(record_batch, json_columns=json_columns) + flush_tasks.append(task) + + await logger.adebug( + "Finished producing and consuming all record batches, now waiting on any pending flush tasks" + ) + await asyncio.wait(flush_tasks) - await writer.write_record_batch(record_batch) + records_total = functools.reduce(operator.add, (task.result() for task in flush_tasks)) if requires_merge: merge_key = ( @@ -494,7 +544,74 @@ async def flush_to_bigquery( update_fields=schema, ) - return writer.records_total + return records_total + + +async def consume_batch_export_record_batches( + queue: asyncio.Queue, + done_event: asyncio.Event, + flush_start_event: asyncio.Event, + flush_to_bigquery: FlushCallable, + json_columns: list[str], + max_bytes: int, +): + """Consume batch export record batches from queue into a writing loop. + + Each record will be written to a temporary file, and flushed after + configured `max_bytes`. Flush is done on context manager exit by + `JSONLBatchExportWriter`. + + This coroutine reports when flushing will start by setting the + `flush_start_event`. This is used by the main thread to start a new writer + task as flushing is about to begin, since that can be too slow to do + sequentially. + + If there are not enough events to fill up `max_bytes`, the writing + loop will detect that there are no more events produced and shut itself off + by using the `done_event`, which should be set by the queue producer. + + Arguments: + queue: The queue we will be listening on for record batches. + done_event: Event set by producer when done. + flush_to_start_event: Event set by us when flushing is to about to + start. + json_columns: Used to cast columns of the record batch to JSON. + max_bytes: Max bytes to write before flushing. + + Returns: + Number of total records written and flushed in this task. 
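+
+    The caller sums these per-task totals (via functools.reduce over the finished flush tasks)
+    to report the overall number of records exported.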
+ """ + writer = JSONLBatchExportWriter( + max_bytes=max_bytes, + flush_callable=flush_to_bigquery, + ) + + async with writer.open_temporary_file(): + await logger.adebug("Starting record batch writing loop") + while True: + try: + record_batch = queue.get_nowait() + except asyncio.QueueEmpty: + if done_event.is_set(): + await logger.adebug("Empty queue with no more events being produced, closing writer loop") + flush_start_event.set() + # Exit context manager to trigger flush + break + else: + await asyncio.sleep(0.1) + continue + + record_batch = cast_record_batch_json_columns(record_batch, json_columns=json_columns) + await writer.write_record_batch(record_batch, flush=False) + + if writer.should_flush(): + await logger.adebug("Writer finished, ready to flush events") + flush_start_event.set() + # Exit context manager to trigger flush + break + + await logger.adebug("Completed %s records", writer.records_total) + return writer.records_total def get_batch_export_writer( diff --git a/posthog/temporal/batch_exports/redshift_batch_export.py b/posthog/temporal/batch_exports/redshift_batch_export.py index e64305b2a1735..d49d18fec92e7 100644 --- a/posthog/temporal/batch_exports/redshift_batch_export.py +++ b/posthog/temporal/batch_exports/redshift_batch_export.py @@ -211,7 +211,7 @@ def redshift_default_fields() -> list[BatchExportField]: def get_redshift_fields_from_record_schema( - record_schema: pa.Schema, known_super_columns: list[str], use_super: bool = False + record_schema: pa.Schema, known_super_columns: list[str], use_super: bool ) -> Fields: """Generate a list of supported Redshift fields from PyArrow schema. @@ -267,6 +267,8 @@ async def insert_records_to_redshift( schema: str | None, table: str, batch_size: int = 100, + use_super: bool = False, + known_super_columns: list[str] | None = None, ) -> int: """Execute an INSERT query with given Redshift connection. @@ -302,7 +304,14 @@ async def insert_records_to_redshift( table=table_identifier, fields=sql.SQL(", ").join(map(sql.Identifier, columns)), ) - template = sql.SQL("({})").format(sql.SQL(", ").join(map(sql.Placeholder, columns))) + placeholders: list[sql.Composable] = [] + for column in columns: + if use_super is True and known_super_columns is not None and column in known_super_columns: + placeholders.append(sql.SQL("JSON_PARSE({placeholder})").format(placeholder=sql.Placeholder(column))) + else: + placeholders.append(sql.Placeholder(column)) + + template = sql.SQL("({})").format(sql.SQL(", ").join(placeholders)) rows_exported = get_rows_exported_metric() total_rows_exported = 0 @@ -324,6 +333,10 @@ async def flush_to_redshift(batch): # in the future if we decide it's useful enough. async for record in records_iterator: + for column in columns: + if known_super_columns is not None and column in known_super_columns: + record[column] = json.dumps(record[column], ensure_ascii=False) + batch.append(cursor.mogrify(template, record).encode("utf-8")) if len(batch) < batch_size: continue @@ -471,9 +484,7 @@ def map_to_record(row: dict) -> dict: for column in known_super_columns: if record.get(column, None) is not None: # TODO: We should be able to save a json.loads here. - record[column] = json.dumps( - remove_escaped_whitespace_recursive(json.loads(record[column])), ensure_ascii=False - ) + record[column] = remove_escaped_whitespace_recursive(json.loads(record[column])) return record @@ -487,6 +498,8 @@ async def record_generator() -> collections.abc.AsyncGenerator[dict[str, typing. 
redshift_client, inputs.schema, redshift_stage_table if requires_merge else redshift_table, + use_super=properties_type == "SUPER", + known_super_columns=known_super_columns, ) if requires_merge: diff --git a/posthog/temporal/batch_exports/temporary_file.py b/posthog/temporal/batch_exports/temporary_file.py index 4d7dc45df5496..19973d3d84617 100644 --- a/posthog/temporal/batch_exports/temporary_file.py +++ b/posthog/temporal/batch_exports/temporary_file.py @@ -7,6 +7,7 @@ import csv import datetime as dt import gzip +import json import tempfile import typing @@ -96,6 +97,9 @@ def __exit__(self, exc, value, tb): def __iter__(self): yield from self._file + def __str__(self) -> str: + return self._file.name + @property def brotli_compressor(self): if self._brotli_compressor is None: @@ -387,7 +391,7 @@ def track_bytes_written(self, batch_export_file: BatchExportTemporaryFile) -> No self.bytes_total = batch_export_file.bytes_total self.bytes_since_last_flush = batch_export_file.bytes_since_last_reset - async def write_record_batch(self, record_batch: pa.RecordBatch) -> None: + async def write_record_batch(self, record_batch: pa.RecordBatch, flush: bool = True) -> None: """Issue a record batch write tracking progress and flushing if required.""" record_batch = record_batch.sort_by("_inserted_at") last_inserted_at = record_batch.column("_inserted_at")[-1].as_py() @@ -401,9 +405,12 @@ async def write_record_batch(self, record_batch: pa.RecordBatch) -> None: self.track_records_written(record_batch) self.track_bytes_written(self.batch_export_file) - if self.bytes_since_last_flush >= self.max_bytes: + if flush and self.should_flush(): await self.flush(last_inserted_at) + def should_flush(self) -> bool: + return self.bytes_since_last_flush >= self.max_bytes + async def flush(self, last_inserted_at: dt.datetime, is_last: bool = False) -> None: """Call the provided `flush_callable` and reset underlying file. @@ -457,16 +464,43 @@ def write_dict(self, d: dict[str, typing.Any]) -> int: """Write a single row of JSONL.""" try: n = self.batch_export_file.write(orjson.dumps(d, default=str) + b"\n") - except orjson.JSONEncodeError: - logger.exception("Failed to encode with orjson: %s", d) - # orjson is very strict about invalid unicode. This slow path protects us against - # things we've observed in practice, like single surrogate codes, e.g. "\ud83d" - cleaned_content = replace_broken_unicode(d) - n = self.batch_export_file.write(orjson.dumps(cleaned_content, default=str) + b"\n") - except TypeError: - logger.exception("Orjson detected a deeply nested dict: %s", d) - raise - + except orjson.JSONEncodeError as err: + # NOTE: `orjson.JSONEncodeError` is actually just an alias for `TypeError`. + # This handler will catch everything coming from orjson, so we have to + # awkwardly check error messages. + if str(err) == "Recursion limit reached": + # Orjson enforces an unmodifiable recursion limit (256), so we can't + # dump very nested dicts. + if d.get("event", None) == "$web_vitals": + # These are PostHog events that for a while included a bunch of + # nested DOM structures. Eventually, this was removed, but these + # events could still be present in database. + # Let's try to clear the key with nested elements first. + try: + del d["properties"]["$web_vitals_INP_event"]["attribution"]["interactionTargetElement"] + except KeyError: + # We tried, fallback to the slower but more permissive stdlib + # json. 
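+                        # json.dumps recurses up to the interpreter recursion limit (about 1000
+                        # frames by default), so it can usually handle these deeply nested
+                        # payloads, just more slowly than orjson.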
+ logger.exception("PostHog $web_vitals event didn't match expected structure") + dumped = json.dumps(d).encode("utf-8") + n = self.batch_export_file.write(dumped + b"\n") + else: + dumped = orjson.dumps(d, default=str) + n = self.batch_export_file.write(dumped + b"\n") + + else: + # In this case, we fallback to the slower but more permissive stdlib + # json. + logger.exception("Orjson detected a deeply nested dict: %s", d) + dumped = json.dumps(d).encode("utf-8") + n = self.batch_export_file.write(dumped + b"\n") + else: + # Orjson is very strict about invalid unicode. This slow path protects us + # against things we've observed in practice, like single surrogate codes, e.g. + # "\ud83d" + logger.exception("Failed to encode with orjson: %s", d) + cleaned_content = replace_broken_unicode(d) + n = self.batch_export_file.write(orjson.dumps(cleaned_content, default=str) + b"\n") return n def _write_record_batch(self, record_batch: pa.RecordBatch) -> None: diff --git a/posthog/temporal/common/asyncpa.py b/posthog/temporal/common/asyncpa.py index c301538a50eb0..d76dffb5ecb9c 100644 --- a/posthog/temporal/common/asyncpa.py +++ b/posthog/temporal/common/asyncpa.py @@ -1,6 +1,10 @@ +import asyncio import typing import pyarrow as pa +import structlog + +logger = structlog.get_logger() CONTINUATION_BYTES = b"\xff\xff\xff\xff" @@ -12,7 +16,7 @@ class InvalidMessageFormat(Exception): class AsyncMessageReader: """Asynchronously read PyArrow messages from bytes iterator.""" - def __init__(self, bytes_iter: typing.AsyncIterator[bytes]): + def __init__(self, bytes_iter: typing.AsyncIterator[tuple[bytes, bool]]): self._bytes = bytes_iter self._buffer = bytearray() @@ -58,7 +62,8 @@ async def read_next_message(self) -> pa.Message: async def read_until(self, n: int) -> None: """Read from self._bytes until there are at least n bytes in self._buffer.""" while len(self._buffer) < n: - self._buffer.extend(await anext(self._bytes)) + bytes, _ = await anext(self._bytes) + self._buffer.extend(bytes) def parse_body_size(self, metadata_flatbuffer: bytearray) -> int: """Parse body size from metadata flatbuffer. 
@@ -98,7 +103,7 @@ def parse_body_size(self, metadata_flatbuffer: bytearray) -> int: class AsyncRecordBatchReader: """Asynchronously read PyArrow RecordBatches from an iterator of bytes.""" - def __init__(self, bytes_iter: typing.AsyncIterator[bytes]) -> None: + def __init__(self, bytes_iter: typing.AsyncIterator[tuple[bytes, bool]]) -> None: self._reader = AsyncMessageReader(bytes_iter) self._schema: None | pa.Schema = None @@ -127,3 +132,20 @@ async def read_schema(self) -> pa.Schema: raise TypeError(f"Expected message of type 'schema' got '{message.type}'") return pa.ipc.read_schema(message) + + +class AsyncRecordBatchProducer(AsyncRecordBatchReader): + def __init__(self, bytes_iter: typing.AsyncIterator[tuple[bytes, bool]]) -> None: + super().__init__(bytes_iter) + + async def produce(self, queue: asyncio.Queue, done_event: asyncio.Event): + await logger.adebug("Starting record batch produce loop") + while True: + try: + record_batch = await self.read_next_record_batch() + except StopAsyncIteration: + await logger.adebug("No more record batches to produce, closing loop") + done_event.set() + return + + await queue.put(record_batch) diff --git a/posthog/temporal/common/clickhouse.py b/posthog/temporal/common/clickhouse.py index 147f09813e2c6..570cfe8d5bb5e 100644 --- a/posthog/temporal/common/clickhouse.py +++ b/posthog/temporal/common/clickhouse.py @@ -1,7 +1,9 @@ +import asyncio import collections.abc import contextlib import datetime as dt import json +import ssl import typing import uuid @@ -10,7 +12,7 @@ import requests from django.conf import settings -from posthog.temporal.common.asyncpa import AsyncRecordBatchReader +import posthog.temporal.common.asyncpa as asyncpa def encode_clickhouse_data(data: typing.Any, quote_char="'") -> bytes: @@ -76,6 +78,13 @@ def encode_clickhouse_data(data: typing.Any, quote_char="'") -> bytes: return f"{quote_char}{str_data}{quote_char}".encode() +class ClickHouseClientNotConnected(Exception): + """Exception raised when attempting to run an async query without connecting.""" + + def __init__(self): + super().__init__("ClickHouseClient is not connected. Are you running in a context manager?") + + class ClickHouseError(Exception): """Base Exception representing anything going wrong with ClickHouse.""" @@ -97,21 +106,21 @@ class ClickHouseClient: def __init__( self, - session: aiohttp.ClientSession | None = None, url: str = "http://localhost:8123", user: str = "default", password: str = "", database: str = "default", + timeout: None | aiohttp.ClientTimeout = None, + ssl: ssl.SSLContext | bool = True, **kwargs, ): - if session is None: - self.session = aiohttp.ClientSession() - else: - self.session = session - self.url = url self.headers = {} self.params = {} + self.timeout = timeout + self.ssl = ssl + self.connector: None | aiohttp.TCPConnector = None + self.session: None | aiohttp.ClientSession = None if user: self.headers["X-ClickHouse-User"] = user @@ -123,10 +132,9 @@ def __init__( self.params.update(kwargs) @classmethod - def from_posthog_settings(cls, session, settings, **kwargs): + def from_posthog_settings(cls, settings, **kwargs): """Initialize a ClickHouseClient from PostHog settings.""" return cls( - session=session, url=settings.CLICKHOUSE_URL, user=settings.CLICKHOUSE_USER, password=settings.CLICKHOUSE_PASSWORD, @@ -140,6 +148,9 @@ async def is_alive(self) -> bool: Returns: A boolean indicating whether the connection is alive. 
""" + if self.session is None: + raise ClickHouseClientNotConnected() + try: await self.session.get( url=self.url, @@ -217,6 +228,8 @@ async def aget_query( Returns: The response received from the ClickHouse HTTP interface. """ + if self.session is None: + raise ClickHouseClientNotConnected() params = {**self.params} if query_id is not None: @@ -245,6 +258,8 @@ async def apost_query( Returns: The response received from the ClickHouse HTTP interface. """ + if self.session is None: + raise ClickHouseClientNotConnected() params = {**self.params} if query_id is not None: @@ -369,20 +384,48 @@ async def astream_query_as_arrow( """Execute the given query in ClickHouse and stream back the response as Arrow record batches. This method makes sense when running with FORMAT ArrowStream, although we currently do not enforce this. - As pyarrow doesn't support async/await buffers, this method is sync and utilizes requests instead of aiohttp. """ async with self.apost_query(query, *data, query_parameters=query_parameters, query_id=query_id) as response: - reader = AsyncRecordBatchReader(response.content.iter_any()) + reader = asyncpa.AsyncRecordBatchReader(response.content.iter_chunks()) async for batch in reader: yield batch + async def aproduce_query_as_arrow_record_batches( + self, + query, + *data, + queue: asyncio.Queue, + done_event: asyncio.Event, + query_parameters=None, + query_id: str | None = None, + ) -> None: + """Execute the given query in ClickHouse and produce Arrow record batches to given buffer queue. + + This method makes sense when running with FORMAT ArrowStream, although we currently do not enforce this. + This method is intended to be ran as a background task, producing record batches continuously, while other + downstream consumer tasks process them from the queue. 
+ """ + async with self.apost_query(query, *data, query_parameters=query_parameters, query_id=query_id) as response: + reader = asyncpa.AsyncRecordBatchProducer(response.content.iter_chunks()) + await reader.produce(queue=queue, done_event=done_event) + async def __aenter__(self): """Enter method part of the AsyncContextManager protocol.""" + self.connector = aiohttp.TCPConnector(ssl=self.ssl) + self.session = aiohttp.ClientSession(connector=self.connector, timeout=self.timeout) return self async def __aexit__(self, exc_type, exc_value, tb): """Exit method part of the AsyncContextManager protocol.""" - await self.session.close() + if self.session is not None: + await self.session.close() + + if self.connector is not None: + await self.connector.close() + + self.session = None + self.connector = None + return False @contextlib.asynccontextmanager @@ -427,19 +470,17 @@ async def get_client( team_id, settings.CLICKHOUSE_MAX_BLOCK_SIZE_DEFAULT ) - with aiohttp.TCPConnector(ssl=False) as connector: - async with aiohttp.ClientSession(connector=connector, timeout=timeout) as session: - async with ClickHouseClient( - session, - url=settings.CLICKHOUSE_OFFLINE_HTTP_URL, - user=settings.CLICKHOUSE_USER, - password=settings.CLICKHOUSE_PASSWORD, - database=settings.CLICKHOUSE_DATABASE, - max_execution_time=settings.CLICKHOUSE_MAX_EXECUTION_TIME, - max_memory_usage=settings.CLICKHOUSE_MAX_MEMORY_USAGE, - max_block_size=max_block_size, - cancel_http_readonly_queries_on_client_close=1, - output_format_arrow_string_as_string="true", - **kwargs, - ) as client: - yield client + async with ClickHouseClient( + url=settings.CLICKHOUSE_OFFLINE_HTTP_URL, + user=settings.CLICKHOUSE_USER, + password=settings.CLICKHOUSE_PASSWORD, + database=settings.CLICKHOUSE_DATABASE, + timeout=timeout, + ssl=False, + max_execution_time=settings.CLICKHOUSE_MAX_EXECUTION_TIME, + max_memory_usage=settings.CLICKHOUSE_MAX_MEMORY_USAGE, + max_block_size=max_block_size, + output_format_arrow_string_as_string="true", + **kwargs, + ) as client: + yield client diff --git a/posthog/temporal/common/logger.py b/posthog/temporal/common/logger.py index c769116921f6c..2b1107d8124cc 100644 --- a/posthog/temporal/common/logger.py +++ b/posthog/temporal/common/logger.py @@ -1,8 +1,8 @@ import asyncio import json import logging -import uuid import ssl +import uuid import aiokafka import structlog @@ -14,7 +14,6 @@ from posthog.kafka_client.topics import KAFKA_LOG_ENTRIES - BACKGROUND_LOGGER_TASKS = set() @@ -29,6 +28,18 @@ async def bind_temporal_worker_logger(team_id: int, destination: str | None = No return logger.new(team_id=team_id, destination=destination, **temporal_context) +async def configure_temporal_worker_logger( + logger, team_id: int, destination: str | None = None +) -> FilteringBoundLogger: + """Return a bound logger for Temporal Workers.""" + if not structlog.is_configured(): + configure_logger() + + temporal_context = get_temporal_context() + + return logger.new(team_id=team_id, destination=destination, **temporal_context) + + async def bind_temporal_org_worker_logger( organization_id: uuid.UUID, destination: str | None = None ) -> FilteringBoundLogger: diff --git a/posthog/temporal/data_imports/__init__.py b/posthog/temporal/data_imports/__init__.py index e945b483d8e95..5895309e270bf 100644 --- a/posthog/temporal/data_imports/__init__.py +++ b/posthog/temporal/data_imports/__init__.py @@ -6,6 +6,7 @@ update_external_data_job_model, check_schedule_activity, check_billing_limits_activity, + sync_new_schemas_activity, ) WORKFLOWS = 
[ExternalDataJobWorkflow] @@ -17,4 +18,5 @@ create_source_templates, check_schedule_activity, check_billing_limits_activity, + sync_new_schemas_activity, ] diff --git a/posthog/temporal/data_imports/external_data_job.py b/posthog/temporal/data_imports/external_data_job.py index 439318ebf591a..f9dcfd28bc4b5 100644 --- a/posthog/temporal/data_imports/external_data_job.py +++ b/posthog/temporal/data_imports/external_data_job.py @@ -11,6 +11,10 @@ CheckBillingLimitsActivityInputs, check_billing_limits_activity, ) +from posthog.temporal.data_imports.workflow_activities.sync_new_schemas import ( + SyncNewSchemasActivityInputs, + sync_new_schemas_activity, +) from posthog.temporal.utils import ExternalDataWorkflowInputs from posthog.temporal.data_imports.workflow_activities.create_job_model import ( CreateExternalDataJobModelActivityInputs, @@ -152,7 +156,6 @@ async def run(self, inputs: ExternalDataWorkflowInputs): source_id=inputs.external_data_source_id, ) - # TODO: split out the creation of the external data job model from schema getting to seperate out exception handling job_id, incremental = await workflow.execute_activity( create_external_data_job_model_activity, create_external_data_job_inputs, @@ -161,7 +164,7 @@ async def run(self, inputs: ExternalDataWorkflowInputs): initial_interval=dt.timedelta(seconds=10), maximum_interval=dt.timedelta(seconds=60), maximum_attempts=3, - non_retryable_error_types=["NotNullViolation", "IntegrityError", "BaseSSHTunnelForwarderError"], + non_retryable_error_types=["NotNullViolation", "IntegrityError"], ), ) @@ -191,6 +194,18 @@ async def run(self, inputs: ExternalDataWorkflowInputs): ) try: + await workflow.execute_activity( + sync_new_schemas_activity, + SyncNewSchemasActivityInputs(source_id=str(inputs.external_data_source_id), team_id=inputs.team_id), + start_to_close_timeout=dt.timedelta(minutes=10), + retry_policy=RetryPolicy( + initial_interval=dt.timedelta(seconds=10), + maximum_interval=dt.timedelta(seconds=60), + maximum_attempts=3, + non_retryable_error_types=["NotNullViolation", "IntegrityError", "BaseSSHTunnelForwarderError"], + ), + ) + job_inputs = ImportDataActivityInputs( team_id=inputs.team_id, run_id=job_id, diff --git a/posthog/temporal/data_imports/pipelines/sql_database/__init__.py b/posthog/temporal/data_imports/pipelines/sql_database/__init__.py index 96bfa8a9d202d..962cbb2d4ad9b 100644 --- a/posthog/temporal/data_imports/pipelines/sql_database/__init__.py +++ b/posthog/temporal/data_imports/pipelines/sql_database/__init__.py @@ -65,6 +65,8 @@ def sql_source_for_type( else: incremental = None + connect_args = [] + if source_type == ExternalDataSource.Type.POSTGRES: credentials = ConnectionStringCredentials( f"postgresql://{user}:{password}@{host}:{port}/{database}?sslmode={sslmode}" @@ -76,6 +78,10 @@ def sql_source_for_type( credentials = ConnectionStringCredentials( f"mysql+pymysql://{user}:{password}@{host}:{port}/{database}?ssl_ca={ssl_ca}&ssl_verify_cert=false" ) + + # PlanetScale needs this to be set + if host.endswith("psdb.cloud"): + connect_args = ["SET workload = 'OLAP';"] elif source_type == ExternalDataSource.Type.MSSQL: credentials = ConnectionStringCredentials( f"mssql+pyodbc://{user}:{password}@{host}:{port}/{database}?driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes" @@ -84,7 +90,12 @@ def sql_source_for_type( raise Exception("Unsupported source_type") db_source = sql_database( - credentials, schema=schema, table_names=table_names, incremental=incremental, team_id=team_id + credentials, + 
schema=schema, + table_names=table_names, + incremental=incremental, + team_id=team_id, + connect_args=connect_args, ) return db_source @@ -180,6 +191,7 @@ def sql_database( table_names: Optional[List[str]] = dlt.config.value, # noqa: UP006 incremental: Optional[dlt.sources.incremental] = None, team_id: Optional[int] = None, + connect_args: Optional[list[str]] = None, ) -> Iterable[DltResource]: """ A DLT source which loads data from an SQL database using SQLAlchemy. @@ -231,6 +243,7 @@ def sql_database( engine=engine, table=table, incremental=incremental, + connect_args=connect_args, ) ) diff --git a/posthog/temporal/data_imports/pipelines/sql_database/helpers.py b/posthog/temporal/data_imports/pipelines/sql_database/helpers.py index d877effb3e374..50577b6b04d17 100644 --- a/posthog/temporal/data_imports/pipelines/sql_database/helpers.py +++ b/posthog/temporal/data_imports/pipelines/sql_database/helpers.py @@ -14,7 +14,7 @@ from dlt.common.typing import TDataItem from .settings import DEFAULT_CHUNK_SIZE -from sqlalchemy import Table, create_engine, Column +from sqlalchemy import Table, create_engine, Column, text from sqlalchemy.engine import Engine from sqlalchemy.sql import Select @@ -26,11 +26,13 @@ def __init__( table: Table, chunk_size: int = 1000, incremental: Optional[dlt.sources.incremental[Any]] = None, + connect_args: Optional[list[str]] = None, ) -> None: self.engine = engine self.table = table self.chunk_size = chunk_size self.incremental = incremental + self.connect_args = connect_args if incremental: try: self.cursor_column: Optional[Column[Any]] = table.c[incremental.cursor_path] @@ -74,6 +76,9 @@ def make_query(self) -> Select[Any]: def load_rows(self) -> Iterator[list[TDataItem]]: query = self.make_query() with self.engine.connect() as conn: + if self.connect_args: + for stmt in self.connect_args: + conn.execute(text(stmt)) result = conn.execution_options(yield_per=self.chunk_size).execute(query) for partition in result.partitions(size=self.chunk_size): yield [dict(row._mapping) for row in partition] @@ -84,6 +89,7 @@ def table_rows( table: Table, chunk_size: int = DEFAULT_CHUNK_SIZE, incremental: Optional[dlt.sources.incremental[Any]] = None, + connect_args: Optional[list[str]] = None, ) -> Iterator[TDataItem]: """ A DLT source which loads data from an SQL database using SQLAlchemy. 
@@ -100,7 +106,7 @@ def table_rows( """ yield dlt.mark.materialize_table_schema() # type: ignore - loader = TableLoader(engine, table, incremental=incremental, chunk_size=chunk_size) + loader = TableLoader(engine, table, incremental=incremental, chunk_size=chunk_size, connect_args=connect_args) yield from loader.load_rows() engine.dispose() diff --git a/posthog/temporal/data_imports/workflow_activities/create_job_model.py b/posthog/temporal/data_imports/workflow_activities/create_job_model.py index dac5a3d81d032..ac8ab640161cf 100644 --- a/posthog/temporal/data_imports/workflow_activities/create_job_model.py +++ b/posthog/temporal/data_imports/workflow_activities/create_job_model.py @@ -5,19 +5,15 @@ from temporalio import activity # TODO: remove dependency -from posthog.temporal.data_imports.pipelines.schemas import PIPELINE_TYPE_SCHEMA_DEFAULT_MAPPING from posthog.warehouse.external_data_source.jobs import ( create_external_data_job, ) -from posthog.warehouse.models import sync_old_schemas_with_new_schemas, ExternalDataSource, aget_schema_by_id +from posthog.warehouse.models import aget_schema_by_id from posthog.warehouse.models.external_data_schema import ( ExternalDataSchema, - get_sql_schemas_for_source_type, - get_snowflake_schemas, ) from posthog.temporal.common.logger import bind_temporal_worker_logger -from posthog.warehouse.models.ssh_tunnel import SSHTunnel @dataclasses.dataclass @@ -44,66 +40,6 @@ async def create_external_data_job_model_activity(inputs: CreateExternalDataJobM schema.status = ExternalDataSchema.Status.RUNNING await sync_to_async(schema.save)() - source = await sync_to_async(ExternalDataSource.objects.get)(team_id=inputs.team_id, id=inputs.source_id) - - if source.source_type in [ - ExternalDataSource.Type.POSTGRES, - ExternalDataSource.Type.MYSQL, - ExternalDataSource.Type.MSSQL, - ]: - host = source.job_inputs.get("host") - port = source.job_inputs.get("port") - user = source.job_inputs.get("user") - password = source.job_inputs.get("password") - database = source.job_inputs.get("database") - db_schema = source.job_inputs.get("schema") - - using_ssh_tunnel = str(source.job_inputs.get("ssh_tunnel_enabled", False)) == "True" - ssh_tunnel_host = source.job_inputs.get("ssh_tunnel_host") - ssh_tunnel_port = source.job_inputs.get("ssh_tunnel_port") - ssh_tunnel_auth_type = source.job_inputs.get("ssh_tunnel_auth_type") - ssh_tunnel_auth_type_username = source.job_inputs.get("ssh_tunnel_auth_type_username") - ssh_tunnel_auth_type_password = source.job_inputs.get("ssh_tunnel_auth_type_password") - ssh_tunnel_auth_type_passphrase = source.job_inputs.get("ssh_tunnel_auth_type_passphrase") - ssh_tunnel_auth_type_private_key = source.job_inputs.get("ssh_tunnel_auth_type_private_key") - - ssh_tunnel = SSHTunnel( - enabled=using_ssh_tunnel, - host=ssh_tunnel_host, - port=ssh_tunnel_port, - auth_type=ssh_tunnel_auth_type, - username=ssh_tunnel_auth_type_username, - password=ssh_tunnel_auth_type_password, - passphrase=ssh_tunnel_auth_type_passphrase, - private_key=ssh_tunnel_auth_type_private_key, - ) - - schemas_to_sync = await sync_to_async(get_sql_schemas_for_source_type)( - source.source_type, host, port, database, user, password, db_schema, ssh_tunnel - ) - elif source.source_type == ExternalDataSource.Type.SNOWFLAKE: - account_id = source.job_inputs.get("account_id") - user = source.job_inputs.get("user") - password = source.job_inputs.get("password") - database = source.job_inputs.get("database") - warehouse = source.job_inputs.get("warehouse") - sf_schema = 
source.job_inputs.get("schema") - role = source.job_inputs.get("role") - - schemas_to_sync = await sync_to_async(get_snowflake_schemas)( - account_id, database, warehouse, user, password, sf_schema, role - ) - else: - schemas_to_sync = list(PIPELINE_TYPE_SCHEMA_DEFAULT_MAPPING.get(source.source_type, ())) - - # TODO: this could cause a race condition where each schema worker creates the missing schema - - await sync_to_async(sync_old_schemas_with_new_schemas)( - schemas_to_sync, - source_id=inputs.source_id, - team_id=inputs.team_id, - ) - logger.info( f"Created external data job for external data source {inputs.source_id}", ) diff --git a/posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py b/posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py new file mode 100644 index 0000000000000..34e27b0cd49ff --- /dev/null +++ b/posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py @@ -0,0 +1,104 @@ +import dataclasses + +from asgiref.sync import sync_to_async +from temporalio import activity + +from posthog.temporal.common.logger import bind_temporal_worker_logger +from posthog.temporal.data_imports.pipelines.schemas import PIPELINE_TYPE_SCHEMA_DEFAULT_MAPPING + +from posthog.warehouse.models import sync_old_schemas_with_new_schemas, ExternalDataSource +from posthog.warehouse.models.external_data_schema import ( + get_sql_schemas_for_source_type, + get_snowflake_schemas, +) +from posthog.warehouse.models.ssh_tunnel import SSHTunnel + + +@dataclasses.dataclass +class SyncNewSchemasActivityInputs: + source_id: str + team_id: int + + +@activity.defn +async def sync_new_schemas_activity(inputs: SyncNewSchemasActivityInputs) -> None: + logger = await bind_temporal_worker_logger(team_id=inputs.team_id) + + logger.info("Syncing new -> old schemas") + + source = await sync_to_async(ExternalDataSource.objects.get)(team_id=inputs.team_id, id=inputs.source_id) + + schemas_to_sync: list[str] = [] + + if source.source_type in [ + ExternalDataSource.Type.POSTGRES, + ExternalDataSource.Type.MYSQL, + ExternalDataSource.Type.MSSQL, + ]: + if not source.job_inputs: + return + + host = source.job_inputs.get("host") + port = source.job_inputs.get("port") + user = source.job_inputs.get("user") + password = source.job_inputs.get("password") + database = source.job_inputs.get("database") + db_schema = source.job_inputs.get("schema") + + using_ssh_tunnel = str(source.job_inputs.get("ssh_tunnel_enabled", False)) == "True" + ssh_tunnel_host = source.job_inputs.get("ssh_tunnel_host") + ssh_tunnel_port = source.job_inputs.get("ssh_tunnel_port") + ssh_tunnel_auth_type = source.job_inputs.get("ssh_tunnel_auth_type") + ssh_tunnel_auth_type_username = source.job_inputs.get("ssh_tunnel_auth_type_username") + ssh_tunnel_auth_type_password = source.job_inputs.get("ssh_tunnel_auth_type_password") + ssh_tunnel_auth_type_passphrase = source.job_inputs.get("ssh_tunnel_auth_type_passphrase") + ssh_tunnel_auth_type_private_key = source.job_inputs.get("ssh_tunnel_auth_type_private_key") + + ssh_tunnel = SSHTunnel( + enabled=using_ssh_tunnel, + host=ssh_tunnel_host, + port=ssh_tunnel_port, + auth_type=ssh_tunnel_auth_type, + username=ssh_tunnel_auth_type_username, + password=ssh_tunnel_auth_type_password, + passphrase=ssh_tunnel_auth_type_passphrase, + private_key=ssh_tunnel_auth_type_private_key, + ) + + sql_schemas = await sync_to_async(get_sql_schemas_for_source_type)( + source.source_type, host, port, database, user, password, db_schema, ssh_tunnel + ) + + schemas_to_sync = 
list(sql_schemas.keys()) + elif source.source_type == ExternalDataSource.Type.SNOWFLAKE: + if not source.job_inputs: + return + + account_id = source.job_inputs.get("account_id") + user = source.job_inputs.get("user") + password = source.job_inputs.get("password") + database = source.job_inputs.get("database") + warehouse = source.job_inputs.get("warehouse") + sf_schema = source.job_inputs.get("schema") + role = source.job_inputs.get("role") + + sql_schemas = await sync_to_async(get_snowflake_schemas)( + account_id, database, warehouse, user, password, sf_schema, role + ) + + schemas_to_sync = list(sql_schemas.keys()) + else: + schemas_to_sync = list(PIPELINE_TYPE_SCHEMA_DEFAULT_MAPPING.get(source.source_type, ())) + + # TODO: this could cause a race condition where each schema worker creates the missing schema + + schemas_created = await sync_to_async(sync_old_schemas_with_new_schemas)( + schemas_to_sync, + source_id=inputs.source_id, + team_id=inputs.team_id, + ) + + if len(schemas_created) > 0: + logger.info(f"Added new schemas: {', '.join(schemas_created)}") + else: + logger.info("No new schemas to create") diff --git a/posthog/temporal/tests/batch_exports/test_batch_exports.py b/posthog/temporal/tests/batch_exports/test_batch_exports.py index dda307dda004a..8c3fb186b82cd 100644 --- a/posthog/temporal/tests/batch_exports/test_batch_exports.py +++ b/posthog/temporal/tests/batch_exports/test_batch_exports.py @@ -2,15 +2,19 @@ import json import operator from random import randint +import asyncio import pytest from django.test import override_settings +import pyarrow as pa from posthog.batch_exports.service import BatchExportModel from posthog.temporal.batch_exports.batch_exports import ( get_data_interval, iter_model_records, iter_records, + start_produce_batch_export_record_batches, + RecordBatchQueue, ) from posthog.temporal.tests.utils.events import generate_test_events_in_clickhouse @@ -404,3 +408,427 @@ def test_get_data_interval(interval, data_interval_end, expected): """Test get_data_interval returns the expected data interval tuple.""" result = get_data_interval(interval, data_interval_end) assert result == expected + + +async def get_record_batch_from_queue(queue, done_event): + while not queue.empty() or not done_event.is_set(): + try: + record_batch = queue.get_nowait() + except asyncio.QueueEmpty: + if done_event.is_set(): + break + else: + await asyncio.sleep(0.1) + continue + + return record_batch + return None + + +async def test_start_produce_batch_export_record_batches_uses_extra_query_parameters(clickhouse_client): + """Test start_produce_batch_export_record_batches uses a HogQL value.""" + team_id = randint(1, 1000000) + data_interval_end = dt.datetime.fromisoformat("2023-04-25T14:31:00.000000+00:00") + data_interval_start = dt.datetime.fromisoformat("2023-04-25T14:30:00.000000+00:00") + + (events, _, _) = await generate_test_events_in_clickhouse( + client=clickhouse_client, + team_id=team_id, + start_time=data_interval_start, + end_time=data_interval_end, + count=10, + count_outside_range=0, + count_other_team=0, + duplicate=False, + properties={"$browser": "Chrome", "$os": "Mac OS X", "custom": 3}, + ) + + queue, done_event, _ = start_produce_batch_export_record_batches( + client=clickhouse_client, + team_id=team_id, + is_backfill=False, + model_name="events", + interval_start=data_interval_start.isoformat(), + interval_end=data_interval_end.isoformat(), + fields=[ + {"expression": "JSONExtractInt(properties, %(hogql_val_0)s)", "alias": "custom_prop"}, + ], + 
extra_query_parameters={"hogql_val_0": "custom"}, + ) + + records = [] + while not queue.empty() or not done_event.is_set(): + record_batch = await get_record_batch_from_queue(queue, done_event) + if record_batch is None: + break + + for record in record_batch.to_pylist(): + records.append(record) + + for expected, record in zip(events, records): + if expected["properties"] is None: + raise ValueError("Empty properties") + + assert record["custom_prop"] == expected["properties"]["custom"] + + +async def test_start_produce_batch_export_record_batches_can_flatten_properties(clickhouse_client): + """Test start_produce_batch_export_record_batches can flatten properties.""" + team_id = randint(1, 1000000) + data_interval_end = dt.datetime.fromisoformat("2023-04-25T14:31:00.000000+00:00") + data_interval_start = dt.datetime.fromisoformat("2023-04-25T14:30:00.000000+00:00") + + (events, _, _) = await generate_test_events_in_clickhouse( + client=clickhouse_client, + team_id=team_id, + start_time=data_interval_start, + end_time=data_interval_end, + count=10, + count_outside_range=0, + count_other_team=0, + duplicate=False, + properties={"$browser": "Chrome", "$os": "Mac OS X", "custom-property": 3}, + ) + + queue, done_event, _ = start_produce_batch_export_record_batches( + client=clickhouse_client, + team_id=team_id, + is_backfill=False, + model_name="events", + interval_start=data_interval_start.isoformat(), + interval_end=data_interval_end.isoformat(), + fields=[ + {"expression": "event", "alias": "event"}, + {"expression": "JSONExtractString(properties, '$browser')", "alias": "browser"}, + {"expression": "JSONExtractString(properties, '$os')", "alias": "os"}, + {"expression": "JSONExtractInt(properties, 'custom-property')", "alias": "custom_prop"}, + ], + extra_query_parameters={"hogql_val_0": "custom"}, + ) + + records = [] + while not queue.empty() or not done_event.is_set(): + record_batch = await get_record_batch_from_queue(queue, done_event) + if record_batch is None: + break + + for record in record_batch.to_pylist(): + records.append(record) + + all_expected = sorted(events, key=operator.itemgetter("event")) + all_record = sorted(records, key=operator.itemgetter("event")) + + for expected, record in zip(all_expected, all_record): + if expected["properties"] is None: + raise ValueError("Empty properties") + + assert record["browser"] == expected["properties"]["$browser"] + assert record["os"] == expected["properties"]["$os"] + assert record["custom_prop"] == expected["properties"]["custom-property"] + + +@pytest.mark.parametrize( + "field", + [ + {"expression": "event", "alias": "event_name"}, + {"expression": "team_id", "alias": "team"}, + {"expression": "timestamp", "alias": "time_the_stamp"}, + {"expression": "created_at", "alias": "creation_time"}, + ], +) +async def test_start_produce_batch_export_record_batches_with_single_field_and_alias(clickhouse_client, field): + """Test start_produce_batch_export_record_batches can return a single aliased field.""" + team_id = randint(1, 1000000) + data_interval_end = dt.datetime.fromisoformat("2023-04-25T14:31:00.000000+00:00") + data_interval_start = dt.datetime.fromisoformat("2023-04-25T14:30:00.000000+00:00") + + (events, _, _) = await generate_test_events_in_clickhouse( + client=clickhouse_client, + team_id=team_id, + start_time=data_interval_start, + end_time=data_interval_end, + count=10, + count_outside_range=0, + count_other_team=0, + duplicate=False, + properties={"$browser": "Chrome", "$os": "Mac OS X"}, + ) + + queue, done_event, _ 
= start_produce_batch_export_record_batches( + client=clickhouse_client, + team_id=team_id, + is_backfill=False, + model_name="events", + interval_start=data_interval_start.isoformat(), + interval_end=data_interval_end.isoformat(), + fields=[field], + extra_query_parameters={}, + ) + + records = [] + while not queue.empty() or not done_event.is_set(): + record_batch = await get_record_batch_from_queue(queue, done_event) + if record_batch is None: + break + + for record in record_batch.to_pylist(): + records.append(record) + + all_expected = sorted(events, key=operator.itemgetter(field["expression"])) + all_record = sorted(records, key=operator.itemgetter(field["alias"])) + + for expected, record in zip(all_expected, all_record): + assert len(record) == 2 + # Always set for progress tracking + assert record.get("_inserted_at", None) is not None + + result = record[field["alias"]] + expected_value = expected[field["expression"]] # type: ignore + + if isinstance(result, dt.datetime): + # Event generation function returns datetimes as strings. + expected_value = dt.datetime.fromisoformat(expected_value).replace(tzinfo=dt.UTC) + + assert result == expected_value + + +async def test_start_produce_batch_export_record_batches_ignores_timestamp_predicates(clickhouse_client): + """Test the rows returned ignore timestamp predicates when configured.""" + team_id = randint(1, 1000000) + + inserted_at = dt.datetime.fromisoformat("2023-04-25T14:30:00.000000+00:00") + data_interval_end = inserted_at + dt.timedelta(hours=1) + + # Insert some data with timestamps a couple of years before inserted_at + timestamp_start = inserted_at - dt.timedelta(hours=24 * 365 * 2) + timestamp_end = inserted_at - dt.timedelta(hours=24 * 365) + + (events, _, _) = await generate_test_events_in_clickhouse( + client=clickhouse_client, + team_id=team_id, + start_time=timestamp_start, + end_time=timestamp_end, + count=10, + count_outside_range=0, + count_other_team=0, + duplicate=True, + person_properties={"$browser": "Chrome", "$os": "Mac OS X"}, + inserted_at=inserted_at, + ) + + queue, done_event, _ = start_produce_batch_export_record_batches( + client=clickhouse_client, + team_id=team_id, + is_backfill=False, + model_name="events", + interval_start=inserted_at.isoformat(), + interval_end=data_interval_end.isoformat(), + ) + + records = [] + while not queue.empty() or not done_event.is_set(): + record_batch = await get_record_batch_from_queue(queue, done_event) + if record_batch is None: + break + + for record in record_batch.to_pylist(): + records.append(record) + + assert len(records) == 0 + + with override_settings(UNCONSTRAINED_TIMESTAMP_TEAM_IDS=[str(team_id)]): + queue, done_event, _ = start_produce_batch_export_record_batches( + client=clickhouse_client, + team_id=team_id, + is_backfill=False, + model_name="events", + interval_start=inserted_at.isoformat(), + interval_end=data_interval_end.isoformat(), + ) + + records = [] + while not queue.empty() or not done_event.is_set(): + record_batch = await get_record_batch_from_queue(queue, done_event) + if record_batch is None: + break + + for record in record_batch.to_pylist(): + records.append(record) + + assert_records_match_events(records, events) + + +async def test_start_produce_batch_export_record_batches_can_include_events(clickhouse_client): + """Test the rows returned can include events.""" + team_id = randint(1, 1000000) + data_interval_end = dt.datetime.fromisoformat("2023-04-25T14:31:00.000000+00:00") + data_interval_start = 
dt.datetime.fromisoformat("2023-04-25T14:30:00.000000+00:00") + + (events, _, _) = await generate_test_events_in_clickhouse( + client=clickhouse_client, + team_id=team_id, + start_time=data_interval_start, + end_time=data_interval_end, + count=10000, + count_outside_range=0, + count_other_team=0, + duplicate=True, + person_properties={"$browser": "Chrome", "$os": "Mac OS X"}, + ) + + # Include the latter half of events. + include_events = (event["event"] for event in events[5000:]) + + queue, done_event, _ = start_produce_batch_export_record_batches( + client=clickhouse_client, + team_id=team_id, + is_backfill=False, + model_name="events", + interval_start=data_interval_start.isoformat(), + interval_end=data_interval_end.isoformat(), + include_events=include_events, + ) + + records = [] + while not queue.empty() or not done_event.is_set(): + record_batch = await get_record_batch_from_queue(queue, done_event) + if record_batch is None: + break + + for record in record_batch.to_pylist(): + records.append(record) + + assert_records_match_events(records, events[5000:]) + + +async def test_start_produce_batch_export_record_batches_can_exclude_events(clickhouse_client): + """Test the rows returned can include events.""" + team_id = randint(1, 1000000) + data_interval_end = dt.datetime.fromisoformat("2023-04-25T14:31:00.000000+00:00") + data_interval_start = dt.datetime.fromisoformat("2023-04-25T14:30:00.000000+00:00") + + (events, _, _) = await generate_test_events_in_clickhouse( + client=clickhouse_client, + team_id=team_id, + start_time=data_interval_start, + end_time=data_interval_end, + count=10000, + count_outside_range=0, + count_other_team=0, + duplicate=True, + person_properties={"$browser": "Chrome", "$os": "Mac OS X"}, + ) + + # Exclude the latter half of events. 
+ exclude_events = (event["event"] for event in events[5000:]) + + queue, done_event, _ = start_produce_batch_export_record_batches( + client=clickhouse_client, + team_id=team_id, + is_backfill=False, + model_name="events", + interval_start=data_interval_start.isoformat(), + interval_end=data_interval_end.isoformat(), + exclude_events=exclude_events, + ) + + records = [] + while not queue.empty() or not done_event.is_set(): + record_batch = await get_record_batch_from_queue(queue, done_event) + if record_batch is None: + break + + for record in record_batch.to_pylist(): + records.append(record) + + assert_records_match_events(records, events[:5000]) + + +async def test_start_produce_batch_export_record_batches_handles_duplicates(clickhouse_client): + """Test the rows returned are de-duplicated.""" + team_id = randint(1, 1000000) + data_interval_end = dt.datetime.fromisoformat("2023-04-25T14:31:00.000000+00:00") + data_interval_start = dt.datetime.fromisoformat("2023-04-25T14:30:00.000000+00:00") + + (events, _, _) = await generate_test_events_in_clickhouse( + client=clickhouse_client, + team_id=team_id, + start_time=data_interval_start, + end_time=data_interval_end, + count=100, + count_outside_range=0, + count_other_team=0, + duplicate=True, + person_properties={"$browser": "Chrome", "$os": "Mac OS X"}, + ) + + queue, done_event, _ = start_produce_batch_export_record_batches( + client=clickhouse_client, + team_id=team_id, + is_backfill=False, + model_name="events", + interval_start=data_interval_start.isoformat(), + interval_end=data_interval_end.isoformat(), + ) + + records = [] + while not queue.empty() or not done_event.is_set(): + record_batch = await get_record_batch_from_queue(queue, done_event) + if record_batch is None: + break + + for record in record_batch.to_pylist(): + records.append(record) + + assert_records_match_events(records, events) + + +async def test_record_batch_queue_tracks_bytes(): + """Test `RecordBatchQueue` tracks bytes from `RecordBatch`.""" + records = [{"test": 1}, {"test": 2}, {"test": 3}] + record_batch = pa.RecordBatch.from_pylist(records) + + queue = RecordBatchQueue() + + await queue.put(record_batch) + assert record_batch.get_total_buffer_size() == queue.qsize() + + item = await queue.get() + + assert item == record_batch + assert queue.qsize() == 0 + + +async def test_record_batch_queue_raises_queue_full(): + """Test `QueueFull` is raised when we put too many bytes.""" + records = [{"test": 1}, {"test": 2}, {"test": 3}] + record_batch = pa.RecordBatch.from_pylist(records) + record_batch_size = record_batch.get_total_buffer_size() + + queue = RecordBatchQueue(max_size_bytes=record_batch_size) + + await queue.put(record_batch) + assert record_batch.get_total_buffer_size() == queue.qsize() + + with pytest.raises(asyncio.QueueFull): + queue.put_nowait(record_batch) + + item = await queue.get() + + assert item == record_batch + assert queue.qsize() == 0 + + +async def test_record_batch_queue_sets_schema(): + """Test `RecordBatchQueue` sets a schema from first `RecordBatch`.""" + records = [{"test": 1}, {"test": 2}, {"test": 3}] + record_batch = pa.RecordBatch.from_pylist(records) + + queue = RecordBatchQueue() + + await queue.put(record_batch) + + assert queue._schema_set.is_set() + + schema = await queue.get_schema() + assert schema == record_batch.schema diff --git a/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py index 0f184b79356a1..00228adcb8cff 
100644 --- a/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py @@ -105,7 +105,12 @@ async def assert_clickhouse_records_in_bigquery( inserted_bq_ingested_timestamp.append(v) continue - inserted_record[k] = json.loads(v) if k in json_columns and v is not None else v + if k in json_columns: + assert ( + isinstance(v, dict) or v is None + ), f"Expected '{k}' to be JSON, but it was not deserialized to dict" + + inserted_record[k] = v inserted_records.append(inserted_record) diff --git a/posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py index 3b81878541fd7..2067ae65d7cae 100644 --- a/posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py @@ -63,7 +63,7 @@ async def assert_clickhouse_records_in_redshfit( data_interval_end: dt.datetime, exclude_events: list[str] | None = None, include_events: list[str] | None = None, - use_super_type: bool = False, + properties_data_type: str = "varchar", sort_key: str = "event", is_backfill: bool = False, ): @@ -90,14 +90,25 @@ async def assert_clickhouse_records_in_redshfit( team_id: The ID of the team that we are testing events for. batch_export_schema: Custom schema used in the batch export. """ - inserted_records = [] + super_columns = ["properties", "set", "set_once", "person_properties"] + inserted_records = [] async with redshift_connection.cursor() as cursor: await cursor.execute(sql.SQL("SELECT * FROM {}").format(sql.Identifier(schema_name, table_name))) columns = [column.name for column in cursor.description] for row in await cursor.fetchall(): event = dict(zip(columns, row)) + + for column in super_columns: + # When reading a SUPER type field we read it as a str. + # But Redshift will remove all unquoted whitespace, so + # '{"prop": 1, "prop": 2}' in CH becomes '{"prop":1,"prop":2}' in Redshift. + # To make comparison easier we load them as JSON even if we don't have + # properties_data_type set to SUPER, thus they are both dicts. + if column in event and event.get(column, None) is not None: + event[column] = json.loads(event[column]) + inserted_records.append(event) schema_column_names = [field["alias"] for field in redshift_default_fields()] @@ -110,9 +121,15 @@ async def assert_clickhouse_records_in_redshfit( if batch_export_schema is not None: schema_column_names = [field["alias"] for field in batch_export_schema["fields"]] elif isinstance(batch_export_model, BatchExportModel) and batch_export_model.name == "persons": - schema_column_names = ["team_id", "distinct_id", "person_id", "properties", "version", "_inserted_at"] - - super_columns = ["properties", "set", "set_once", "person_properties"] + schema_column_names = [ + "team_id", + "distinct_id", + "person_id", + "properties", + "person_distinct_id_version", + "person_version", + "_inserted_at", + ] expected_records = [] async for record_batch in iter_model_records( @@ -134,12 +151,10 @@ async def assert_clickhouse_records_in_redshfit( # _inserted_at is not exported, only used for tracking progress. 
continue - if k in super_columns and v is not None: - expected_record[k] = json.dumps( - remove_escaped_whitespace_recursive(json.loads(v)), ensure_ascii=False - ) + elif k in super_columns and v is not None: + expected_record[k] = remove_escaped_whitespace_recursive(json.loads(v)) elif isinstance(v, dt.datetime): - expected_record[k] = v.replace(tzinfo=dt.UTC) # type: ignore + expected_record[k] = v.replace(tzinfo=dt.UTC) else: expected_record[k] = v @@ -214,6 +229,15 @@ async def psycopg_connection(redshift_config, setup_postgres_test_db): await connection.close() +@pytest.fixture +def properties_data_type(request) -> str: + """A parametrizable fixture to configure the `str` `properties_data_type` setting.""" + try: + return request.param + except AttributeError: + return "varchar" + + TEST_MODELS: list[BatchExportModel | BatchExportSchema | None] = [ BatchExportModel( name="a-custom-model", @@ -241,6 +265,7 @@ async def psycopg_connection(redshift_config, setup_postgres_test_db): @pytest.mark.parametrize("exclude_events", [None, ["test-exclude"]], indirect=True) +@pytest.mark.parametrize("properties_data_type", ["super", "varchar"], indirect=True) @pytest.mark.parametrize("model", TEST_MODELS) async def test_insert_into_redshift_activity_inserts_data_into_redshift_table( clickhouse_client, @@ -252,6 +277,7 @@ async def test_insert_into_redshift_activity_inserts_data_into_redshift_table( generate_test_data, data_interval_start, data_interval_end, + properties_data_type, ateam, ): """Test that the insert_into_redshift_activity function inserts data into a Redshift table. @@ -273,6 +299,9 @@ async def test_insert_into_redshift_activity_inserts_data_into_redshift_table( if isinstance(model, BatchExportModel) and model.name == "persons" and MISSING_REQUIRED_ENV_VARS: pytest.skip("Persons batch export cannot be tested in PostgreSQL") + if properties_data_type == "super" and MISSING_REQUIRED_ENV_VARS: + pytest.skip("SUPER type is only available in Redshift") + await generate_test_events_in_clickhouse( client=clickhouse_client, team_id=ateam.pk, @@ -307,6 +336,7 @@ async def test_insert_into_redshift_activity_inserts_data_into_redshift_table( exclude_events=exclude_events, batch_export_schema=batch_export_schema, batch_export_model=batch_export_model, + properties_data_type=properties_data_type, **redshift_config, ) @@ -322,6 +352,7 @@ async def test_insert_into_redshift_activity_inserts_data_into_redshift_table( data_interval_end=data_interval_end, batch_export_model=model, exclude_events=exclude_events, + properties_data_type=properties_data_type, sort_key="person_id" if batch_export_model is not None and batch_export_model.name == "persons" else "event", ) diff --git a/posthog/temporal/tests/batch_exports/test_temporary_file.py b/posthog/temporal/tests/batch_exports/test_temporary_file.py index 4f6ffc8ad0569..900ca5e9d3fed 100644 --- a/posthog/temporal/tests/batch_exports/test_temporary_file.py +++ b/posthog/temporal/tests/batch_exports/test_temporary_file.py @@ -246,6 +246,7 @@ async def store_in_memory_on_flush( assert writer.records_total == record_batch.num_rows + in_memory_file_obj.seek(0) lines = in_memory_file_obj.readlines() for index, line in enumerate(lines): written_jsonl = json.loads(line) @@ -254,7 +255,7 @@ async def store_in_memory_on_flush( expected_jsonl = single_record_batch.to_pylist()[0] assert "_inserted_at" not in written_jsonl - assert written_jsonl == expected_jsonl + assert written_jsonl == {k: v for k, v in expected_jsonl.items() if k != "_inserted_at"} assert 
inserted_ats_seen == [record_batch.column("_inserted_at")[-1].as_py()] @@ -288,6 +289,7 @@ async def store_in_memory_on_flush( async with writer.open_temporary_file(): await writer.write_record_batch(record_batch) + in_memory_file_obj.seek(0) reader = csv.reader( in_memory_file_obj, delimiter=",", @@ -297,10 +299,10 @@ async def store_in_memory_on_flush( ) for index, written_csv_row in enumerate(reader): single_record_batch = record_batch.slice(offset=index, length=1) - expected_csv = single_record_batch.to_pylist()[0] + expected_dict = single_record_batch.to_pylist()[0] assert "_inserted_at" not in written_csv_row - assert written_csv_row == expected_csv + assert written_csv_row == list({k: v for k, v in expected_dict.items() if k != "_inserted_at"}.values()) assert inserted_ats_seen == [record_batch.column("_inserted_at")[-1].as_py()] @@ -339,6 +341,7 @@ async def store_in_memory_on_flush( async with writer.open_temporary_file(): await writer.write_record_batch(record_batch) + in_memory_file_obj.seek(0) written_parquet = pq.read_table(in_memory_file_obj) for index, written_row_as_dict in enumerate(written_parquet.to_pylist()): @@ -418,3 +421,108 @@ async def track_flushes(*args, **kwargs): assert writer.records_since_last_flush == 0 assert flush_counter == 2 + + +@pytest.mark.asyncio +async def test_jsonl_writer_deals_with_web_vitals(): + """Test old $web_vitals record batches are written as valid JSONL.""" + in_memory_file_obj = io.BytesIO() + inserted_ats_seen: list[LastInsertedAt] = [] + + record_batch = pa.RecordBatch.from_pydict( + { + "event": pa.array(["$web_vitals"]), + "properties": pa.array( + [ + { + "$web_vitals_INP_event": { + "attribution": {"interactionTargetElement": json.loads("[" * 256 + "]" * 256)}, + "somethingElse": 1, + } + } + ] + ), + "_inserted_at": pa.array([0]), + } + ) + + async def store_in_memory_on_flush( + batch_export_file, + records_since_last_flush, + bytes_since_last_flush, + flush_counter, + last_inserted_at, + is_last, + error, + ): + assert writer.records_since_last_flush == record_batch.num_rows + in_memory_file_obj.write(batch_export_file.read()) + inserted_ats_seen.append(last_inserted_at) + + writer = JSONLBatchExportWriter(max_bytes=1, flush_callable=store_in_memory_on_flush) + + async with writer.open_temporary_file(): + await writer.write_record_batch(record_batch) + + assert writer.records_total == record_batch.num_rows == 1 + + in_memory_file_obj.seek(0) + lines = in_memory_file_obj.readlines() + line = lines[0] + written_jsonl = json.loads(line) + expected_jsonl = record_batch.to_pylist()[0] + + assert "_inserted_at" not in written_jsonl + assert "interactionTargetElement" not in written_jsonl["properties"]["$web_vitals_INP_event"]["attribution"] + assert "interactionTargetElement" in expected_jsonl["properties"]["$web_vitals_INP_event"]["attribution"] + + del expected_jsonl["properties"]["$web_vitals_INP_event"]["attribution"]["interactionTargetElement"] + + assert written_jsonl == {k: v for k, v in expected_jsonl.items() if k != "_inserted_at"} + assert inserted_ats_seen == [record_batch.column("_inserted_at")[-1].as_py()] + + +@pytest.mark.asyncio +async def test_jsonl_writer_deals_with_nested_user_events(): + """Test very nested user event record batches are written as valid JSONL.""" + in_memory_file_obj = io.BytesIO() + inserted_ats_seen: list[LastInsertedAt] = [] + + record_batch = pa.RecordBatch.from_pydict( + { + "event": pa.array(["my_event"]), + "properties": pa.array([{"we_have_to_go_deeper": json.loads("[" * 256 + "]" * 
256)}]), + "_inserted_at": pa.array([0]), + } + ) + + async def store_in_memory_on_flush( + batch_export_file, + records_since_last_flush, + bytes_since_last_flush, + flush_counter, + last_inserted_at, + is_last, + error, + ): + assert writer.records_since_last_flush == record_batch.num_rows + in_memory_file_obj.write(batch_export_file.read()) + inserted_ats_seen.append(last_inserted_at) + + writer = JSONLBatchExportWriter(max_bytes=1, flush_callable=store_in_memory_on_flush) + + record_batch = record_batch.sort_by("_inserted_at") + async with writer.open_temporary_file(): + await writer.write_record_batch(record_batch) + + assert writer.records_total == record_batch.num_rows + + in_memory_file_obj.seek(0) + lines = in_memory_file_obj.readlines() + line = lines[0] + written_jsonl = json.loads(line) + expected_jsonl = record_batch.to_pylist()[0] + + assert "_inserted_at" not in written_jsonl + assert written_jsonl == {k: v for k, v in expected_jsonl.items() if k != "_inserted_at"} + assert inserted_ats_seen == [record_batch.column("_inserted_at")[-1].as_py()] diff --git a/posthog/temporal/tests/conftest.py b/posthog/temporal/tests/conftest.py index f7802d6252875..f88d74009385d 100644 --- a/posthog/temporal/tests/conftest.py +++ b/posthog/temporal/tests/conftest.py @@ -1,14 +1,14 @@ import asyncio import random +import psycopg import pytest import pytest_asyncio import temporalio.worker from asgiref.sync import sync_to_async from django.conf import settings -from temporalio.testing import ActivityEnvironment -import psycopg from psycopg import sql +from temporalio.testing import ActivityEnvironment from posthog.models import Organization, Team from posthog.temporal.common.clickhouse import ClickHouseClient @@ -65,10 +65,10 @@ def activity_environment(): return ActivityEnvironment() -@pytest.fixture(scope="module") -def clickhouse_client(): +@pytest_asyncio.fixture(scope="module") +async def clickhouse_client(): """Provide a ClickHouseClient to use in tests.""" - client = ClickHouseClient( + async with ClickHouseClient( url=settings.CLICKHOUSE_HTTP_URL, user=settings.CLICKHOUSE_USER, password=settings.CLICKHOUSE_PASSWORD, @@ -78,9 +78,8 @@ def clickhouse_client(): # Durting testing, it's useful to enable it to wait for mutations. # Otherwise, tests that rely on running a mutation may become flaky. 
mutations_sync=2, - ) - - yield client + ) as client: + yield client @pytest_asyncio.fixture diff --git a/posthog/temporal/tests/external_data/test_external_data_job.py b/posthog/temporal/tests/external_data/test_external_data_job.py index 93630571c3a7a..91dd413468312 100644 --- a/posthog/temporal/tests/external_data/test_external_data_job.py +++ b/posthog/temporal/tests/external_data/test_external_data_job.py @@ -21,12 +21,17 @@ create_external_data_job_model_activity, ) from posthog.temporal.data_imports.workflow_activities.import_data import ImportDataActivityInputs, import_data_activity +from posthog.temporal.data_imports.workflow_activities.sync_new_schemas import ( + SyncNewSchemasActivityInputs, + sync_new_schemas_activity, +) from posthog.warehouse.external_data_source.jobs import create_external_data_job from posthog.warehouse.models import ( get_latest_run_if_exists, ExternalDataJob, ExternalDataSource, ExternalDataSchema, + get_external_data_job, ) from posthog.temporal.data_imports.pipelines.schemas import ( @@ -195,19 +200,16 @@ async def test_create_external_job_activity_update_schemas(activity_environment, source_type="Stripe", ) - schema = await sync_to_async(ExternalDataSchema.objects.create)( + await sync_to_async(ExternalDataSchema.objects.create)( name=PIPELINE_TYPE_SCHEMA_DEFAULT_MAPPING[new_source.source_type][0], team_id=team.id, source_id=new_source.pk, should_sync=True, ) - inputs = CreateExternalDataJobModelActivityInputs(team_id=team.id, source_id=new_source.pk, schema_id=schema.id) - - run_id, _ = await activity_environment.run(create_external_data_job_model_activity, inputs) + inputs = SyncNewSchemasActivityInputs(source_id=str(new_source.pk), team_id=team.id) - runs = ExternalDataJob.objects.filter(id=run_id) - assert await sync_to_async(runs.exists)() + await activity_environment.run(sync_new_schemas_activity, inputs) all_schemas = await sync_to_async(get_all_schemas_for_source_id)(new_source.pk, team.id) @@ -379,9 +381,7 @@ async def setup_job_1(): schema=customer_schema, ) - new_job = await sync_to_async( - ExternalDataJob.objects.filter(id=new_job.id).prefetch_related("pipeline").prefetch_related("schema").get - )() + new_job = await get_external_data_job(new_job.id) inputs = ImportDataActivityInputs( team_id=team.id, @@ -403,16 +403,17 @@ async def setup_job_2(): job_inputs={"stripe_secret_key": "test-key", "stripe_account_id": "acct_id"}, ) + charge_schema = await _create_schema("Charge", new_source, team) + new_job: ExternalDataJob = await sync_to_async(ExternalDataJob.objects.create)( team_id=team.id, pipeline_id=new_source.pk, status=ExternalDataJob.Status.RUNNING, rows_synced=0, + schema=charge_schema, ) - new_job = await sync_to_async(ExternalDataJob.objects.filter(id=new_job.id).prefetch_related("pipeline").get)() - - charge_schema = await _create_schema("Charge", new_source, team) + new_job = await get_external_data_job(new_job.id) inputs = ImportDataActivityInputs( team_id=team.id, @@ -698,6 +699,7 @@ async def mock_async_func(inputs): import_data_activity, create_source_templates, check_billing_limits_activity, + sync_new_schemas_activity, ], workflow_runner=UnsandboxedWorkflowRunner(), ): diff --git a/posthog/test/__snapshots__/test_feature_flag.ambr b/posthog/test/__snapshots__/test_feature_flag.ambr index 339a4714be80f..2c30fecdac689 100644 --- a/posthog/test/__snapshots__/test_feature_flag.ambr +++ b/posthog/test/__snapshots__/test_feature_flag.ambr @@ -149,6 +149,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", 
"posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -320,6 +321,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", @@ -656,6 +658,7 @@ "posthog_team"."session_recording_minimum_duration_milliseconds", "posthog_team"."session_recording_linked_flag", "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", "posthog_team"."session_replay_config", "posthog_team"."survey_config", "posthog_team"."capture_console_log_opt_in", diff --git a/posthog/test/base.py b/posthog/test/base.py index ba2f5ea2f460f..385da83ba11ab 100644 --- a/posthog/test/base.py +++ b/posthog/test/base.py @@ -576,6 +576,15 @@ def assertQueryMatchesSnapshot(self, query, params=None, replace_all_numbers=Fal r"_condition_X_level", query, ) + + # replace cohort tuples + # like (tuple(cohortpeople.cohort_id, cohortpeople.version), [(35, 0)]) + query = re.sub( + r"\(tuple\((.*)\.cohort_id, (.*)\.version\), \[\(\d+, \d+\)\]\)", + r"(tuple(\1.cohort_id, \2.version), [(2, 0)])", + query, + ) + #### Cohort replacements end # Replace organization_id and notebook_id lookups, for postgres diff --git a/posthog/udf_versioner.py b/posthog/udf_versioner.py index 3be55d183ef42..cb22ed9455be1 100644 --- a/posthog/udf_versioner.py +++ b/posthog/udf_versioner.py @@ -2,16 +2,21 @@ import os import shutil import datetime +import re import xml.etree.ElementTree as ET from xml import etree # For revertible cloud deploys: # 1. Develop using the python files at the top level of `user_scripts`, with schema defined in `docker/clickhouse/user_defined_function.xml` # 2. If you're made breaking changes to UDFs (likely involving changing type definitions), when ready to deploy, increment the version below and run this file -# 3. Copy the `user_defined_function.xml` file in the newly created version folder (e.g. `user_scripts/v4/user_defined_function.xml`) to the `posthog-cloud-infra` repo and deploy it -# 4. After that deploy goes out, it is safe to land and deploy the changes to the `posthog` repo -# If deploys aren't seamless, look into moving the action that copies the `user_scripts` folder to the clickhouse cluster earlier in the deploy process -UDF_VERSION = 0 # Last modified by: @aspicer, 2024-10-01 +# 3. Overwrite `user_defined_function.xml` in the `posthog-cloud-infra` repo (us, eu, and dev) with `user_scripts/latest_user_defined_function.xml` and deploy it +# 4. Land a version of the posthog repo with the updated `user_scripts` folder from the new branch (make sure this PR doesn't include changes to this file with the new version) +# 5. Run the `copy_udfs_to_clickhouse` action in the `posthog_cloud_infra` repo to deploy the `user_scripts` folder to clickhouse +# 6. 
After that deploy goes out, it is safe to land and deploy the full changes to the `posthog` repo +UDF_VERSION = 2 # Last modified by: @aspicer, 2024-10-16 + +# Clean up all versions less than this +EARLIEST_UDF_VERSION = 1 CLICKHOUSE_XML_FILENAME = "user_defined_function.xml" ACTIVE_XML_CONFIG = "../../docker/clickhouse/user_defined_function.xml" @@ -46,6 +51,14 @@ def prepare_version(force=False): last_version_xml = ET.parse(ACTIVE_XML_CONFIG) last_version_root = last_version_xml.getroot() + + # Remove old versions from last_version + for function in list(last_version_root): + name = function.find("name") + match = re.search(r"_v(\d+)$", name.text) + if match is None or int(match.group(1)) < EARLIEST_UDF_VERSION: + last_version_root.remove(function) + # We want to update the name and the command to include the version, and add it to last version for function in list(base_xml.getroot()): name = function.find("name") diff --git a/posthog/user_permissions.py b/posthog/user_permissions.py index 1a4a0213b3cfd..c891b7da44bcb 100644 --- a/posthog/user_permissions.py +++ b/posthog/user_permissions.py @@ -80,7 +80,7 @@ def team_ids_visible_for_user(self) -> list[int]: @cached_property def project_ids_visible_for_user(self) -> list[int]: - return list({team.project_id for team in self.teams_visible_for_user if team.project_id is not None}) + return list({team.project_id for team in self.teams_visible_for_user}) # Cached properties/functions for efficient lookups in other classes diff --git a/posthog/user_scripts/aggregate_funnel_aarch64 b/posthog/user_scripts/aggregate_funnel_aarch64 index aabb3ff28f7cd..2cf3ee037fe3a 100755 Binary files a/posthog/user_scripts/aggregate_funnel_aarch64 and b/posthog/user_scripts/aggregate_funnel_aarch64 differ diff --git a/posthog/user_scripts/aggregate_funnel_array_trends.py b/posthog/user_scripts/aggregate_funnel_array_trends.py deleted file mode 100755 index 15e93f5452797..0000000000000 --- a/posthog/user_scripts/aggregate_funnel_array_trends.py +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/python3 -import sys - -from aggregate_funnel_trends import parse_args, calculate_funnel_trends_from_user_events - -if __name__ == "__main__": - for line in sys.stdin: - calculate_funnel_trends_from_user_events(*parse_args(line)) - sys.stdout.flush() diff --git a/posthog/user_scripts/aggregate_funnel_cohort_trends.py b/posthog/user_scripts/aggregate_funnel_cohort_trends.py deleted file mode 100755 index 15e93f5452797..0000000000000 --- a/posthog/user_scripts/aggregate_funnel_cohort_trends.py +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/python3 -import sys - -from aggregate_funnel_trends import parse_args, calculate_funnel_trends_from_user_events - -if __name__ == "__main__": - for line in sys.stdin: - calculate_funnel_trends_from_user_events(*parse_args(line)) - sys.stdout.flush() diff --git a/posthog/user_scripts/aggregate_funnel_trends.py b/posthog/user_scripts/aggregate_funnel_trends.py deleted file mode 100755 index 3b7d8a5816b3a..0000000000000 --- a/posthog/user_scripts/aggregate_funnel_trends.py +++ /dev/null @@ -1,132 +0,0 @@ -#!/usr/bin/python3 -import sys -from dataclasses import dataclass, replace -from typing import Any, Union -from collections.abc import Sequence -import json - - -def parse_args(line): - args = json.loads(line) - return [ - int(args["from_step"]), - int(args["num_steps"]), - int(args["conversion_window_limit"]), - str(args["breakdown_attribution_type"]), - str(args["funnel_order_type"]), - args["prop_vals"], # Array(Array(String)) - args["value"], # 
Array(Tuple(Nullable(Float64), Nullable(DateTime), Array(String), Array(Int8))) - ] - - -@dataclass(frozen=True) -class EnteredTimestamp: - timestamp: Any - timings: Any - - -# each one can be multiple steps here -# it only matters when they entered the funnel - you can propagate the time from the previous step when you update -# This function is defined for Clickhouse in user_defined_functions.xml along with types -# num_steps is the total number of steps in the funnel -# conversion_window_limit is in seconds -# events is a array of tuples of (timestamp, breakdown, [steps]) -# steps is an array of integers which represent the steps that this event qualifies for. it looks like [1,3,5,6]. -# negative integers represent an exclusion on that step. each event is either all exclusions or all steps. -def calculate_funnel_trends_from_user_events( - from_step: int, - num_steps: int, - conversion_window_limit_seconds: int, - breakdown_attribution_type: str, - funnel_order_type: str, - prop_vals: list[Any], - events: Sequence[tuple[float, int, Union[list[str], int, str], list[int]]], -): - default_entered_timestamp = EnteredTimestamp(0, []) - # If the attribution mode is a breakdown step, set this to the integer that represents that step - breakdown_step = int(breakdown_attribution_type[5:]) if breakdown_attribution_type.startswith("step_") else None - - # Results is a map of start intervals to success or failure. If an interval isn't here, it means the - # user didn't enter - results = {} - - # We call this for each possible breakdown value. - def loop_prop_val(prop_val): - # we need to track every distinct entry into the funnel through to the end - filtered_events = ( - ( - (timestamp, interval_start, breakdown, steps) - for (timestamp, interval_start, breakdown, steps) in events - if breakdown == prop_val - ) - if breakdown_attribution_type == "all_events" - else events - ) - interval_start_to_entered_timestamps = {} - - for timestamp, interval_start, breakdown, steps in filtered_events: - for step in reversed(steps): - exclusion = False - if step < 0: - exclusion = True - step = -step - # Special code to handle the first step - # Potential Optimization: we could skip tracking here if the user has already completed the funnel for this interval - if step == 1: - if interval_start not in interval_start_to_entered_timestamps and interval_start not in results: - entered_timestamp = [default_entered_timestamp] * (num_steps + 1) - # Set the interval start at 0, which is what we want to return if this works. 
- # For strict funnels, we need to track if the "from_step" has been hit - # Abuse the timings field on the 0th index entered_timestamp to have the elt True if we have - entered_timestamp[0] = EnteredTimestamp(interval_start, [True] if from_step == 0 else []) - entered_timestamp[1] = EnteredTimestamp(timestamp, [timestamp]) - interval_start_to_entered_timestamps[interval_start] = entered_timestamp - # list_of_entered_timestamps.append(entered_timestamp) - else: - for entered_timestamp in interval_start_to_entered_timestamps.values(): - in_match_window = ( - timestamp - entered_timestamp[step - 1].timestamp <= conversion_window_limit_seconds - ) - already_reached_this_step_with_same_entered_timestamp = ( - entered_timestamp[step].timestamp == entered_timestamp[step - 1].timestamp - ) - if in_match_window and not already_reached_this_step_with_same_entered_timestamp: - if exclusion: - # this is a complete failure, exclude this person, don't print anything, don't count - return False - is_unmatched_step_attribution = ( - breakdown_step is not None and step == breakdown_step - 1 and prop_val != breakdown - ) - if not is_unmatched_step_attribution: - entered_timestamp[step] = replace( - entered_timestamp[step - 1], - timings=[*entered_timestamp[step - 1].timings, timestamp], - ) - # check if we have hit the goal. if we have, remove it from the list and add it to the successful_timestamps - if entered_timestamp[num_steps].timestamp > 0: - results[entered_timestamp[0].timestamp] = (1, prop_val) - # If we have hit the from_step threshold, record it (abuse the timings field) - elif step == from_step + 1: - entered_timestamp[0].timings.append(True) - - # At the end of the event, clear all steps that weren't done by that event - if funnel_order_type == "strict": - for entered_timestamp in interval_start_to_entered_timestamps.values(): - for i in range(1, len(entered_timestamp)): - if i not in steps: - entered_timestamp[i] = default_entered_timestamp - - # At this point, everything left in entered_timestamps is a failure, if it has made it to from_step - for entered_timestamp in interval_start_to_entered_timestamps.values(): - if entered_timestamp[0].timestamp not in results and len(entered_timestamp[0].timings) > 0: - results[entered_timestamp[0].timestamp] = (-1, prop_val) - - [loop_prop_val(prop_val) for prop_val in prop_vals] - result = [(interval_start, success_bool, prop_val) for interval_start, (success_bool, prop_val) in results.items()] - print(json.dumps({"result": result}), end="\n") # noqa: T201 - - -if __name__ == "__main__": - for line in sys.stdin: - calculate_funnel_trends_from_user_events(*parse_args(line)) - sys.stdout.flush() diff --git a/posthog/user_scripts/aggregate_funnel_x86_64 b/posthog/user_scripts/aggregate_funnel_x86_64 index 8eb41e8979bf2..030bbc0a3fe00 100755 Binary files a/posthog/user_scripts/aggregate_funnel_x86_64 and b/posthog/user_scripts/aggregate_funnel_x86_64 differ diff --git a/posthog/user_scripts/latest_user_defined_function.xml b/posthog/user_scripts/latest_user_defined_function.xml index 67cccc41242c2..c84d6f05a9722 100644 --- a/posthog/user_scripts/latest_user_defined_function.xml +++ b/posthog/user_scripts/latest_user_defined_function.xml @@ -1,8 +1,8 @@ - executable_pool - aggregate_funnel + aggregate_funnel_v1 Array(Tuple(Int8, Nullable(String), Array(Float64), Array(Array(UUID)))) result @@ -30,13 +30,13 @@ This file is autogenerated by udf_versioner.py. 
Do not edit this, only edit the value JSONEachRow - aggregate_funnel steps + v1/aggregate_funnel steps 600 executable_pool - aggregate_funnel_cohort + aggregate_funnel_cohort_v1 Array(Tuple(Int8, UInt64, Array(Float64), Array(Array(UUID)))) result @@ -64,13 +64,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - aggregate_funnel steps + v1/aggregate_funnel steps 600 executable_pool - aggregate_funnel_array + aggregate_funnel_array_v1 Array(Tuple(Int8, Array(String), Array(Float64), Array(Array(UUID)))) result @@ -98,13 +98,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - aggregate_funnel steps + v1/aggregate_funnel steps 600 executable_pool - aggregate_funnel_test + aggregate_funnel_test_v1 String result @@ -132,13 +132,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - aggregate_funnel_test.py + v1/aggregate_funnel_test.py 600 executable_pool - aggregate_funnel_trends + aggregate_funnel_trends_v1 Array(Tuple(UInt64, Int8, Nullable(String))) result @@ -174,13 +174,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - aggregate_funnel trends + v1/aggregate_funnel trends 600 executable_pool - aggregate_funnel_array_trends + aggregate_funnel_array_trends_v1 Array(Tuple(UInt64, Int8, Array(String))) result @@ -213,13 +213,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - aggregate_funnel trends + v1/aggregate_funnel trends 600 executable_pool - aggregate_funnel_cohort_trends + aggregate_funnel_cohort_trends_v1 Array(Tuple(UInt64, Int8, UInt64)) result @@ -252,13 +252,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - aggregate_funnel trends + v1/aggregate_funnel trends 600 executable_pool - aggregate_funnel_array_trends_test + aggregate_funnel_array_trends_test_v1 String result @@ -290,12 +290,12 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - aggregate_funnel_array_trends_test.py + v1/aggregate_funnel_array_trends_test.py 600 executable_pool - aggregate_funnel_v0 + aggregate_funnel_v2 Array(Tuple(Int8, Nullable(String), Array(Float64), Array(Array(UUID)))) result @@ -323,13 +323,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - v0/aggregate_funnel steps + v2/aggregate_funnel steps 600 executable_pool - aggregate_funnel_cohort_v0 + aggregate_funnel_cohort_v2 Array(Tuple(Int8, UInt64, Array(Float64), Array(Array(UUID)))) result @@ -357,13 +357,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - v0/aggregate_funnel steps + v2/aggregate_funnel steps 600 executable_pool - aggregate_funnel_array_v0 + aggregate_funnel_array_v2 Array(Tuple(Int8, Array(String), Array(Float64), Array(Array(UUID)))) result @@ -391,13 +391,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - v0/aggregate_funnel steps + v2/aggregate_funnel steps 600 executable_pool - aggregate_funnel_test_v0 + aggregate_funnel_test_v2 String result @@ -425,14 +425,14 @@ This file is autogenerated by udf_versioner.py. 
Do not edit this, only edit the value JSONEachRow - v0/aggregate_funnel_test.py + v2/aggregate_funnel_test.py 600 executable_pool - aggregate_funnel_trends_v0 - Array(Tuple(UInt64, Int8, Nullable(String))) + aggregate_funnel_trends_v2 + Array(Tuple(UInt64, Int8, Nullable(String), UUID)) result UInt8 @@ -463,19 +463,19 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the prop_vals - Array(Tuple(Nullable(Float64), UInt64, Nullable(String), Array(Int8))) + Array(Tuple(Nullable(Float64), UInt64, UUID, Nullable(String), Array(Int8))) value JSONEachRow - v0/aggregate_funnel trends + v2/aggregate_funnel trends 600 executable_pool - aggregate_funnel_array_trends_v0 + aggregate_funnel_array_trends_v2 - Array(Tuple(UInt64, Int8, Array(String))) + Array(Tuple(UInt64, Int8, Array(String), UUID)) result UInt8 @@ -502,19 +502,19 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the prop_vals - Array(Tuple(Nullable(Float64), UInt64, Array(String), Array(Int8))) + Array(Tuple(Nullable(Float64), UInt64, UUID, Array(String), Array(Int8))) value JSONEachRow - v0/aggregate_funnel trends + v2/aggregate_funnel trends 600 executable_pool - aggregate_funnel_cohort_trends_v0 + aggregate_funnel_cohort_trends_v2 - Array(Tuple(UInt64, Int8, UInt64)) + Array(Tuple(UInt64, Int8, UInt64, UUID)) result UInt8 @@ -541,17 +541,17 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the prop_vals - Array(Tuple(Nullable(Float64), UInt64, UInt64, Array(Int8))) + Array(Tuple(Nullable(Float64), UInt64, UUID, UInt64, Array(Int8))) value JSONEachRow - v0/aggregate_funnel trends + v2/aggregate_funnel trends 600 executable_pool - aggregate_funnel_array_trends_test_v0 + aggregate_funnel_array_trends_test_v2 String result @@ -579,11 +579,11 @@ This file is autogenerated by udf_versioner.py. 
Do not edit this, only edit the prop_vals - Array(Tuple(Nullable(Float64), UInt64, Array(String), Array(Int8))) + Array(Tuple(Nullable(Float64), UInt64, UUID, Array(String), Array(Int8))) value JSONEachRow - v0/aggregate_funnel_array_trends_test.py + v2/aggregate_funnel_array_trends_test.py 600 \ No newline at end of file diff --git a/posthog/user_scripts/v0/aggregate_funnel_array_trends.py b/posthog/user_scripts/v0/aggregate_funnel_array_trends.py deleted file mode 100755 index 15e93f5452797..0000000000000 --- a/posthog/user_scripts/v0/aggregate_funnel_array_trends.py +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/python3 -import sys - -from aggregate_funnel_trends import parse_args, calculate_funnel_trends_from_user_events - -if __name__ == "__main__": - for line in sys.stdin: - calculate_funnel_trends_from_user_events(*parse_args(line)) - sys.stdout.flush() diff --git a/posthog/user_scripts/v0/aggregate_funnel_cohort_trends.py b/posthog/user_scripts/v0/aggregate_funnel_cohort_trends.py deleted file mode 100755 index 15e93f5452797..0000000000000 --- a/posthog/user_scripts/v0/aggregate_funnel_cohort_trends.py +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/python3 -import sys - -from aggregate_funnel_trends import parse_args, calculate_funnel_trends_from_user_events - -if __name__ == "__main__": - for line in sys.stdin: - calculate_funnel_trends_from_user_events(*parse_args(line)) - sys.stdout.flush() diff --git a/posthog/user_scripts/v0/aggregate_funnel_trends.py b/posthog/user_scripts/v0/aggregate_funnel_trends.py deleted file mode 100755 index 3b7d8a5816b3a..0000000000000 --- a/posthog/user_scripts/v0/aggregate_funnel_trends.py +++ /dev/null @@ -1,132 +0,0 @@ -#!/usr/bin/python3 -import sys -from dataclasses import dataclass, replace -from typing import Any, Union -from collections.abc import Sequence -import json - - -def parse_args(line): - args = json.loads(line) - return [ - int(args["from_step"]), - int(args["num_steps"]), - int(args["conversion_window_limit"]), - str(args["breakdown_attribution_type"]), - str(args["funnel_order_type"]), - args["prop_vals"], # Array(Array(String)) - args["value"], # Array(Tuple(Nullable(Float64), Nullable(DateTime), Array(String), Array(Int8))) - ] - - -@dataclass(frozen=True) -class EnteredTimestamp: - timestamp: Any - timings: Any - - -# each one can be multiple steps here -# it only matters when they entered the funnel - you can propagate the time from the previous step when you update -# This function is defined for Clickhouse in user_defined_functions.xml along with types -# num_steps is the total number of steps in the funnel -# conversion_window_limit is in seconds -# events is a array of tuples of (timestamp, breakdown, [steps]) -# steps is an array of integers which represent the steps that this event qualifies for. it looks like [1,3,5,6]. -# negative integers represent an exclusion on that step. each event is either all exclusions or all steps. 
-def calculate_funnel_trends_from_user_events( - from_step: int, - num_steps: int, - conversion_window_limit_seconds: int, - breakdown_attribution_type: str, - funnel_order_type: str, - prop_vals: list[Any], - events: Sequence[tuple[float, int, Union[list[str], int, str], list[int]]], -): - default_entered_timestamp = EnteredTimestamp(0, []) - # If the attribution mode is a breakdown step, set this to the integer that represents that step - breakdown_step = int(breakdown_attribution_type[5:]) if breakdown_attribution_type.startswith("step_") else None - - # Results is a map of start intervals to success or failure. If an interval isn't here, it means the - # user didn't enter - results = {} - - # We call this for each possible breakdown value. - def loop_prop_val(prop_val): - # we need to track every distinct entry into the funnel through to the end - filtered_events = ( - ( - (timestamp, interval_start, breakdown, steps) - for (timestamp, interval_start, breakdown, steps) in events - if breakdown == prop_val - ) - if breakdown_attribution_type == "all_events" - else events - ) - interval_start_to_entered_timestamps = {} - - for timestamp, interval_start, breakdown, steps in filtered_events: - for step in reversed(steps): - exclusion = False - if step < 0: - exclusion = True - step = -step - # Special code to handle the first step - # Potential Optimization: we could skip tracking here if the user has already completed the funnel for this interval - if step == 1: - if interval_start not in interval_start_to_entered_timestamps and interval_start not in results: - entered_timestamp = [default_entered_timestamp] * (num_steps + 1) - # Set the interval start at 0, which is what we want to return if this works. - # For strict funnels, we need to track if the "from_step" has been hit - # Abuse the timings field on the 0th index entered_timestamp to have the elt True if we have - entered_timestamp[0] = EnteredTimestamp(interval_start, [True] if from_step == 0 else []) - entered_timestamp[1] = EnteredTimestamp(timestamp, [timestamp]) - interval_start_to_entered_timestamps[interval_start] = entered_timestamp - # list_of_entered_timestamps.append(entered_timestamp) - else: - for entered_timestamp in interval_start_to_entered_timestamps.values(): - in_match_window = ( - timestamp - entered_timestamp[step - 1].timestamp <= conversion_window_limit_seconds - ) - already_reached_this_step_with_same_entered_timestamp = ( - entered_timestamp[step].timestamp == entered_timestamp[step - 1].timestamp - ) - if in_match_window and not already_reached_this_step_with_same_entered_timestamp: - if exclusion: - # this is a complete failure, exclude this person, don't print anything, don't count - return False - is_unmatched_step_attribution = ( - breakdown_step is not None and step == breakdown_step - 1 and prop_val != breakdown - ) - if not is_unmatched_step_attribution: - entered_timestamp[step] = replace( - entered_timestamp[step - 1], - timings=[*entered_timestamp[step - 1].timings, timestamp], - ) - # check if we have hit the goal. 
if we have, remove it from the list and add it to the successful_timestamps - if entered_timestamp[num_steps].timestamp > 0: - results[entered_timestamp[0].timestamp] = (1, prop_val) - # If we have hit the from_step threshold, record it (abuse the timings field) - elif step == from_step + 1: - entered_timestamp[0].timings.append(True) - - # At the end of the event, clear all steps that weren't done by that event - if funnel_order_type == "strict": - for entered_timestamp in interval_start_to_entered_timestamps.values(): - for i in range(1, len(entered_timestamp)): - if i not in steps: - entered_timestamp[i] = default_entered_timestamp - - # At this point, everything left in entered_timestamps is a failure, if it has made it to from_step - for entered_timestamp in interval_start_to_entered_timestamps.values(): - if entered_timestamp[0].timestamp not in results and len(entered_timestamp[0].timings) > 0: - results[entered_timestamp[0].timestamp] = (-1, prop_val) - - [loop_prop_val(prop_val) for prop_val in prop_vals] - result = [(interval_start, success_bool, prop_val) for interval_start, (success_bool, prop_val) in results.items()] - print(json.dumps({"result": result}), end="\n") # noqa: T201 - - -if __name__ == "__main__": - for line in sys.stdin: - calculate_funnel_trends_from_user_events(*parse_args(line)) - sys.stdout.flush() diff --git a/posthog/user_scripts/v0/aggregate_funnel b/posthog/user_scripts/v1/aggregate_funnel similarity index 100% rename from posthog/user_scripts/v0/aggregate_funnel rename to posthog/user_scripts/v1/aggregate_funnel diff --git a/posthog/user_scripts/v0/aggregate_funnel_aarch64 b/posthog/user_scripts/v1/aggregate_funnel_aarch64 similarity index 100% rename from posthog/user_scripts/v0/aggregate_funnel_aarch64 rename to posthog/user_scripts/v1/aggregate_funnel_aarch64 diff --git a/posthog/user_scripts/v0/aggregate_funnel_array_trends_test.py b/posthog/user_scripts/v1/aggregate_funnel_array_trends_test.py similarity index 100% rename from posthog/user_scripts/v0/aggregate_funnel_array_trends_test.py rename to posthog/user_scripts/v1/aggregate_funnel_array_trends_test.py diff --git a/posthog/user_scripts/v0/aggregate_funnel_test.py b/posthog/user_scripts/v1/aggregate_funnel_test.py similarity index 100% rename from posthog/user_scripts/v0/aggregate_funnel_test.py rename to posthog/user_scripts/v1/aggregate_funnel_test.py diff --git a/posthog/user_scripts/v0/aggregate_funnel_x86_64 b/posthog/user_scripts/v1/aggregate_funnel_x86_64 similarity index 100% rename from posthog/user_scripts/v0/aggregate_funnel_x86_64 rename to posthog/user_scripts/v1/aggregate_funnel_x86_64 diff --git a/posthog/user_scripts/v1/user_defined_function.xml b/posthog/user_scripts/v1/user_defined_function.xml new file mode 100644 index 0000000000000..42e4452293452 --- /dev/null +++ b/posthog/user_scripts/v1/user_defined_function.xml @@ -0,0 +1,882 @@ + + + executable_pool + aggregate_funnel + Array(Tuple(Int8, Nullable(String), Array(Float64), Array(Array(UUID)))) + result + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Nullable(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UUID, Nullable(String), Array(Int8))) + value + + JSONEachRow + aggregate_funnel steps + 600 + + + + executable_pool + aggregate_funnel_cohort + Array(Tuple(Int8, UInt64, Array(Float64), Array(Array(UUID)))) + result + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + 
breakdown_attribution_type + + + String + funnel_order_type + + + Array(UInt64) + prop_vals + + + Array(Tuple(Nullable(Float64), UUID, UInt64, Array(Int8))) + value + + JSONEachRow + aggregate_funnel steps + 600 + + + + executable_pool + aggregate_funnel_array + Array(Tuple(Int8, Array(String), Array(Float64), Array(Array(UUID)))) + result + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Array(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UUID, Array(String), Array(Int8))) + value + + JSONEachRow + aggregate_funnel steps + 600 + + + + executable_pool + aggregate_funnel_test + String + result + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Array(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UUID, Nullable(String), Array(Int8))) + value + + JSONEachRow + aggregate_funnel_test.py + 600 + + + + executable_pool + aggregate_funnel_trends + Array(Tuple(UInt64, Int8, Nullable(String))) + result + + UInt8 + from_step + + + UInt8 + num_steps + + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Nullable(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UInt64, Nullable(String), Array(Int8))) + value + + JSONEachRow + aggregate_funnel trends + 600 + + + + executable_pool + aggregate_funnel_array_trends + + Array(Tuple(UInt64, Int8, Array(String))) + result + + UInt8 + from_step + + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Array(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UInt64, Array(String), Array(Int8))) + value + + JSONEachRow + aggregate_funnel trends + 600 + + + + executable_pool + aggregate_funnel_cohort_trends + + Array(Tuple(UInt64, Int8, UInt64)) + result + + UInt8 + from_step + + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(UInt64) + prop_vals + + + Array(Tuple(Nullable(Float64), UInt64, UInt64, Array(Int8))) + value + + JSONEachRow + aggregate_funnel trends + 600 + + + + executable_pool + aggregate_funnel_array_trends_test + String + result + + UInt8 + from_step + + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Array(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UInt64, Array(String), Array(Int8))) + value + + JSONEachRow + aggregate_funnel_array_trends_test.py + 600 + + + executable_pool + aggregate_funnel_v0 + Array(Tuple(Int8, Nullable(String), Array(Float64), Array(Array(UUID)))) + result + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Nullable(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UUID, Nullable(String), Array(Int8))) + value + + JSONEachRow + v0/aggregate_funnel steps + 600 + + + + executable_pool + aggregate_funnel_cohort_v0 + Array(Tuple(Int8, UInt64, Array(Float64), Array(Array(UUID)))) + result + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(UInt64) + prop_vals + + + Array(Tuple(Nullable(Float64), UUID, UInt64, 
Array(Int8))) + value + + JSONEachRow + v0/aggregate_funnel steps + 600 + + + + executable_pool + aggregate_funnel_array_v0 + Array(Tuple(Int8, Array(String), Array(Float64), Array(Array(UUID)))) + result + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Array(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UUID, Array(String), Array(Int8))) + value + + JSONEachRow + v0/aggregate_funnel steps + 600 + + + + executable_pool + aggregate_funnel_test_v0 + String + result + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Array(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UUID, Nullable(String), Array(Int8))) + value + + JSONEachRow + v0/aggregate_funnel_test.py + 600 + + + + executable_pool + aggregate_funnel_trends_v0 + Array(Tuple(UInt64, Int8, Nullable(String))) + result + + UInt8 + from_step + + + UInt8 + num_steps + + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Nullable(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UInt64, Nullable(String), Array(Int8))) + value + + JSONEachRow + v0/aggregate_funnel trends + 600 + + + + executable_pool + aggregate_funnel_array_trends_v0 + + Array(Tuple(UInt64, Int8, Array(String))) + result + + UInt8 + from_step + + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Array(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UInt64, Array(String), Array(Int8))) + value + + JSONEachRow + v0/aggregate_funnel trends + 600 + + + + executable_pool + aggregate_funnel_cohort_trends_v0 + + Array(Tuple(UInt64, Int8, UInt64)) + result + + UInt8 + from_step + + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(UInt64) + prop_vals + + + Array(Tuple(Nullable(Float64), UInt64, UInt64, Array(Int8))) + value + + JSONEachRow + v0/aggregate_funnel trends + 600 + + + + executable_pool + aggregate_funnel_array_trends_test_v0 + String + result + + UInt8 + from_step + + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Array(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UInt64, Array(String), Array(Int8))) + value + + JSONEachRow + v0/aggregate_funnel_array_trends_test.py + 600 + + + executable_pool + aggregate_funnel_v1 + Array(Tuple(Int8, Nullable(String), Array(Float64), Array(Array(UUID)))) + result + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Nullable(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UUID, Nullable(String), Array(Int8))) + value + + JSONEachRow + v1/aggregate_funnel steps + 600 + + + + executable_pool + aggregate_funnel_cohort_v1 + Array(Tuple(Int8, UInt64, Array(Float64), Array(Array(UUID)))) + result + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(UInt64) + prop_vals + + + Array(Tuple(Nullable(Float64), UUID, UInt64, Array(Int8))) + value + + JSONEachRow + v1/aggregate_funnel steps + 600 + + + + executable_pool + 
aggregate_funnel_array_v1 + Array(Tuple(Int8, Array(String), Array(Float64), Array(Array(UUID)))) + result + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Array(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UUID, Array(String), Array(Int8))) + value + + JSONEachRow + v1/aggregate_funnel steps + 600 + + + + executable_pool + aggregate_funnel_test_v1 + String + result + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Array(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UUID, Nullable(String), Array(Int8))) + value + + JSONEachRow + v1/aggregate_funnel_test.py + 600 + + + + executable_pool + aggregate_funnel_trends_v1 + Array(Tuple(UInt64, Int8, Nullable(String))) + result + + UInt8 + from_step + + + UInt8 + num_steps + + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Nullable(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UInt64, Nullable(String), Array(Int8))) + value + + JSONEachRow + v1/aggregate_funnel trends + 600 + + + + executable_pool + aggregate_funnel_array_trends_v1 + + Array(Tuple(UInt64, Int8, Array(String))) + result + + UInt8 + from_step + + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Array(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UInt64, Array(String), Array(Int8))) + value + + JSONEachRow + v1/aggregate_funnel trends + 600 + + + + executable_pool + aggregate_funnel_cohort_trends_v1 + + Array(Tuple(UInt64, Int8, UInt64)) + result + + UInt8 + from_step + + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(UInt64) + prop_vals + + + Array(Tuple(Nullable(Float64), UInt64, UInt64, Array(Int8))) + value + + JSONEachRow + v1/aggregate_funnel trends + 600 + + + + executable_pool + aggregate_funnel_array_trends_test_v1 + String + result + + UInt8 + from_step + + + UInt8 + num_steps + + + UInt64 + conversion_window_limit + + + String + breakdown_attribution_type + + + String + funnel_order_type + + + Array(Array(String)) + prop_vals + + + Array(Tuple(Nullable(Float64), UInt64, Array(String), Array(Int8))) + value + + JSONEachRow + v1/aggregate_funnel_array_trends_test.py + 600 + + \ No newline at end of file diff --git a/posthog/user_scripts/v2/aggregate_funnel b/posthog/user_scripts/v2/aggregate_funnel new file mode 100755 index 0000000000000..e62dd751484cb --- /dev/null +++ b/posthog/user_scripts/v2/aggregate_funnel @@ -0,0 +1,9 @@ +#!/bin/sh + +DIR_NAME=$(dirname "$0") + +case $( uname -m ) in +aarch64) $DIR_NAME/aggregate_funnel_aarch64 "$@";; +*) $DIR_NAME/aggregate_funnel_x86_64 "$@";; +esac + diff --git a/posthog/user_scripts/v2/aggregate_funnel_aarch64 b/posthog/user_scripts/v2/aggregate_funnel_aarch64 new file mode 100755 index 0000000000000..2cf3ee037fe3a Binary files /dev/null and b/posthog/user_scripts/v2/aggregate_funnel_aarch64 differ diff --git a/posthog/user_scripts/v2/aggregate_funnel_array_trends_test.py b/posthog/user_scripts/v2/aggregate_funnel_array_trends_test.py new file mode 100755 index 0000000000000..cf6ab4e33741b --- /dev/null +++ b/posthog/user_scripts/v2/aggregate_funnel_array_trends_test.py @@ -0,0 +1,13 @@ 
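The XML above registers each `aggregate_funnel*` entry as an `executable_pool` UDF that streams rows to the named command over stdin in `JSONEachRow` format, and the new `v2/aggregate_funnel` wrapper simply picks the `aarch64` or `x86_64` binary based on `uname -m`. A minimal sketch of the line protocol those scripts follow, assuming one JSON object per input row and an echoed payload (the real scripts parse funnel arguments and compute results; this is only the shape of the contract, not the shipped implementation):

```
#!/usr/bin/python3
# Sketch of the JSONEachRow contract used by these executable_pool UDFs.
# Assumption: each stdin line is one JSON object keyed by the argument names
# configured in the XML; the real scripts do funnel work, this just echoes.
import json
import sys

if __name__ == "__main__":
    for line in sys.stdin:
        row = json.loads(line)              # one input row per line
        print(json.dumps({"result": row}))  # one {"result": ...} object per row
        sys.stdout.flush()                  # flush so ClickHouse gets the answer promptly
```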
+#!/usr/bin/python3 + +import sys +import json + +if __name__ == "__main__": + for line in sys.stdin: + try: + print(json.dumps({"result": line})) # noqa: T201 + # calculate_funnel_trends_from_user_events(*parse_args(line)) + except Exception as e: + print(json.dumps({"result": json.dumps(str(e))}), end="\n") # noqa: T201 + sys.stdout.flush() diff --git a/posthog/user_scripts/v2/aggregate_funnel_test.py b/posthog/user_scripts/v2/aggregate_funnel_test.py new file mode 100755 index 0000000000000..1eae7c9f36914 --- /dev/null +++ b/posthog/user_scripts/v2/aggregate_funnel_test.py @@ -0,0 +1,14 @@ +#!/usr/bin/python3 +import json + +import sys +import traceback + +if __name__ == "__main__": + for line in sys.stdin: + try: + # calculate_funnel_from_user_events(*parse_args(line)) + print(json.dumps({"result": line})) # noqa: T201 + except Exception as e: + print(json.dumps({"result": json.dumps(str(e) + traceback.format_exc())}), end="\n") # noqa: T201 + sys.stdout.flush() diff --git a/posthog/user_scripts/v2/aggregate_funnel_x86_64 b/posthog/user_scripts/v2/aggregate_funnel_x86_64 new file mode 100755 index 0000000000000..030bbc0a3fe00 Binary files /dev/null and b/posthog/user_scripts/v2/aggregate_funnel_x86_64 differ diff --git a/posthog/user_scripts/v0/user_defined_function.xml b/posthog/user_scripts/v2/user_defined_function.xml similarity index 87% rename from posthog/user_scripts/v0/user_defined_function.xml rename to posthog/user_scripts/v2/user_defined_function.xml index 67cccc41242c2..c84d6f05a9722 100644 --- a/posthog/user_scripts/v0/user_defined_function.xml +++ b/posthog/user_scripts/v2/user_defined_function.xml @@ -1,8 +1,8 @@ - executable_pool - aggregate_funnel + aggregate_funnel_v1 Array(Tuple(Int8, Nullable(String), Array(Float64), Array(Array(UUID)))) result @@ -30,13 +30,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - aggregate_funnel steps + v1/aggregate_funnel steps 600 executable_pool - aggregate_funnel_cohort + aggregate_funnel_cohort_v1 Array(Tuple(Int8, UInt64, Array(Float64), Array(Array(UUID)))) result @@ -64,13 +64,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - aggregate_funnel steps + v1/aggregate_funnel steps 600 executable_pool - aggregate_funnel_array + aggregate_funnel_array_v1 Array(Tuple(Int8, Array(String), Array(Float64), Array(Array(UUID)))) result @@ -98,13 +98,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - aggregate_funnel steps + v1/aggregate_funnel steps 600 executable_pool - aggregate_funnel_test + aggregate_funnel_test_v1 String result @@ -132,13 +132,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - aggregate_funnel_test.py + v1/aggregate_funnel_test.py 600 executable_pool - aggregate_funnel_trends + aggregate_funnel_trends_v1 Array(Tuple(UInt64, Int8, Nullable(String))) result @@ -174,13 +174,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - aggregate_funnel trends + v1/aggregate_funnel trends 600 executable_pool - aggregate_funnel_array_trends + aggregate_funnel_array_trends_v1 Array(Tuple(UInt64, Int8, Array(String))) result @@ -213,13 +213,13 @@ This file is autogenerated by udf_versioner.py. 
Do not edit this, only edit the value JSONEachRow - aggregate_funnel trends + v1/aggregate_funnel trends 600 executable_pool - aggregate_funnel_cohort_trends + aggregate_funnel_cohort_trends_v1 Array(Tuple(UInt64, Int8, UInt64)) result @@ -252,13 +252,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - aggregate_funnel trends + v1/aggregate_funnel trends 600 executable_pool - aggregate_funnel_array_trends_test + aggregate_funnel_array_trends_test_v1 String result @@ -290,12 +290,12 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - aggregate_funnel_array_trends_test.py + v1/aggregate_funnel_array_trends_test.py 600 executable_pool - aggregate_funnel_v0 + aggregate_funnel_v2 Array(Tuple(Int8, Nullable(String), Array(Float64), Array(Array(UUID)))) result @@ -323,13 +323,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - v0/aggregate_funnel steps + v2/aggregate_funnel steps 600 executable_pool - aggregate_funnel_cohort_v0 + aggregate_funnel_cohort_v2 Array(Tuple(Int8, UInt64, Array(Float64), Array(Array(UUID)))) result @@ -357,13 +357,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - v0/aggregate_funnel steps + v2/aggregate_funnel steps 600 executable_pool - aggregate_funnel_array_v0 + aggregate_funnel_array_v2 Array(Tuple(Int8, Array(String), Array(Float64), Array(Array(UUID)))) result @@ -391,13 +391,13 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - v0/aggregate_funnel steps + v2/aggregate_funnel steps 600 executable_pool - aggregate_funnel_test_v0 + aggregate_funnel_test_v2 String result @@ -425,14 +425,14 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the value JSONEachRow - v0/aggregate_funnel_test.py + v2/aggregate_funnel_test.py 600 executable_pool - aggregate_funnel_trends_v0 - Array(Tuple(UInt64, Int8, Nullable(String))) + aggregate_funnel_trends_v2 + Array(Tuple(UInt64, Int8, Nullable(String), UUID)) result UInt8 @@ -463,19 +463,19 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the prop_vals - Array(Tuple(Nullable(Float64), UInt64, Nullable(String), Array(Int8))) + Array(Tuple(Nullable(Float64), UInt64, UUID, Nullable(String), Array(Int8))) value JSONEachRow - v0/aggregate_funnel trends + v2/aggregate_funnel trends 600 executable_pool - aggregate_funnel_array_trends_v0 + aggregate_funnel_array_trends_v2 - Array(Tuple(UInt64, Int8, Array(String))) + Array(Tuple(UInt64, Int8, Array(String), UUID)) result UInt8 @@ -502,19 +502,19 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the prop_vals - Array(Tuple(Nullable(Float64), UInt64, Array(String), Array(Int8))) + Array(Tuple(Nullable(Float64), UInt64, UUID, Array(String), Array(Int8))) value JSONEachRow - v0/aggregate_funnel trends + v2/aggregate_funnel trends 600 executable_pool - aggregate_funnel_cohort_trends_v0 + aggregate_funnel_cohort_trends_v2 - Array(Tuple(UInt64, Int8, UInt64)) + Array(Tuple(UInt64, Int8, UInt64, UUID)) result UInt8 @@ -541,17 +541,17 @@ This file is autogenerated by udf_versioner.py. 
Do not edit this, only edit the prop_vals - Array(Tuple(Nullable(Float64), UInt64, UInt64, Array(Int8))) + Array(Tuple(Nullable(Float64), UInt64, UUID, UInt64, Array(Int8))) value JSONEachRow - v0/aggregate_funnel trends + v2/aggregate_funnel trends 600 executable_pool - aggregate_funnel_array_trends_test_v0 + aggregate_funnel_array_trends_test_v2 String result @@ -579,11 +579,11 @@ This file is autogenerated by udf_versioner.py. Do not edit this, only edit the prop_vals - Array(Tuple(Nullable(Float64), UInt64, Array(String), Array(Int8))) + Array(Tuple(Nullable(Float64), UInt64, UUID, Array(String), Array(Int8))) value JSONEachRow - v0/aggregate_funnel_array_trends_test.py + v2/aggregate_funnel_array_trends_test.py 600 \ No newline at end of file diff --git a/posthog/utils.py b/posthog/utils.py index 7db447e77c82e..7535df0700638 100644 --- a/posthog/utils.py +++ b/posthog/utils.py @@ -175,8 +175,14 @@ def relative_date_parse_with_delta_mapping( *, always_truncate: bool = False, now: Optional[datetime.datetime] = None, + increase: bool = False, ) -> tuple[datetime.datetime, Optional[dict[str, int]], str | None]: - """Returns the parsed datetime, along with the period mapping - if the input was a relative datetime string.""" + """ + Returns the parsed datetime, along with the period mapping - if the input was a relative datetime string. + + :increase controls whether to add relative delta to the current time or subtract + Should later control this using +/- infront of the input regex + """ try: try: # This supports a few formats, but we primarily care about: @@ -245,9 +251,13 @@ def relative_date_parse_with_delta_mapping( delta_mapping["month"] = 1 delta_mapping["day"] = 1 elif match.group("position") == "End": - delta_mapping["month"] = 12 delta_mapping["day"] = 31 - parsed_dt -= relativedelta(**delta_mapping) # type: ignore + + if increase: + parsed_dt += relativedelta(**delta_mapping) # type: ignore + else: + parsed_dt -= relativedelta(**delta_mapping) # type: ignore + if always_truncate: # Truncate to the start of the hour for hour-precision datetimes, to the start of the day for larger intervals # TODO: Remove this from this function, this should not be the responsibility of it @@ -264,8 +274,11 @@ def relative_date_parse( *, always_truncate: bool = False, now: Optional[datetime.datetime] = None, + increase: bool = False, ) -> datetime.datetime: - return relative_date_parse_with_delta_mapping(input, timezone_info, always_truncate=always_truncate, now=now)[0] + return relative_date_parse_with_delta_mapping( + input, timezone_info, always_truncate=always_truncate, now=now, increase=increase + )[0] def get_js_url(request: HttpRequest) -> str: @@ -1068,6 +1081,20 @@ def filters_override_requested_by_client(request: Request) -> Optional[dict]: return None +def variables_override_requested_by_client(request: Request) -> Optional[dict[str, dict]]: + raw_variables = request.query_params.get("variables_override") + + if raw_variables is not None: + try: + return json.loads(raw_variables) + except Exception: + raise serializers.ValidationError( + {"variables_override": "Invalid JSON passed in variables_override parameter"} + ) + + return None + + def _request_has_key_set(key: str, request: Request, allowed_values: Optional[list[str]] = None) -> bool | str: query_param = request.query_params.get(key) data_value = request.data.get(key) diff --git a/posthog/warehouse/README.md b/posthog/warehouse/README.md index 724bcc677d825..877f15da8175c 100644 --- a/posthog/warehouse/README.md +++ 
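The `posthog/utils.py` hunk above threads a new `increase` flag through `relative_date_parse_with_delta_mapping` and `relative_date_parse`, so the parsed relative delta can be added to `now` instead of subtracted. A hedged usage sketch, assuming a `-7d` style input and a UTC zone (both illustrative values, not taken from the diff):

```
from zoneinfo import ZoneInfo

from posthog.utils import relative_date_parse

tz = ZoneInfo("UTC")
week_ago = relative_date_parse("-7d", tz)                   # default: delta subtracted from now
week_ahead = relative_date_parse("-7d", tz, increase=True)  # increase=True: same delta added instead
```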
b/posthog/warehouse/README.md @@ -13,3 +13,9 @@ Without this, you'll get the following error when connecting a SQL database to d ``` symbol not found in flat namespace '_bcp_batch' ``` + +If the issue persists, install from source without cache again + +``` +pip install --pre --no-binary :all: pymssql --no-cache +``` diff --git a/posthog/warehouse/api/external_data_source.py b/posthog/warehouse/api/external_data_source.py index 4d7c508dce2de..d954323a9976e 100644 --- a/posthog/warehouse/api/external_data_source.py +++ b/posthog/warehouse/api/external_data_source.py @@ -153,6 +153,7 @@ class Meta: "prefix", "last_run_at", "schemas", + "job_inputs", ] read_only_fields = [ "id", @@ -690,21 +691,16 @@ def destroy(self, request: Request, *args: Any, **kwargs: Any) -> Response: if latest_running_job and latest_running_job.workflow_id and latest_running_job.status == "Running": cancel_external_data_workflow(latest_running_job.workflow_id) - all_jobs = ExternalDataJob.objects.filter( - pipeline_id=instance.pk, team_id=instance.team_id, status="Completed" - ).all() - for job in all_jobs: - try: - delete_data_import_folder(job.folder_path()) - except Exception as e: - logger.exception(f"Could not clean up data import folder: {job.folder_path()}", exc_info=e) - pass - for schema in ( ExternalDataSchema.objects.exclude(deleted=True) .filter(team_id=self.team_id, source_id=instance.id, should_sync=True) .all() ): + try: + delete_data_import_folder(schema.folder_path()) + except Exception as e: + logger.exception(f"Could not clean up data import folder: {schema.folder_path()}", exc_info=e) + pass delete_external_data_schedule(str(schema.id)) delete_external_data_schedule(str(instance.id)) diff --git a/posthog/warehouse/api/saved_query.py b/posthog/warehouse/api/saved_query.py index ac8c8e53022dd..2d8ef156aa6b0 100644 --- a/posthog/warehouse/api/saved_query.py +++ b/posthog/warehouse/api/saved_query.py @@ -4,6 +4,7 @@ import structlog from asgiref.sync import async_to_sync from django.db import transaction +from django.db.models import Q from rest_framework import exceptions, filters, request, response, serializers, status, viewsets from rest_framework.decorators import action @@ -155,8 +156,11 @@ def safely_get_queryset(self, queryset): def destroy(self, request: request.Request, *args: Any, **kwargs: Any) -> response.Response: instance: DataWarehouseSavedQuery = self.get_object() - DataWarehouseJoin.objects.filter(source_table_name=instance.name).delete() - DataWarehouseJoin.objects.filter(joining_table_name=instance.name).delete() + + for join in DataWarehouseJoin.objects.filter( + Q(team_id=instance.team_id) & (Q(source_table_name=instance.name) | Q(joining_table_name=instance.name)) + ).exclude(deleted=True): + join.soft_delete() if instance.table is not None: instance.table.soft_delete() diff --git a/posthog/warehouse/api/test/test_external_data_source.py b/posthog/warehouse/api/test/test_external_data_source.py index 8a455a7b89883..11d8569b93c51 100644 --- a/posthog/warehouse/api/test/test_external_data_source.py +++ b/posthog/warehouse/api/test/test_external_data_source.py @@ -400,6 +400,7 @@ def test_get_external_data_source_with_schema(self): "prefix", "last_run_at", "schemas", + "job_inputs", ], ) self.assertEqual( diff --git a/posthog/warehouse/data_load/source_templates.py b/posthog/warehouse/data_load/source_templates.py index 17dd1b7b9fecd..5a7d515bc8536 100644 --- a/posthog/warehouse/data_load/source_templates.py +++ b/posthog/warehouse/data_load/source_templates.py @@ -7,23 +7,31 @@ 
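The `destroy` change above now cleans up one data import folder per schema rather than per completed job; the folder name comes from the `folder_path()` helper the next hunks add to `ExternalDataSchema`. A small sketch of that naming scheme, with a made-up team id and source type for illustration:

```
import uuid

def schema_folder_path(team_id: int, source_type: str, schema_id: uuid.UUID) -> str:
    # Mirrors ExternalDataSchema.folder_path(): lowercased, dashes replaced with underscores
    return f"team_{team_id}_{source_type}_{str(schema_id)}".lower().replace("-", "_")

# e.g. schema_folder_path(2, "Stripe", uuid.uuid4()) -> "team_2_stripe_<uuid with underscores>"
```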
@database_sync_to_async def database_operations(team_id: int, table_prefix: str) -> None: - customer_join_exists = DataWarehouseJoin.objects.filter( - team_id=team_id, - source_table_name="persons", - source_table_key="properties.email", - joining_table_name=f"{table_prefix}stripe_customer", - joining_table_key="email", - field_name=f"{table_prefix}stripe_customer", - ).exists() + customer_join_exists = ( + DataWarehouseJoin.objects.filter( + team_id=team_id, + source_table_name="persons", + source_table_key="properties.email", + joining_table_name=f"{table_prefix}stripe_customer", + joining_table_key="email", + field_name=f"{table_prefix}stripe_customer", + ) + .exclude(deleted=True) + .exists() + ) - invoice_join_exists = DataWarehouseJoin.objects.filter( - team_id=team_id, - source_table_name="persons", - source_table_key="properties.email", - joining_table_name=f"{table_prefix}stripe_invoice", - joining_table_key="customer_email", - field_name=f"{table_prefix}stripe_invoice", - ).exists() + invoice_join_exists = ( + DataWarehouseJoin.objects.filter( + team_id=team_id, + source_table_name="persons", + source_table_key="properties.email", + joining_table_name=f"{table_prefix}stripe_invoice", + joining_table_key="customer_email", + field_name=f"{table_prefix}stripe_invoice", + ) + .exclude(deleted=True) + .exists() + ) if not customer_join_exists: DataWarehouseJoin.objects.create( diff --git a/posthog/warehouse/models/external_data_job.py b/posthog/warehouse/models/external_data_job.py index 3b85c70029405..49dd8023faa7e 100644 --- a/posthog/warehouse/models/external_data_job.py +++ b/posthog/warehouse/models/external_data_job.py @@ -29,7 +29,10 @@ class Status(models.TextChoices): __repr__ = sane_repr("id") def folder_path(self) -> str: - return f"team_{self.team_id}_{self.pipeline.source_type}_{str(self.schema_id)}".lower().replace("-", "_") + if self.schema: + return self.schema.folder_path() + else: + raise ValueError("Job does not have a schema") def deprecated_folder_path(self) -> str: return f"team_{self.team_id}_{self.pipeline.source_type}_{str(self.pk)}".lower().replace("-", "_") diff --git a/posthog/warehouse/models/external_data_schema.py b/posthog/warehouse/models/external_data_schema.py index 0ae4d5420201a..c90a5c2e472bb 100644 --- a/posthog/warehouse/models/external_data_schema.py +++ b/posthog/warehouse/models/external_data_schema.py @@ -60,6 +60,9 @@ class SyncFrequency(models.TextChoices): __repr__ = sane_repr("name") + def folder_path(self) -> str: + return f"team_{self.team_id}_{self.source.source_type}_{str(self.id)}".lower().replace("-", "_") + @property def is_incremental(self): return self.sync_type == self.SyncType.INCREMENTAL @@ -129,7 +132,7 @@ def get_all_schemas_for_source_id(source_id: uuid.UUID, team_id: int): return list(ExternalDataSchema.objects.exclude(deleted=True).filter(team_id=team_id, source_id=source_id).all()) -def sync_old_schemas_with_new_schemas(new_schemas: list, source_id: uuid.UUID, team_id: int): +def sync_old_schemas_with_new_schemas(new_schemas: list[str], source_id: uuid.UUID, team_id: int) -> list[str]: old_schemas = get_all_schemas_for_source_id(source_id=source_id, team_id=team_id) old_schemas_names = [schema.name for schema in old_schemas] @@ -138,6 +141,8 @@ def sync_old_schemas_with_new_schemas(new_schemas: list, source_id: uuid.UUID, t for schema in schemas_to_create: ExternalDataSchema.objects.create(name=schema, team_id=team_id, source_id=source_id, should_sync=False) + return schemas_to_create + def 
sync_frequency_to_sync_frequency_interval(frequency: str) -> timedelta: if frequency == "5min": diff --git a/posthog/warehouse/models/join.py b/posthog/warehouse/models/join.py index 000b1ba34f9b2..febbf0182f1ca 100644 --- a/posthog/warehouse/models/join.py +++ b/posthog/warehouse/models/join.py @@ -1,6 +1,6 @@ from typing import Optional from warnings import warn - +from datetime import datetime from django.db import models from posthog.hogql.ast import SelectQuery @@ -41,6 +41,11 @@ class DataWarehouseJoin(CreatedMetaFields, UUIDModel, DeletedMetaFields): joining_table_key = models.CharField(max_length=400) field_name = models.CharField(max_length=400) + def soft_delete(self): + self.deleted = True + self.deleted_at = datetime.now() + self.save() + def join_function( self, override_source_table_key: Optional[str] = None, override_joining_table_key: Optional[str] = None ): diff --git a/posthog/warehouse/models/table.py b/posthog/warehouse/models/table.py index 928ca1596456c..a83cf2735f56e 100644 --- a/posthog/warehouse/models/table.py +++ b/posthog/warehouse/models/table.py @@ -104,8 +104,10 @@ class TableFormat(models.TextChoices): def soft_delete(self): from posthog.warehouse.models.join import DataWarehouseJoin - DataWarehouseJoin.objects.filter(source_table_name=self.name).delete() - DataWarehouseJoin.objects.filter(joining_table_name=self.name).delete() + for join in DataWarehouseJoin.objects.filter( + Q(team_id=self.team.pk) & (Q(source_table_name=self.name) | Q(joining_table_name=self.name)) + ).exclude(deleted=True): + join.soft_delete() self.deleted = True self.deleted_at = datetime.now() diff --git a/posthog/year_in_posthog/2023.html b/posthog/year_in_posthog/2023.html index 113ec1730c381..5604fb0c8fbcb 100644 --- a/posthog/year_in_posthog/2023.html +++ b/posthog/year_in_posthog/2023.html @@ -20,7 +20,7 @@
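The `join.py` and `table.py` hunks above replace hard deletes with a `soft_delete()` that stamps `deleted`/`deleted_at`, and callers now filter to live joins for the team before touching them. A hedged sketch of that pattern as it appears to be used here (the helper function name is illustrative, not from the diff):

```
from django.db.models import Q

from posthog.warehouse.models.join import DataWarehouseJoin

def soft_delete_joins_for_table(team_id: int, table_name: str) -> None:
    # Only live joins for this team, whether the table is the source or the joining side
    for join in DataWarehouseJoin.objects.filter(
        Q(team_id=team_id) & (Q(source_table_name=table_name) | Q(joining_table_name=table_name))
    ).exclude(deleted=True):
        join.soft_delete()  # sets deleted=True and deleted_at=datetime.now(), then saves
```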