diff --git a/.eslintrc.js b/.eslintrc.js index fb43d7405b68f..7aa24b651e2d9 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -130,6 +130,7 @@ module.exports = { 'Radio', 'Divider', 'Popconfirm', + 'Table', ], message: 'please use the Lemon equivalent instead', }, diff --git a/cypress/e2e/experiments.cy.ts b/cypress/e2e/experiments.cy.ts index 77f42c50ccba7..fffd8120d8206 100644 --- a/cypress/e2e/experiments.cy.ts +++ b/cypress/e2e/experiments.cy.ts @@ -54,8 +54,8 @@ describe('Experiments', () => { // Select goal type cy.get('[data-attr="experiment-goal-type-select"]').click() - cy.contains('Trend').should('be.visible') - cy.contains('Conversion funnel').should('be.visible') + cy.get('.Popover__content').contains('Trend').should('be.visible') + cy.get('.Popover__content').contains('Conversion funnel').should('be.visible') // Add secondary metric const secondaryMetricName = `Secondary metric ${Math.floor(Math.random() * 10000000)}` @@ -65,8 +65,8 @@ describe('Experiments', () => { .type(secondaryMetricName) .should('have.value', secondaryMetricName) cy.get('[data-attr="metrics-selector"]').click() - cy.contains('Trends').should('be.visible') - cy.contains('Funnels').should('be.visible') + cy.get('.Popover__content').contains('Funnels').should('be.visible') + cy.get('.Popover__content').contains('Trends').should('be.visible') cy.get('[data-attr="create-annotation-submit"]').click() cy.contains(secondaryMetricName).should('exist') diff --git a/ee/api/test/test_team.py b/ee/api/test/test_team.py index 22ac5c9b17f56..4df41f7d91fa0 100644 --- a/ee/api/test/test_team.py +++ b/ee/api/test/test_team.py @@ -226,7 +226,7 @@ def test_rename_private_project_as_org_member_forbidden(self): self.team.refresh_from_db() self.assertEqual(response.status_code, HTTP_403_FORBIDDEN) - self.assertEqual(self.team.name, "Default Project") + self.assertEqual(self.team.name, "Default project") def test_rename_private_project_current_as_org_outsider_forbidden(self): self.organization_membership.delete() @@ -368,7 +368,7 @@ def test_fetch_team_as_org_admin_works(self): self.assertEqual(response.status_code, HTTP_200_OK) self.assertDictContainsSubset( { - "name": "Default Project", + "name": "Default project", "access_control": False, "effective_membership_level": OrganizationMembership.Level.ADMIN, }, @@ -385,7 +385,7 @@ def test_fetch_team_as_org_member_works(self): self.assertEqual(response.status_code, HTTP_200_OK) self.assertDictContainsSubset( { - "name": "Default Project", + "name": "Default project", "access_control": False, "effective_membership_level": OrganizationMembership.Level.MEMBER, }, @@ -424,7 +424,7 @@ def test_fetch_private_team_as_org_member_and_project_member(self): self.assertEqual(response.status_code, HTTP_200_OK) self.assertDictContainsSubset( { - "name": "Default Project", + "name": "Default project", "access_control": True, "effective_membership_level": OrganizationMembership.Level.MEMBER, }, @@ -448,7 +448,7 @@ def test_fetch_private_team_as_org_member_and_project_admin(self): self.assertEqual(response.status_code, HTTP_200_OK) self.assertDictContainsSubset( { - "name": "Default Project", + "name": "Default project", "access_control": True, "effective_membership_level": OrganizationMembership.Level.ADMIN, }, diff --git a/frontend/__snapshots__/components-cards-insight-card--insight-card--dark.png b/frontend/__snapshots__/components-cards-insight-card--insight-card--dark.png index 39e6f1913dd58..cde356c5dc702 100644 Binary files 
a/frontend/__snapshots__/components-cards-insight-card--insight-card--dark.png and b/frontend/__snapshots__/components-cards-insight-card--insight-card--dark.png differ diff --git a/frontend/__snapshots__/insights-funnelcorrelationtable--default--dark.png b/frontend/__snapshots__/insights-funnelcorrelationtable--default--dark.png new file mode 100644 index 0000000000000..84a18b7fb79c3 Binary files /dev/null and b/frontend/__snapshots__/insights-funnelcorrelationtable--default--dark.png differ diff --git a/frontend/__snapshots__/insights-funnelcorrelationtable--default--light.png b/frontend/__snapshots__/insights-funnelcorrelationtable--default--light.png new file mode 100644 index 0000000000000..e4be6621b8608 Binary files /dev/null and b/frontend/__snapshots__/insights-funnelcorrelationtable--default--light.png differ diff --git a/frontend/__snapshots__/insights-funnelpropertycorrelationtable--default--dark.png b/frontend/__snapshots__/insights-funnelpropertycorrelationtable--default--dark.png new file mode 100644 index 0000000000000..9985decbb9d50 Binary files /dev/null and b/frontend/__snapshots__/insights-funnelpropertycorrelationtable--default--dark.png differ diff --git a/frontend/__snapshots__/insights-funnelpropertycorrelationtable--default--light.png b/frontend/__snapshots__/insights-funnelpropertycorrelationtable--default--light.png new file mode 100644 index 0000000000000..130a1b2035d9f Binary files /dev/null and b/frontend/__snapshots__/insights-funnelpropertycorrelationtable--default--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--user-paths--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--user-paths--light--webkit.png index 9d45290bfa61a..39ca8a9d25273 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--user-paths--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--user-paths--light--webkit.png differ diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts index 477ea62ab3826..3b49c3be4f032 100644 --- a/frontend/src/lib/api.ts +++ b/frontend/src/lib/api.ts @@ -1928,13 +1928,10 @@ const api = { password: string, schema: string ): Promise { - const queryParams = toParams({ host, port, dbname, user, password, schema }) - return await new ApiRequest() .externalDataSources() .withAction('database_schema') - .withQueryString(queryParams) - .get() + .create({ data: { host, port, dbname, user, password, schema } }) }, }, diff --git a/frontend/src/lib/components/SceneDashboardChoice/sceneDashboardChoiceModalLogic.ts b/frontend/src/lib/components/SceneDashboardChoice/sceneDashboardChoiceModalLogic.ts index 171a80749d209..944e4ef450f25 100644 --- a/frontend/src/lib/components/SceneDashboardChoice/sceneDashboardChoiceModalLogic.ts +++ b/frontend/src/lib/components/SceneDashboardChoice/sceneDashboardChoiceModalLogic.ts @@ -1,7 +1,7 @@ import Fuse from 'fuse.js' import { actions, connect, kea, key, listeners, path, props, reducers, selectors } from 'kea' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' -import { posthog } from 'posthog-js' +import posthog from 'posthog-js' import { Scene } from 'scenes/sceneTypes' import { teamLogic } from 'scenes/teamLogic' import { userLogic } from 'scenes/userLogic' diff --git a/frontend/src/mocks/fixtures/api/projects/team_id/insights/funnelCorrelation.json b/frontend/src/mocks/fixtures/api/projects/team_id/insights/funnelCorrelation.json new file mode 100644 index 0000000000000..8cec658c64f97 --- /dev/null +++ 
b/frontend/src/mocks/fixtures/api/projects/team_id/insights/funnelCorrelation.json @@ -0,0 +1,48 @@ +{ + "result": { + "events": [ + { + "success_count": 472, + "success_people_url": "http://localhost:8000/api/person/funnel/correlation/?breakdown_attribution_type=first_touch&breakdown_normalize_url=False&date_from=-7d&display=FunnelViz", + "failure_count": 9, + "failure_people_url": "http://localhost:8000/api/person/funnel/correlation/?breakdown_attribution_type=first_touch&breakdown_normalize_url=False&date_from=-7d&display=FunnelViz", + "odds_ratio": 3006.964172813488, + "correlation_type": "success", + "event": { + "event": "score updated", + "properties": {}, + "elements": [] + } + }, + { + "success_count": 60, + "success_people_url": "http://localhost:8000/api/person/funnel/correlation/?breakdown_attribution_type=first_touch&breakdown_normalize_url=False&date_from=-7d&display=FunnelViz", + "failure_count": 0, + "failure_people_url": "http://localhost:8000/api/person/funnel/correlation/?breakdown_attribution_type=first_touch&breakdown_normalize_url=False&date_from=-7d&display=FunnelViz", + "odds_ratio": 2750.7485294117646, + "correlation_type": "success", + "event": { + "event": "onboard teammate clicked", + "properties": {}, + "elements": [] + } + }, + { + "success_count": 65, + "success_people_url": "http://localhost:8000/api/person/funnel/correlation/?breakdown_attribution_type=first_touch&breakdown_normalize_url=False&date_from=-7d&display=FunnelViz", + "failure_count": 75, + "failure_people_url": "http://localhost:8000/api/person/funnel/correlation/?breakdown_attribution_type=first_touch&breakdown_normalize_url=False&date_from=-7d&display=FunnelViz", + "odds_ratio": 35.41070956534874, + "correlation_type": "failure", + "event": { + "event": "cancellation prompt shown", + "properties": {}, + "elements": [] + } + } + ], + "skewed": true + }, + "last_refresh": "2024-03-11T18:44:25.153943Z", + "is_cached": false +} diff --git a/frontend/src/queries/schema.json b/frontend/src/queries/schema.json index 250b62893c602..b14394a66b79c 100644 --- a/frontend/src/queries/schema.json +++ b/frontend/src/queries/schema.json @@ -5443,6 +5443,9 @@ } }, "type": "object" + }, + "useSessionsTable": { + "type": "boolean" } }, "required": ["properties"], @@ -5504,6 +5507,9 @@ } }, "type": "object" + }, + "useSessionsTable": { + "type": "boolean" } }, "required": ["kind", "properties"], @@ -5605,6 +5611,9 @@ } }, "type": "object" + }, + "useSessionsTable": { + "type": "boolean" } }, "required": ["breakdownBy", "kind", "properties"], @@ -5686,6 +5695,9 @@ } }, "type": "object" + }, + "useSessionsTable": { + "type": "boolean" } }, "required": ["kind", "properties"], diff --git a/frontend/src/queries/schema.ts b/frontend/src/queries/schema.ts index 831bb80589d78..9ea72e098c8bb 100644 --- a/frontend/src/queries/schema.ts +++ b/frontend/src/queries/schema.ts @@ -969,6 +969,7 @@ export interface WebAnalyticsQueryBase { enabled?: boolean forceSamplingRate?: SamplingRate } + useSessionsTable?: boolean } export interface WebOverviewQuery extends WebAnalyticsQueryBase { diff --git a/frontend/src/scenes/data-warehouse/ViewLinkModal.tsx b/frontend/src/scenes/data-warehouse/ViewLinkModal.tsx index 548c9c5695ef1..2116d2da6e74d 100644 --- a/frontend/src/scenes/data-warehouse/ViewLinkModal.tsx +++ b/frontend/src/scenes/data-warehouse/ViewLinkModal.tsx @@ -18,7 +18,7 @@ import { IconSwapHoriz } from 'lib/lemon-ui/icons' import { useState } from 'react' import { viewLinkLogic } from 'scenes/data-warehouse/viewLinkLogic' 
-import { DatabaseSchemaQueryResponseField } from '~/queries/schema' +import { DatabaseSchemaQueryResponseField, NodeKind } from '~/queries/schema' export function ViewLinkModal(): JSX.Element { const { isJoinTableModalOpen } = useValues(viewLinkLogic) @@ -45,7 +45,6 @@ export function ViewLinkModal(): JSX.Element { export function ViewLinkForm(): JSX.Element { const { tableOptions, - selectedJoiningTable, selectedJoiningTableName, selectedSourceTableName, sourceTableKeys, @@ -116,6 +115,7 @@ export function ViewLinkForm(): JSX.Element { )} @@ -132,7 +132,7 @@ export function ViewLinkForm(): JSX.Element { fullWidth onSelect={selectJoiningKey} value={joiningIsUsingHogQLExpression ? '' : selectedJoiningKey ?? undefined} - disabledReason={selectedJoiningTable ? '' : 'Select a table to choose join key'} + disabledReason={selectedJoiningTableName ? '' : 'Select a table to choose join key'} options={[...joiningTableKeys, { value: '', label: HogQL Expression }]} placeholder="Select a key" /> @@ -140,6 +140,7 @@ export function ViewLinkForm(): JSX.Element { )} @@ -195,14 +196,16 @@ export function ViewLinkForm(): JSX.Element { const HogQLDropdown = ({ hogQLValue, onHogQLValueChange, + tableName, }: { hogQLValue: string + tableName: string onHogQLValueChange: (hogQLValue: string) => void }): JSX.Element => { const [isHogQLDropdownVisible, setIsHogQLDropdownVisible] = useState(false) return ( -
+
{ onHogQLValueChange(currentValue) setIsHogQLDropdownVisible(false) diff --git a/frontend/src/scenes/experiments/ExperimentNext.tsx b/frontend/src/scenes/experiments/ExperimentNext.tsx index f782ce044f10e..e1891241ac816 100644 --- a/frontend/src/scenes/experiments/ExperimentNext.tsx +++ b/frontend/src/scenes/experiments/ExperimentNext.tsx @@ -2,21 +2,30 @@ import './Experiment.scss' import { IconPlusSmall, IconTrash } from '@posthog/icons' import { LemonDivider, LemonInput, LemonTextArea, Tooltip } from '@posthog/lemon-ui' -import { useActions, useValues } from 'kea' +import { BindLogic, useActions, useValues } from 'kea' import { Form, Group } from 'kea-forms' import { ExperimentVariantNumber } from 'lib/components/SeriesGlyph' import { MAX_EXPERIMENT_VARIANTS } from 'lib/constants' import { IconChevronRight } from 'lib/lemon-ui/icons' import { LemonButton } from 'lib/lemon-ui/LemonButton' import { LemonField } from 'lib/lemon-ui/LemonField' -import React, { useEffect } from 'react' +import { LemonRadio } from 'lib/lemon-ui/LemonRadio' +import { capitalizeFirstLetter } from 'lib/utils' +import React from 'react' +import { insightDataLogic } from 'scenes/insights/insightDataLogic' +import { insightLogic } from 'scenes/insights/insightLogic' +import { Query } from '~/queries/Query/Query' +import { InsightType } from '~/types' + +import { EXPERIMENT_INSIGHT_ID } from './constants' import { experimentLogic } from './experimentLogic' +import { ExperimentInsightCreator } from './MetricSelector' const Header = (): JSX.Element => { const { currentFormStep } = useValues(experimentLogic) - const steps = ['Info', 'Goal', 'Code'] + const steps = ['Info', 'Goal'] return (
@@ -165,41 +174,157 @@ const StepInfo = (): JSX.Element => { } const StepGoal = (): JSX.Element => { - return
Goal
-} + const { experiment, exposureAndSampleSize, experimentInsightType, groupTypes, aggregationLabel } = + useValues(experimentLogic) + const { setExperiment, setNewExperimentInsight, createExperiment } = useActions(experimentLogic) + + // insightLogic + const logic = insightLogic({ dashboardItemId: EXPERIMENT_INSIGHT_ID }) + const { insightProps } = useValues(logic) + + // insightDataLogic + const { query } = useValues(insightDataLogic(insightProps)) -const StepCode = (): JSX.Element => { - return
Code
+ return ( +
+
+
+

Participant type

+
+
+ This sets the default aggregation type for all metrics and feature flags. You can change this at any
+ time by updating the metric or feature flag.
+
+ + { + const groupTypeIndex = rawGroupTypeIndex !== -1 ? rawGroupTypeIndex : undefined + + setExperiment({ + parameters: { + ...experiment.parameters, + aggregation_group_type_index: groupTypeIndex ?? undefined, + }, + }) + setNewExperimentInsight() + }} + options={[ + { value: -1, label: 'Persons' }, + ...Array.from(groupTypes.values()).map((groupType) => ({ + value: groupType.group_type_index, + label: capitalizeFirstLetter(aggregationLabel(groupType.group_type_index).plural), + })), + ]} + /> +
+
+
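// A minimal sketch (not part of the PR diff) of the "Participant type" mapping used above:
// the radio needs a concrete value for every option, so "Persons" is encoded as -1 and
// mapped back to `undefined` before being stored as aggregation_group_type_index.
type AggregationGroupTypeIndex = number | undefined

function toAggregationGroupTypeIndex(rawGroupTypeIndex: number): AggregationGroupTypeIndex {
    // -1 is the "Persons" sentinel; any other value is a real group type index
    return rawGroupTypeIndex !== -1 ? rawGroupTypeIndex : undefined
}

// toAggregationGroupTypeIndex(-1) -> undefined (person-level aggregation)
// toAggregationGroupTypeIndex(0)  -> 0 (first group type, e.g. "organization")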

Goal type

+ + { + val && + setNewExperimentInsight({ + insight: val, + properties: experiment?.filters?.properties, + }) + }} + options={[ + { + value: InsightType.FUNNELS, + label: ( +
+
Conversion funnel
+
+ Track how many people complete a sequence of actions and/or events +
+
+ ), + }, + { + value: InsightType.TRENDS, + label: ( +
+
Trend
+
+ Track a cumulative total count of a specific event or action +
+
+ ), + }, + ]} + /> +
+
+
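// A minimal sketch (not part of the PR diff) of how the goal type selected above drives
// setNewExperimentInsight in the experimentLogic.tsx hunk later in this diff: TRENDS is now
// the explicit branch, and anything else falls through to funnel defaults, matching the new
// experimentInsightType fallback of FUNNELS. Filter shapes are simplified for illustration.
enum InsightKind {
    TRENDS = 'TRENDS',
    FUNNELS = 'FUNNELS',
}

function defaultInsightFor(insight?: InsightKind): { insight: InsightKind } {
    if (insight === InsightKind.TRENDS) {
        return { insight: InsightKind.TRENDS } // trends-specific defaults (dates, math) go here
    }
    // FUNNELS, undefined, or anything unexpected: funnels is the default goal
    return { insight: InsightKind.FUNNELS }
}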

Goal criteria

+
+ {experimentInsightType === InsightType.FUNNELS + ? "Create the funnel where you'd like to see an increased conversion rate." + : 'Create a trend goal to track change in a single metric.'} +
+ +
+ +
+
+
+

Goal preview

+
+ + + +
+
+
+
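// The "Goal preview" block renders the query assembled by insightDataLogic through the
// Query component imported at the top of this file. A minimal sketch; the `readOnly` prop
// and the prop typing are assumptions here, not confirmed by the diff:
import { Query } from '~/queries/Query/Query'

function GoalPreview({ query }: { query: any }): JSX.Element {
    // show the goal insight without any edit controls
    return <Query query={query} readOnly={true} />
}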
+ { + const { exposure, sampleSize } = exposureAndSampleSize + createExperiment(true, exposure, sampleSize) + }} + > + Create experiment + +
+
+ ) } export function ExperimentNext(): JSX.Element { - const { currentFormStep, props } = useValues(experimentLogic) - const { setCurrentFormStep } = useActions(experimentLogic) - - useEffect(() => { - setCurrentFormStep(0) - }, []) + const { experimentId, editingExistingExperiment, currentFormStep, props } = useValues(experimentLogic) const stepComponents = { 0: , 1: , - 2: , } const CurrentStepComponent = (currentFormStep && stepComponents[currentFormStep]) || return ( -
-
-
- {CurrentStepComponent} -
-
+ <> + {experimentId === 'new' || editingExistingExperiment ? ( +
+
+
+ {CurrentStepComponent} +
+
+ ) : ( +

{`Experiment ${experimentId} draft/results`}

+ )} + ) } diff --git a/frontend/src/scenes/experiments/experimentLogic.tsx b/frontend/src/scenes/experiments/experimentLogic.tsx index 35e617d41470e..cfac9849aad1a 100644 --- a/frontend/src/scenes/experiments/experimentLogic.tsx +++ b/frontend/src/scenes/experiments/experimentLogic.tsx @@ -354,17 +354,7 @@ export const experimentLogic = kea([ setNewExperimentInsight: async ({ filters }) => { let newInsightFilters const aggregationGroupTypeIndex = values.experiment.parameters?.aggregation_group_type_index - if (filters?.insight === InsightType.FUNNELS) { - newInsightFilters = cleanFilters({ - insight: InsightType.FUNNELS, - funnel_viz_type: FunnelVizType.Steps, - date_from: dayjs().subtract(DEFAULT_DURATION, 'day').format('YYYY-MM-DDTHH:mm'), - date_to: dayjs().endOf('d').format('YYYY-MM-DDTHH:mm'), - layout: FunnelLayout.horizontal, - aggregation_group_type_index: aggregationGroupTypeIndex, - ...filters, - }) - } else { + if (filters?.insight === InsightType.TRENDS) { const groupAggregation = aggregationGroupTypeIndex !== undefined ? { math: 'unique_group', math_group_type_index: aggregationGroupTypeIndex } @@ -380,6 +370,17 @@ export const experimentLogic = kea([ ...eventAddition, ...filters, }) + } else { + newInsightFilters = cleanFilters({ + insight: InsightType.FUNNELS, + funnel_viz_type: FunnelVizType.Steps, + date_from: dayjs().subtract(DEFAULT_DURATION, 'day').format('YYYY-MM-DDTHH:mm'), + date_to: dayjs().endOf('d').format('YYYY-MM-DDTHH:mm'), + layout: FunnelLayout.horizontal, + ...(aggregationGroupTypeIndex !== undefined && { + aggregation_group_type_index: aggregationGroupTypeIndex, + }), + }) } actions.updateQuerySource(filtersToQueryNode(newInsightFilters)) @@ -663,7 +664,7 @@ export const experimentLogic = kea([ experimentInsightType: [ (s) => [s.experiment], (experiment): InsightType => { - return experiment?.filters?.insight || InsightType.TRENDS + return experiment?.filters?.insight || InsightType.FUNNELS }, ], isExperimentRunning: [ diff --git a/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx b/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx index 71150131d7b66..6693835f6564c 100644 --- a/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx +++ b/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx @@ -11,7 +11,7 @@ import { supportLogic } from 'lib/components/Support/supportLogic' import { IconErrorOutline, IconOpenInNew } from 'lib/lemon-ui/icons' import { Link } from 'lib/lemon-ui/Link' import { Tooltip } from 'lib/lemon-ui/Tooltip' -import { posthog } from 'posthog-js' +import posthog from 'posthog-js' import { funnelDataLogic } from 'scenes/funnels/funnelDataLogic' import { entityFilterLogic } from 'scenes/insights/filters/ActionFilter/entityFilterLogic' import { insightLogic } from 'scenes/insights/insightLogic' diff --git a/frontend/src/scenes/insights/aggregationAxisFormat.ts b/frontend/src/scenes/insights/aggregationAxisFormat.ts index 68d4d9ed245e8..965a1ff3834d6 100644 --- a/frontend/src/scenes/insights/aggregationAxisFormat.ts +++ b/frontend/src/scenes/insights/aggregationAxisFormat.ts @@ -23,15 +23,15 @@ export const formatAggregationAxisValue = ( ): string => { value = Number(value) const decimalPlaces = - (trendsFilter as TrendsFilter)?.decimalPlaces || (trendsFilter as Partial)?.decimal_places + (trendsFilter as TrendsFilter)?.decimalPlaces ?? 
(trendsFilter as Partial)?.decimal_places const aggregationAxisFormat = - (trendsFilter as TrendsFilter)?.aggregationAxisFormat || + (trendsFilter as TrendsFilter)?.aggregationAxisFormat ?? (trendsFilter as Partial)?.aggregation_axis_format const aggregationAxisPrefix = - (trendsFilter as TrendsFilter)?.aggregationAxisPrefix || + (trendsFilter as TrendsFilter)?.aggregationAxisPrefix ?? (trendsFilter as Partial)?.aggregation_axis_prefix const aggregationAxisPostfix = - (trendsFilter as TrendsFilter)?.aggregationAxisPostfix || + (trendsFilter as TrendsFilter)?.aggregationAxisPostfix ?? (trendsFilter as Partial)?.aggregation_axis_postfix let formattedValue = humanFriendlyNumber(value, decimalPlaces) if (aggregationAxisFormat) { diff --git a/frontend/src/scenes/insights/views/Funnels/CorrelationActionsCell.tsx b/frontend/src/scenes/insights/views/Funnels/CorrelationActionsCell.tsx index be2c03904366c..5f221e1f4ec1b 100644 --- a/frontend/src/scenes/insights/views/Funnels/CorrelationActionsCell.tsx +++ b/frontend/src/scenes/insights/views/Funnels/CorrelationActionsCell.tsx @@ -82,8 +82,16 @@ const CorrelationActionsCellComponent = ({ buttons }: CorrelationActionsCellComp visible={popoverOpen} actionable onClickOutside={() => setPopoverOpen(false)} - overlay={buttons.map((props, index) => ( - + overlay={buttons.map(({ onClick, ...props }, index) => ( + { + setPopoverOpen(false) + onClick && onClick(e) + }} + {...props} + /> ))} > } onClick={() => setPopoverOpen(!popoverOpen)} /> diff --git a/frontend/src/scenes/insights/views/Funnels/FunnelCorrelationTable.scss b/frontend/src/scenes/insights/views/Funnels/FunnelCorrelationTable.scss index 54f874cc774d1..2a6aa3cbaeba6 100644 --- a/frontend/src/scenes/insights/views/Funnels/FunnelCorrelationTable.scss +++ b/frontend/src/scenes/insights/views/Funnels/FunnelCorrelationTable.scss @@ -1,84 +1,12 @@ -.funnel-correlation-table { - margin-top: 1rem; - overflow: hidden; - border: 1px solid var(--border); - border-radius: var(--radius); - - .ant-table { - thead th { - border-bottom: 1px solid var(--border); - } - } - - .funnel-correlation-header { - display: flex; - flex-wrap: wrap; - place-content: space-between space-between; - align-items: center; - align-self: stretch; - padding: 0.25rem 0.5rem; - background: var(--mid); - border-top-left-radius: var(--radius); - border-top-right-radius: var(--radius); - - .table-header { - display: flex; - align-items: center; - font-size: 11px; - font-style: normal; - font-weight: bold; - line-height: 16px; - color: var(--default); - text-transform: uppercase; - letter-spacing: 0.02em; +.FunnelCorrelationTable { + .table-options { + .LemonCheckbox:not(:last-child) label { + border-right: none; + border-radius: var(--radius) 0 0 var(--radius); } - .table-options { - display: flex; - flex-grow: 1; - align-items: center; - justify-content: flex-end; - - .title { - /* identical to box height, or 145% */ - - display: flex; - align-items: center; - margin: 5px; - font-family: var(--font-sans); - font-size: 11px; - font-style: normal; - font-weight: 600; - line-height: 16px; - color: var(--muted); - text-transform: uppercase; - letter-spacing: 0.02em; - } - - .LemonCheckbox:not(:last-child) label { - border-right: none; - border-radius: var(--radius) 0 0 var(--radius); - } - - .LemonCheckbox:last-child label { - border-radius: 0 var(--radius) var(--radius) 0; - } - } - } - - .column-info { - padding-left: 4px; - color: var(--muted-alt); - cursor: pointer; - } - - .nested-properties-table { - margin: 1rem; - border: 1px solid 
var(--border); - border-radius: var(--radius); - - thead th { - border-bottom: 1px solid var(--border); + .LemonCheckbox:last-child label { + border-radius: 0 var(--radius) var(--radius) 0; } } } diff --git a/frontend/src/scenes/insights/views/Funnels/FunnelCorrelationTable.stories.tsx b/frontend/src/scenes/insights/views/Funnels/FunnelCorrelationTable.stories.tsx new file mode 100644 index 0000000000000..5e3c0b5b1eb5a --- /dev/null +++ b/frontend/src/scenes/insights/views/Funnels/FunnelCorrelationTable.stories.tsx @@ -0,0 +1,60 @@ +import { Meta, StoryFn, StoryObj } from '@storybook/react' +import { BindLogic } from 'kea' +import { useState } from 'react' +import { insightLogic } from 'scenes/insights/insightLogic' + +import { mswDecorator } from '~/mocks/browser' +import funnelCorrelation from '~/mocks/fixtures/api/projects/team_id/insights/funnelCorrelation.json' +import { dataNodeLogic, DataNodeLogicProps } from '~/queries/nodes/DataNode/dataNodeLogic' +import { filtersToQueryNode } from '~/queries/nodes/InsightQuery/utils/filtersToQueryNode' +import { insightVizDataNodeKey } from '~/queries/nodes/InsightViz/InsightViz' +import { getCachedResults } from '~/queries/nodes/InsightViz/utils' +import { InsightLogicProps } from '~/types' + +import { FunnelCorrelationTable } from './FunnelCorrelationTable' + +type Story = StoryObj +const meta: Meta = { + title: 'Insights/FunnelCorrelationTable', + component: FunnelCorrelationTable, + decorators: [ + mswDecorator({ + post: { + 'api/projects/:team_id/insights/funnel/correlation/': funnelCorrelation, + }, + }), + ], +} +export default meta + +let uniqueNode = 0 + +const Template: StoryFn = () => { + const [dashboardItemId] = useState(() => `FunnelCorrelationTableStory.${uniqueNode++}`) + + // eslint-disable-next-line @typescript-eslint/no-var-requires + const insight = require('../../../../mocks/fixtures/api/projects/team_id/insights/funnelLeftToRight.json') + const filters = insight.filters + const cachedInsight = { ...insight, short_id: dashboardItemId, filters } + + const insightProps = { dashboardItemId, doNotLoad: true, cachedInsight } as InsightLogicProps + const querySource = filtersToQueryNode(filters) + + const dataNodeLogicProps: DataNodeLogicProps = { + query: querySource, + key: insightVizDataNodeKey(insightProps), + cachedResults: getCachedResults(insightProps.cachedInsight, querySource), + doNotLoad: insightProps.doNotLoad, + } + + return ( + + + + + + ) +} + +export const Default: Story = Template.bind({}) +Default.args = {} diff --git a/frontend/src/scenes/insights/views/Funnels/FunnelCorrelationTable.tsx b/frontend/src/scenes/insights/views/Funnels/FunnelCorrelationTable.tsx index 64c09c60ba08f..6d8d366250397 100644 --- a/frontend/src/scenes/insights/views/Funnels/FunnelCorrelationTable.tsx +++ b/frontend/src/scenes/insights/views/Funnels/FunnelCorrelationTable.tsx @@ -1,9 +1,8 @@ import './FunnelCorrelationTable.scss' -import { IconCollapse, IconExpand, IconInfo, IconTrending } from '@posthog/icons' -import { LemonCheckbox } from '@posthog/lemon-ui' -import { ConfigProvider, Empty, Table } from 'antd' -import Column from 'antd/lib/table/Column' +import { IconTrending } from '@posthog/icons' +import { LemonCheckbox, LemonTable } from '@posthog/lemon-ui' +import { Empty } from 'antd' import { useActions, useValues } from 'kea' import { PropertyKeyInfo } from 'lib/components/PropertyKeyInfo' import { VisibilitySensor } from 'lib/components/VisibilitySensor/VisibilitySensor' @@ -11,7 +10,6 @@ import { IconSelectEvents, 
IconTrendingDown } from 'lib/lemon-ui/icons' import { LemonButton } from 'lib/lemon-ui/LemonButton' import { Link } from 'lib/lemon-ui/Link' import { Spinner } from 'lib/lemon-ui/Spinner/Spinner' -import { Tooltip } from 'lib/lemon-ui/Tooltip' import { capitalizeFirstLetter } from 'lib/utils' import { useEffect } from 'react' import { funnelCorrelationLogic } from 'scenes/funnels/funnelCorrelationLogic' @@ -86,7 +84,7 @@ export function FunnelCorrelationTable(): JSX.Element | null { return ( <> -

+
{is_success ? ( ) : ( @@ -99,7 +97,7 @@ export function FunnelCorrelationTable(): JSX.Element | null { )} -

+
{capitalizeFirstLetter(aggregationTargetLabel.plural)}{' '} {querySource?.aggregation_group_type_index != undefined ? 'that' : 'who'} converted were{' '} @@ -153,67 +151,70 @@ export function FunnelCorrelationTable(): JSX.Element | null { } return ( -
+

Correlated properties

- record.event.event} - className="nested-properties-table" - scroll={{ x: 'max-content' }} + rowKey={(record: FunnelCorrelation) => 'nested' + record.event.event} pagination={{ pageSize: 5, hideOnSinglePage: true, - onChange: (page, page_size) => + onBackward: () => + reportCorrelationInteraction( + FunnelCorrelationResultsType.EventWithProperties, + 'pagination change', + { direction: 'backward', page_size: 5 } + ), + onForward: () => reportCorrelationInteraction( FunnelCorrelationResultsType.EventWithProperties, 'pagination change', - { page, page_size } + { direction: 'forward', page_size: 5 } ), }} - > - renderOddsRatioTextRecord(record)} - align="left" - /> - renderSuccessCount(record)} - width={90} - align="center" - /> - renderFailureCount(record)} - width={120} - align="center" - /> - - } - align="center" - width={30} - /> -
+ columns={[ + { + title: 'Property', + key: 'eventName', + render: (_, record) => renderOddsRatioTextRecord(record), + }, + { + title: 'Completed', + key: 'success_count', + render: (_, record) => renderSuccessCount(record), + width: 90, + align: 'center', + }, + { + title: 'Dropped off', + key: 'failure_count', + render: (_, record) => renderFailureCount(record), + width: 120, + align: 'center', + }, + { + key: 'actions', + width: 30, + align: 'center', + render: (_, record) => , + }, + ]} + />
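// A minimal sketch (not part of the PR diff) of the migration pattern used above: antd's
// <Column title=... key=... render=.../> children become plain objects in LemonTable's
// `columns` prop, carrying the same title/key/render/width/align fields. Types simplified:
interface Row {
    event: { event: string }
}

const exampleColumns = [
    {
        title: 'Property',
        key: 'eventName',
        // render receives (value, record), mirroring the antd signature
        render: (_: unknown, record: Row): string => record.event.event,
    },
    { title: 'Completed', key: 'success_count', width: 90, align: 'center' as const },
]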
) } return steps.length > 1 ? ( -
- - +
+ + CORRELATED EVENTS - -

CORRELATION

+ +

CORRELATION

- - loadedEventCorrelationsTableOnce ? ( - - ) : ( - <> - {/* eslint-disable-next-line react/forbid-dom-props */} -

- Highlight events which are likely to have affected the conversion rate within the - funnel.{' '} - - Learn more about correlation analysis. - -

- loadEventCorrelations({})} - type="secondary" - className="mx-auto mt-2" - > - Load results - - - ) - } - > - record.event.event} - pagination={{ - pageSize: 5, - hideOnSinglePage: true, - onChange: () => - reportCorrelationInteraction(FunnelCorrelationResultsType.Events, 'load more'), - }} - expandable={{ - expandedRowRender: (record) => renderNestedTable(record.event.event), - expandedRowKeys: nestedTableExpandedKeys, - rowExpandable: () => querySource?.aggregation_group_type_index === undefined, - expandIcon: ({ expanded, onExpand, record, expandable }) => { - if (!expandable) { - return null - } - return expanded ? ( - - } - active - noPadding - onClick={(e) => { - removeNestedTableExpandedKey(record.event.event) - onExpand(record, e) - }} - /> - + + renderOddsRatioTextRecord(record), + }, + { + title: 'Completed', + tooltip: `${capitalizeFirstLetter(aggregationTargetLabel.plural)} ${ + querySource?.aggregation_group_type_index != undefined ? 'that' : 'who' + } performed the event and completed the entire funnel.`, + dataIndex: 'success_count', + render: (_, record) => renderSuccessCount(record), + align: 'center', + width: 90, + }, + { + title: 'Dropped off', + dataIndex: 'failure_count', + tooltip: `${capitalizeFirstLetter(aggregationTargetLabel.plural)} ${ + querySource?.aggregation_group_type_index != undefined ? 'that' : 'who' + } performed the event and did not complete the entire funnel.`, + render: (_, record) => renderFailureCount(record), + align: 'center', + width: 120, + }, + { + key: 'actions', + width: 30, + render: (_, record: FunnelCorrelation) => , + }, + ]} + dataSource={correlationValues} + emptyState={ +
+
+ {loadedEventCorrelationsTableOnce ? ( + ) : ( - + <> +

+ Highlight events which are likely to have affected the conversion rate + within the funnel.{' '} + + Learn more about correlation analysis. + +

} - noPadding - onClick={(e) => { - !eventHasPropertyCorrelations(record.event.event) && - loadEventWithPropertyCorrelations(record.event.event) - addNestedTableExpandedKey(record.event.event) - onExpand(record, e) - }} - /> -
- ) - }, - }} - > - renderOddsRatioTextRecord(record)} - align="left" - ellipsis - /> - - Completed - - - -
- } - key="success_count" - render={(_, record: FunnelCorrelation) => renderSuccessCount(record)} - width={90} - align="center" - /> - - Dropped off - - {capitalizeFirstLetter(aggregationTargetLabel.plural)}{' '} - {querySource?.aggregation_group_type_index != undefined - ? 'that' - : 'who'}{' '} - performed the event and did not complete the entire funnel. - - } - > - - -
- } - key="failure_count" - render={(_, record: FunnelCorrelation) => renderFailureCount(record)} - width={120} - align="center" - /> - } - width={30} - /> -
-
+ onClick={() => loadEventCorrelations({})} + type="secondary" + className="mx-auto mt-2" + > + Load results + + + )} +
+
+ } + loading={correlationsLoading} + size="small" + rowKey={(record) => record.event.event} + pagination={{ + pageSize: 5, + hideOnSinglePage: true, + onBackward: () => + reportCorrelationInteraction(FunnelCorrelationResultsType.Events, 'load more'), + onForward: () => reportCorrelationInteraction(FunnelCorrelationResultsType.Events, 'load more'), + }} + expandable={{ + expandedRowRender: (record) => renderNestedTable(record.event.event), + isRowExpanded: (record) => nestedTableExpandedKeys.includes(record.event.event), + rowExpandable: () => querySource?.aggregation_group_type_index === undefined, + onRowExpand: (record) => { + !eventHasPropertyCorrelations(record.event.event) && + loadEventWithPropertyCorrelations(record.event.event) + addNestedTableExpandedKey(record.event.event) + }, + onRowCollapse: (record) => { + removeNestedTableExpandedKey(record.event.event) + }, + }} + />
) : null diff --git a/frontend/src/scenes/insights/views/Funnels/FunnelPropertyCorrelationTable.stories.tsx b/frontend/src/scenes/insights/views/Funnels/FunnelPropertyCorrelationTable.stories.tsx new file mode 100644 index 0000000000000..a6f3ffab3a668 --- /dev/null +++ b/frontend/src/scenes/insights/views/Funnels/FunnelPropertyCorrelationTable.stories.tsx @@ -0,0 +1,62 @@ +import { Meta, StoryFn, StoryObj } from '@storybook/react' +import { BindLogic } from 'kea' +import { taxonomicFilterMocksDecorator } from 'lib/components/TaxonomicFilter/__mocks__/taxonomicFilterMocksDecorator' +import { useState } from 'react' +import { insightLogic } from 'scenes/insights/insightLogic' + +import { mswDecorator } from '~/mocks/browser' +import funnelCorrelation from '~/mocks/fixtures/api/projects/team_id/insights/funnelCorrelation.json' +import { dataNodeLogic, DataNodeLogicProps } from '~/queries/nodes/DataNode/dataNodeLogic' +import { filtersToQueryNode } from '~/queries/nodes/InsightQuery/utils/filtersToQueryNode' +import { insightVizDataNodeKey } from '~/queries/nodes/InsightViz/InsightViz' +import { getCachedResults } from '~/queries/nodes/InsightViz/utils' +import { InsightLogicProps } from '~/types' + +import { FunnelPropertyCorrelationTable } from './FunnelPropertyCorrelationTable' + +type Story = StoryObj +const meta: Meta = { + title: 'Insights/FunnelPropertyCorrelationTable', + component: FunnelPropertyCorrelationTable, + decorators: [ + mswDecorator({ + post: { + 'api/projects/:team_id/insights/funnel/correlation/': funnelCorrelation, + }, + }), + taxonomicFilterMocksDecorator, + ], +} +export default meta + +let uniqueNode = 0 + +const Template: StoryFn = () => { + const [dashboardItemId] = useState(() => `FunnelPropertyCorrelationTableStory.${uniqueNode++}`) + + // eslint-disable-next-line @typescript-eslint/no-var-requires + const insight = require('../../../../mocks/fixtures/api/projects/team_id/insights/funnelLeftToRight.json') + const filters = insight.filters + const cachedInsight = { ...insight, short_id: dashboardItemId, filters } + + const insightProps = { dashboardItemId, doNotLoad: true, cachedInsight } as InsightLogicProps + const querySource = filtersToQueryNode(filters) + + const dataNodeLogicProps: DataNodeLogicProps = { + query: querySource, + key: insightVizDataNodeKey(insightProps), + cachedResults: getCachedResults(insightProps.cachedInsight, querySource), + doNotLoad: insightProps.doNotLoad, + } + + return ( + + + + + + ) +} + +export const Default: Story = Template.bind({}) +Default.args = {} diff --git a/frontend/src/scenes/insights/views/Funnels/FunnelPropertyCorrelationTable.tsx b/frontend/src/scenes/insights/views/Funnels/FunnelPropertyCorrelationTable.tsx index 1099c1f56c483..70093d8891962 100644 --- a/frontend/src/scenes/insights/views/Funnels/FunnelPropertyCorrelationTable.tsx +++ b/frontend/src/scenes/insights/views/Funnels/FunnelPropertyCorrelationTable.tsx @@ -1,9 +1,8 @@ import './FunnelCorrelationTable.scss' -import { IconInfo, IconTrending } from '@posthog/icons' -import { LemonButton, LemonCheckbox } from '@posthog/lemon-ui' -import { ConfigProvider, Empty, Table } from 'antd' -import Column from 'antd/lib/table/Column' +import { IconTrending } from '@posthog/icons' +import { LemonButton, LemonCheckbox, LemonTable } from '@posthog/lemon-ui' +import { Empty } from 'antd' import { useActions, useValues } from 'kea' import { PropertyKeyInfo } from 'lib/components/PropertyKeyInfo' import { PropertySelect } from 'lib/components/PropertySelect/PropertySelect' 
@@ -12,7 +11,6 @@ import { VisibilitySensor } from 'lib/components/VisibilitySensor/VisibilitySens import { IconSelectProperties, IconTrendingDown } from 'lib/lemon-ui/icons' import { Link } from 'lib/lemon-ui/Link' import { Popover } from 'lib/lemon-ui/Popover' -import { Tooltip } from 'lib/lemon-ui/Tooltip' import { capitalizeFirstLetter } from 'lib/utils' import { useEffect } from 'react' import { useState } from 'react' @@ -105,7 +103,7 @@ export function FunnelPropertyCorrelationTable(): JSX.Element | null { return ( <> -

+
{is_success ? ( ) : ( @@ -118,7 +116,7 @@ export function FunnelPropertyCorrelationTable(): JSX.Element | null { )} -

+
{capitalizeFirstLetter(aggregationTargetLabel.plural)}{' '} {querySource?.aggregation_group_type_index != undefined ? 'that' : 'who'} converted were{' '} @@ -135,15 +133,17 @@ export function FunnelPropertyCorrelationTable(): JSX.Element | null { return steps.length > 1 ? ( -
-
-
+
+
+
CORRELATED PROPERTIES
-
+
-

PROPERTIES

+

+ PROPERTIES +

setIsPropertiesOpen(false)} @@ -163,7 +163,14 @@ export function FunnelPropertyCorrelationTable(): JSX.Element | null { {propertyNames.length === 1 && propertyNames[0] === '$all' ? ( <>All properties selected ) : ( - setAllProperties()}> + { + setAllProperties() + setIsPropertiesOpen(false) + }} + > Select all properties )} @@ -183,7 +190,9 @@ export function FunnelPropertyCorrelationTable(): JSX.Element | null {
-

CORRELATION

+

+ CORRELATION +

- - loadedPropertyCorrelationsTableOnce ? ( - - ) : ( - <> - {/* eslint-disable-next-line react/forbid-dom-props */} -

- Highlight properties which are likely to have affected the conversion rate within - the funnel.{' '} - - Learn more about correlation analysis. - -

- setIsPropertiesOpen(true)} - className="mx-auto mt-2" - > - Select properties - - - ) + + renderOddsRatioTextRecord(record), + }, + { + title: 'Completed', + tooltip: `${capitalizeFirstLetter(aggregationTargetLabel.plural)} ${ + querySource?.aggregation_group_type_index != undefined ? 'that' : 'who' + } have this property and completed the entire funnel.`, + key: 'success_count', + render: (_, record) => renderSuccessCount(record), + width: 90, + align: 'center', + }, + { + title: 'Dropped off', + tooltip: `${capitalizeFirstLetter(aggregationTargetLabel.plural)} ${ + querySource?.aggregation_group_type_index != undefined ? 'that' : 'who' + } have this property and did not complete the entire funnel.`, + key: 'failure_count', + render: (_, record) => renderFailureCount(record), + width: 120, + align: 'center', + }, + { + key: 'actions', + width: 30, + align: 'center', + render: (_, record) => , + }, + ]} + dataSource={propertyCorrelationValues} + loading={propertyCorrelationsLoading} + rowKey={(record) => record.event.event} + size="small" + emptyState={ +
+
+ {loadedPropertyCorrelationsTableOnce ? ( + + ) : ( + <> +

+ Highlight properties which are likely to have affected the conversion rate + within the funnel.{' '} + + Learn more about correlation analysis. + +

+ setIsPropertiesOpen(true)} + className="mx-auto mt-2" + > + Select properties + + + )} +
+
} - > - record.event.event} - pagination={{ - pageSize: 5, - hideOnSinglePage: true, - onChange: (page, page_size) => - reportCorrelationInteraction( - FunnelCorrelationResultsType.Properties, - 'pagination change', - { - page, - page_size, - } - ), - }} - > - renderOddsRatioTextRecord(record)} - align="left" - /> - - Completed - - - - - } - key="success_count" - render={(_, record: FunnelCorrelation) => renderSuccessCount(record)} - width={90} - align="center" - /> - - Dropped off - - {capitalizeFirstLetter(aggregationTargetLabel.plural)}{' '} - {querySource?.aggregation_group_type_index != undefined - ? 'that' - : 'who'}{' '} - have this property and did not complete the entire funnel. - - } - > - - - - } - key="failure_count" - render={(_, record: FunnelCorrelation) => renderFailureCount(record)} - width={120} - align="center" - /> - ( - - )} - align="center" - width={30} - /> -
-
+ pagination={{ + pageSize: 5, + hideOnSinglePage: true, + onBackward: () => + reportCorrelationInteraction(FunnelCorrelationResultsType.Properties, 'pagination change', { + direction: 'backward', + page_size: 5, + }), + onForward: () => + reportCorrelationInteraction(FunnelCorrelationResultsType.Properties, 'pagination change', { + direction: 'forward', + page_size: 5, + }), + }} + />
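// A minimal sketch (not part of the PR diff) of the pagination change applied to both
// correlation tables: antd's onChange(page, pageSize) callback is replaced by LemonTable's
// direction callbacks, so the interaction is reported with a direction and the fixed
// page_size of 5 instead of a page number.
function reportPaginationChange(direction: 'backward' | 'forward'): void {
    // stand-in for reportCorrelationInteraction(resultsType, 'pagination change', ...)
    console.log('pagination change', { direction, page_size: 5 })
}

const examplePagination = {
    pageSize: 5,
    hideOnSinglePage: true,
    onBackward: () => reportPaginationChange('backward'),
    onForward: () => reportPaginationChange('forward'),
}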
) : null diff --git a/frontend/src/scenes/onboarding/OnboardingProductConfiguration.tsx b/frontend/src/scenes/onboarding/OnboardingProductConfiguration.tsx index 7899d14ee5c0b..b247cc7d4476a 100644 --- a/frontend/src/scenes/onboarding/OnboardingProductConfiguration.tsx +++ b/frontend/src/scenes/onboarding/OnboardingProductConfiguration.tsx @@ -1,11 +1,45 @@ import { LemonDivider, LemonSelect, LemonSwitch } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import React, { useEffect } from 'react' +import { pluginsLogic } from 'scenes/plugins/pluginsLogic' import { OnboardingStepKey } from './onboardingLogic' import { onboardingProductConfigurationLogic, ProductConfigOption } from './onboardingProductConfigurationLogic' import { OnboardingStep } from './OnboardingStep' +type ConfigType = 'toggle' | 'select' +type PluginType = 'plugin' +type ConfigOption = + | { + title: string + description?: string + type: ConfigType + selectOptions?: { label: string; value: string | number }[] + value: boolean | string | number + onChange: (newValue: boolean | string | number) => void + } + | { + title: string + description?: string + type: PluginType + value: boolean + onChange: (newValue: boolean) => void + } + +interface PluginContent { + title: string + description: string +} +type PluginContentMapping = Record +const pluginContentMapping: PluginContentMapping = { + // 1 is the id of the GEO IP plugin + 1: { + title: 'Capture location information', + description: + 'Enrich PostHog events and persons with IP location data. This is useful for understanding where your users are coming from. This setting can be found under the data pipelines apps.', + }, +} + export const OnboardingProductConfiguration = ({ stepKey = OnboardingStepKey.PRODUCT_CONFIGURATION, options, @@ -14,53 +48,86 @@ export const OnboardingProductConfiguration = ({ options: ProductConfigOption[] }): JSX.Element | null => { const { configOptions } = useValues(onboardingProductConfigurationLogic) + const { defaultEnabledPlugins } = useValues(pluginsLogic) const { setConfigOptions, saveConfiguration } = useActions(onboardingProductConfigurationLogic) + const { toggleEnabled } = useActions(pluginsLogic) + useEffect(() => { setConfigOptions(options) }, []) - return configOptions ? ( + const combinedList: ConfigOption[] = [ + ...configOptions.map((option) => ({ + title: option.title, + description: option.description, + type: option.type as ConfigType, + selectOptions: option.selectOptions, + value: option.value, + onChange: (newValue: boolean | string | number) => { + setConfigOptions( + configOptions.map((o) => (o.teamProperty === option.teamProperty ? { ...o, value: newValue } : o)) + ) + }, + })), + ...defaultEnabledPlugins.map((plugin) => { + const pluginContent = pluginContentMapping[plugin.id] + return { + title: pluginContent?.title || plugin.name, + description: pluginContent?.description || plugin.description, + type: 'plugin' as PluginType, + value: plugin.pluginConfig?.enabled || false, + onChange: (newValue: boolean) => { + toggleEnabled({ + id: plugin.pluginConfig?.id, + enabled: newValue, + }) + }, + } + }), + ] + + return combinedList.length > 0 ? ( -

Options

- {configOptions?.map((option: ProductConfigOption, idx) => ( - - -
- - {option.type == 'toggle' ? ( - { - setConfigOptions( - configOptions.map((o) => - o.teamProperty === option.teamProperty ? { ...o, value: checked } : o - ) - ) - }} - className="self-center" - fullWidth={true} - checked={option.value || false} - /> - ) : ( -
- { - setConfigOptions( - configOptions.map((o) => - o.teamProperty === option.teamProperty ? { ...o, value: v } : o - ) - ) - }} - options={option.selectOptions || []} - value={option.value} - /> +
+

Options

+ {combinedList.map((item, idx) => ( + + +
+
+ +

{item.description}

+
+
+ {item.type === 'toggle' ? ( + + ) : item.type === 'plugin' ? ( + + ) : ( +
+ +
+ )}
- )} -

{option.description}

-
-
- ))} +
+ + ))} +
) : null } diff --git a/frontend/src/scenes/plugins/edit/PluginDrawer.tsx b/frontend/src/scenes/plugins/edit/PluginDrawer.tsx index 2543867ce3c28..6313663808f93 100644 --- a/frontend/src/scenes/plugins/edit/PluginDrawer.tsx +++ b/frontend/src/scenes/plugins/edit/PluginDrawer.tsx @@ -118,7 +118,7 @@ export function PluginDrawer(): JSX.Element {
{endWithPunctation(editingPlugin.description)} -
+
{editingPlugin.url && ( diff --git a/frontend/src/scenes/plugins/pluginsLogic.ts b/frontend/src/scenes/plugins/pluginsLogic.ts index e4ce68b032669..0c3ecd7912790 100644 --- a/frontend/src/scenes/plugins/pluginsLogic.ts +++ b/frontend/src/scenes/plugins/pluginsLogic.ts @@ -574,6 +574,15 @@ export const pluginsLogic = kea([ ) }, ], + defaultEnabledPlugins: [ + (s) => [s.filteredEnabledPlugins, s.filteredDisabledPlugins], + (filteredEnabledPlugins, filteredDisabledPlugins) => { + const defaultEnabledPluginIds = [1] // GEO IP plugin + return filteredEnabledPlugins + .concat(filteredDisabledPlugins) + .filter((plugin) => defaultEnabledPluginIds.includes(plugin.id)) + }, + ], pluginUrlToMaintainer: [ (s) => [s.repository], (repository) => { diff --git a/frontend/src/scenes/project/CreateProjectModal.tsx b/frontend/src/scenes/project/CreateProjectModal.tsx index b1212bf53aeea..eeba0420612fb 100644 --- a/frontend/src/scenes/project/CreateProjectModal.tsx +++ b/frontend/src/scenes/project/CreateProjectModal.tsx @@ -57,9 +57,9 @@ export function CreateProjectModal({ Learn more in PostHog Docs.

- {currentOrganization?.teams?.some((team) => team.name === 'Default Project') && ( + {currentOrganization?.teams?.some((team) => team.name.toLowerCase() === 'default project') && (

- Bonus tip: You can always rename your "Default Project".
+ Bonus tip: You can always rename your "Default project".

)} diff --git a/frontend/src/scenes/session-recordings/filters/DurationTypeSelect.tsx b/frontend/src/scenes/session-recordings/filters/DurationTypeSelect.tsx index e6585bf4306d4..09c4517b1c4d4 100644 --- a/frontend/src/scenes/session-recordings/filters/DurationTypeSelect.tsx +++ b/frontend/src/scenes/session-recordings/filters/DurationTypeSelect.tsx @@ -1,5 +1,5 @@ import { LemonSelect } from '@posthog/lemon-ui' -import { posthog } from 'posthog-js' +import posthog from 'posthog-js' import { DurationType } from '~/types' diff --git a/frontend/src/styles/global.scss b/frontend/src/styles/global.scss index 5fcef01ceee1e..37ea604986d6a 100644 --- a/frontend/src/styles/global.scss +++ b/frontend/src/styles/global.scss @@ -506,11 +506,6 @@ body { fill: var(--border-3000); } - .ant-table-tbody > tr.ant-table-row:hover > td, - .ant-table-tbody > tr > td.ant-table-cell-row-hover { - background-color: var(--mid); - } - @include dark-mode-3000-variables; } @@ -685,45 +680,6 @@ body { max-width: 350px; } - .ant-table-thead > tr > th, - .ant-table-small .ant-table-thead > tr > th { - background: var(--mid); - } - - .ant-table-tbody > tr > td { - border-bottom-color: var(--border); - } - - .ant-table-tbody > tr.ant-table-placeholder:hover > td { - background: inherit; - } - - .ant-table { - color: var(--text-3000); - } - - .ant-pagination-item-active { - border-color: var(--link); - - & a { - color: var(--link); - } - - & a:hover { - color: var(--link); - } - } - - .ant-pagination-item:hover { - border-color: var(--link); - } - - .ant-pagination-item:hover a, - .ant-pagination-prev:hover .ant-pagination-item-link, - .ant-pagination-next:hover .ant-pagination-item-link { - color: var(--link); - } - @include common-variables; } diff --git a/frontend/src/toolbar/index.tsx b/frontend/src/toolbar/index.tsx index 66f36bd4f45ad..a5bdb7923fa1c 100644 --- a/frontend/src/toolbar/index.tsx +++ b/frontend/src/toolbar/index.tsx @@ -1,7 +1,7 @@ import '~/styles' import './styles.scss' -import { PostHog } from 'posthog-js' +import type { PostHog } from 'posthog-js' import { createRoot } from 'react-dom/client' import { initKea } from '~/initKea' diff --git a/frontend/src/types.ts b/frontend/src/types.ts index 662f83942c1b6..511595e56a896 100644 --- a/frontend/src/types.ts +++ b/frontend/src/types.ts @@ -20,7 +20,7 @@ import { } from 'lib/constants' import { Dayjs, dayjs } from 'lib/dayjs' import { PopoverProps } from 'lib/lemon-ui/Popover/Popover' -import { PostHog } from 'posthog-js' +import type { PostHog } from 'posthog-js' import { Layout } from 'react-grid-layout' import { LogLevel } from 'rrweb' import { BehavioralFilterKey, BehavioralFilterType } from 'scenes/cohorts/CohortFilters/types' diff --git a/frontend/utils.mjs b/frontend/utils.mjs index 16ad8b984c51e..e4e9bbd5d9c70 100644 --- a/frontend/utils.mjs +++ b/frontend/utils.mjs @@ -194,15 +194,21 @@ function getChunks(result) { } export async function buildInParallel(configs, { onBuildStart, onBuildComplete } = {}) { - await Promise.all( - configs.map((config) => - buildOrWatch({ - ...config, - onBuildStart, - onBuildComplete, - }) + try { + await Promise.all( + configs.map((config) => + buildOrWatch({ + ...config, + onBuildStart, + onBuildComplete, + }) + ) ) - ) + } catch (e) { + if (!isDev) { + process.exit(1) + } + } if (!isDev) { process.exit(0) @@ -338,7 +344,11 @@ export async function buildOrWatch(config) { ...buildResult.metafile, } } catch (e) { - log({ success: false, name, time }) + if (isDev) { + log({ success: false, name, time }) + } else { + 
throw e + } } } diff --git a/latest_migrations.manifest b/latest_migrations.manifest index 9ade46300e425..ed1766c1754ab 100644 --- a/latest_migrations.manifest +++ b/latest_migrations.manifest @@ -5,7 +5,7 @@ contenttypes: 0002_remove_content_type_name ee: 0015_add_verified_properties otp_static: 0002_throttling otp_totp: 0002_auto_20190420_0723 -posthog: 0395_alter_batchexportbackfill_end_at +posthog: 0396_projects_and_environments sessions: 0001_initial social_django: 0010_uid_db_index two_factor: 0007_auto_20201201_1019 diff --git a/mypy-baseline.txt b/mypy-baseline.txt index 97ad2d3ed57bc..c095bc7fff997 100644 --- a/mypy-baseline.txt +++ b/mypy-baseline.txt @@ -673,6 +673,7 @@ posthog/queries/trends/test/test_person.py:0: error: "str" has no attribute "get posthog/queries/trends/test/test_person.py:0: error: Invalid index type "int" for "HttpResponse"; expected type "str | bytes" [index] posthog/queries/trends/test/test_person.py:0: error: "str" has no attribute "get" [attr-defined] posthog/queries/trends/test/test_person.py:0: error: Invalid index type "int" for "HttpResponse"; expected type "str | bytes" [index] +posthog/management/commands/migrate_team.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "BatchExport") [assignment] posthog/hogql/test/test_query.py:0: error: Argument 1 to "len" has incompatible type "list[Any] | None"; expected "Sized" [arg-type] posthog/hogql/test/test_query.py:0: error: Value of type "list[QueryTiming] | None" is not indexable [index] posthog/hogql/test/test_query.py:0: error: Value of type "list[QueryTiming] | None" is not indexable [index] diff --git a/plugin-server/src/main/ingestion-queues/session-recording/process-event.ts b/plugin-server/src/main/ingestion-queues/session-recording/process-event.ts index 9795bbcbeef2e..a729fb23fcff6 100644 --- a/plugin-server/src/main/ingestion-queues/session-recording/process-event.ts +++ b/plugin-server/src/main/ingestion-queues/session-recording/process-event.ts @@ -236,6 +236,18 @@ function isAnyMouseActivity(event: RRWebEvent) { ) } +/** + * meta event has type = 4 and event.data.href + * and custom events have type = 5 and _might_ have event.data.payload.href + * + * we don't really care what type of event they are just whether they have a href + */ +function hrefFrom(event: RRWebEvent): string | undefined { + const metaHref = event.data?.href?.trim() + const customHref = event.data?.payload?.href?.trim() + return metaHref || customHref || undefined +} + export const createSessionReplayEvent = ( uuid: string, team_id: number, @@ -275,9 +287,12 @@ export const createSessionReplayEvent = ( keypressCount += 1 } } - if (url === null && !!event.data?.href?.trim().length) { - url = event.data.href + + const eventUrl: string | undefined = hrefFrom(event) + if (url === null && eventUrl) { + url = eventUrl } + if (event.type === RRWebEventType.Plugin && event.data?.plugin === 'rrweb/console@1') { const level = safeLevel(event.data.payload?.level) if (level === 'info') { diff --git a/plugin-server/tests/main/ingestion-queues/session-recording/process-event.test.ts b/plugin-server/tests/main/ingestion-queues/session-recording/process-event.test.ts index 8bca6952603a1..d74d3a2de9e23 100644 --- a/plugin-server/tests/main/ingestion-queues/session-recording/process-event.test.ts +++ b/plugin-server/tests/main/ingestion-queues/session-recording/process-event.test.ts @@ -181,7 +181,44 @@ describe('session recording process event', () => { }, }, { - testDescription: 'first url 
detection', + testDescription: 'url can be detected in meta event', + snapshotData: { + events_summary: [ + { + timestamp: 1682449093693, + type: 3, + data: {}, + windowId: '1', + }, + { + timestamp: 1682449093469, + type: 4, + data: { + href: 'http://127.0.0.1:8000/the/url', + }, + windowId: '1', + }, + ], + }, + expected: { + click_count: 0, + keypress_count: 0, + mouse_activity_count: 0, + first_url: 'http://127.0.0.1:8000/the/url', + first_timestamp: '2023-04-25 18:58:13.469', + last_timestamp: '2023-04-25 18:58:13.693', + active_milliseconds: 0, // no data.source, so no activity + console_log_count: 0, + console_warn_count: 0, + console_error_count: 0, + size: 163, + event_count: 2, + message_count: 1, + snapshot_source: 'web', + }, + }, + { + testDescription: 'first url detection takes the first url whether meta url or payload url', snapshotData: { events_summary: [ { @@ -189,7 +226,6 @@ describe('session recording process event', () => { type: 5, data: { payload: { - // doesn't match because href is nested in payload href: 'http://127.0.0.1:8000/home', }, }, @@ -209,7 +245,7 @@ describe('session recording process event', () => { click_count: 0, keypress_count: 0, mouse_activity_count: 0, - first_url: 'http://127.0.0.1:8000/second/url', + first_url: 'http://127.0.0.1:8000/home', first_timestamp: '2023-04-25 18:58:13.469', last_timestamp: '2023-04-25 18:58:13.693', active_milliseconds: 0, // no data.source, so no activity @@ -222,6 +258,51 @@ describe('session recording process event', () => { snapshot_source: 'web', }, }, + { + testDescription: 'first url detection can use payload url', + snapshotData: { + events_summary: [ + { + timestamp: 1682449093469, + type: 5, + data: { + payload: { + // we don't read just any URL + 'the-page-url': 'http://127.0.0.1:8000/second/url', + }, + }, + windowId: '1', + }, + { + timestamp: 1682449093693, + type: 5, + data: { + payload: { + // matches href nested in payload + href: 'http://127.0.0.1:8000/my-spa', + }, + }, + windowId: '1', + }, + ], + }, + expected: { + click_count: 0, + keypress_count: 0, + mouse_activity_count: 0, + first_url: 'http://127.0.0.1:8000/my-spa', + first_timestamp: '2023-04-25 18:58:13.469', + last_timestamp: '2023-04-25 18:58:13.693', + active_milliseconds: 0, // no data.source, so no activity + console_log_count: 0, + console_warn_count: 0, + console_error_count: 0, + size: 235, + event_count: 2, + message_count: 1, + snapshot_source: 'web', + }, + }, { testDescription: 'negative timestamps are not included when picking timestamps', snapshotData: { diff --git a/posthog/api/property_definition.py b/posthog/api/property_definition.py index 6b37519ac1960..9ffa473189243 100644 --- a/posthog/api/property_definition.py +++ b/posthog/api/property_definition.py @@ -4,14 +4,8 @@ from django.db import connection from django.db.models import Prefetch -from rest_framework import ( - mixins, - serializers, - viewsets, - status, - request, - response, -) +from loginas.utils import is_impersonated_session +from rest_framework import mixins, request, response, serializers, status, viewsets from rest_framework.decorators import action from rest_framework.exceptions import ValidationError from rest_framework.pagination import LimitOffsetPagination @@ -23,10 +17,9 @@ from posthog.event_usage import report_user_action from posthog.exceptions import EnterpriseFeatureException from posthog.filters import TermSearchFilterBackend, term_search_filter_sql -from posthog.models import PropertyDefinition, TaggedItem, User, EventProperty -from 
posthog.models.activity_logging.activity_log import log_activity, Detail +from posthog.models import EventProperty, PropertyDefinition, TaggedItem, User +from posthog.models.activity_logging.activity_log import Detail, log_activity from posthog.models.utils import UUIDT -from loginas.utils import is_impersonated_session class SeenTogetherQuerySerializer(serializers.Serializer): @@ -245,9 +238,11 @@ def with_excluded_properties(self, excluded_properties: Optional[str], type: str ) return dataclasses.replace( self, - excluded_properties_filter=f"AND {self.property_definition_table}.name NOT IN %(excluded_properties)s" - if len(excluded_list) > 0 - else "", + excluded_properties_filter=( + f"AND {self.property_definition_table}.name NOT IN %(excluded_properties)s" + if len(excluded_list) > 0 + else "" + ), params={ **self.params, "excluded_properties": excluded_list, @@ -580,7 +575,7 @@ def get_object(self): def list(self, request, *args, **kwargs): return super().list(request, *args, **kwargs) - @action(methods=["GET"], detail=False) + @action(methods=["GET"], detail=False, required_scopes=["property_definition:read"]) def seen_together(self, request: request.Request, *args: Any, **kwargs: Any) -> response.Response: """ Allows a caller to provide a list of event names and a single property name diff --git a/posthog/api/team.py b/posthog/api/team.py index 15ab9bae71a0d..1b615bd692643 100644 --- a/posthog/api/team.py +++ b/posthog/api/team.py @@ -63,24 +63,19 @@ def has_permission(self, request: request.Request, view) -> bool: except ValueError: return False - # if we're not requesting to make a demo project - # and if the org already has more than 1 non-demo project (need to be able to make the initial project) - # and the org isn't allowed to make multiple projects - if ( - ("is_demo" not in request.data or not request.data["is_demo"]) - and organization.teams.exclude(is_demo=True).count() >= 1 - and not organization.is_feature_available(AvailableFeature.ORGANIZATIONS_PROJECTS) - ): - return False - - # if we ARE requesting to make a demo project - # but the org already has a demo project - if ( - "is_demo" in request.data - and request.data["is_demo"] - and organization.teams.exclude(is_demo=False).count() > 0 - ): - return False + if not request.data.get("is_demo"): + # if we're not requesting to make a demo project + # and if the org already has more than 1 non-demo project (need to be able to make the initial project) + # and the org isn't allowed to make multiple projects + if organization.teams.exclude(is_demo=True).count() >= 1 and not organization.is_feature_available( + AvailableFeature.ORGANIZATIONS_PROJECTS + ): + return False + else: + # if we ARE requesting to make a demo project + # but the org already has a demo project + if organization.teams.filter(is_demo=True).count() > 0: + return False # in any other case, we're good to go return True diff --git a/posthog/api/test/__snapshots__/test_action.ambr b/posthog/api/test/__snapshots__/test_action.ambr index 74d16c63216a4..3b71cb1191f91 100644 --- a/posthog/api/test/__snapshots__/test_action.ambr +++ b/posthog/api/test/__snapshots__/test_action.ambr @@ -35,6 +35,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -196,6 +197,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", 
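The restructured has_permission above encodes two independent rules, one per value of is_demo. A hedged restatement as a pure function, reducing the organization to plain counts plus a boolean for the ORGANIZATIONS_PROJECTS feature (can_create_project and its parameters are illustrative, not PostHog code):

def can_create_project(is_demo: bool, non_demo_count: int, demo_count: int, multi_project_allowed: bool) -> bool:
    if not is_demo:
        # The initial project is always allowed; any further non-demo
        # project requires the multi-project feature.
        return non_demo_count < 1 or multi_project_allowed
    # Demo projects are capped at one per organization.
    return demo_count == 0

assert can_create_project(False, 0, 0, False)      # the org's first project
assert not can_create_project(False, 1, 0, False)  # second project, no feature
assert can_create_project(False, 1, 0, True)       # second project, with feature
assert not can_create_project(True, 0, 1, True)    # a demo project already exists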
"posthog_team"."app_urls", "posthog_team"."name", @@ -538,6 +540,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", diff --git a/posthog/api/test/__snapshots__/test_annotation.ambr b/posthog/api/test/__snapshots__/test_annotation.ambr index 62159d2f01b27..b75f8a9661f7e 100644 --- a/posthog/api/test/__snapshots__/test_annotation.ambr +++ b/posthog/api/test/__snapshots__/test_annotation.ambr @@ -35,6 +35,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -118,6 +119,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -454,6 +456,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", diff --git a/posthog/api/test/__snapshots__/test_decide.ambr b/posthog/api/test/__snapshots__/test_decide.ambr index 32ba4e38c4500..1bb6ffa074d20 100644 --- a/posthog/api/test/__snapshots__/test_decide.ambr +++ b/posthog/api/test/__snapshots__/test_decide.ambr @@ -35,6 +35,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -284,6 +285,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -445,6 +447,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -599,6 +602,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", diff --git a/posthog/api/test/__snapshots__/test_early_access_feature.ambr b/posthog/api/test/__snapshots__/test_early_access_feature.ambr index 4db55d14a92a2..7a472bdce6edc 100644 --- a/posthog/api/test/__snapshots__/test_early_access_feature.ambr +++ b/posthog/api/test/__snapshots__/test_early_access_feature.ambr @@ -4,6 +4,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -140,6 +141,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", diff --git a/posthog/api/test/__snapshots__/test_element.ambr b/posthog/api/test/__snapshots__/test_element.ambr index 0729c028fec5f..67722a01933c0 100644 --- a/posthog/api/test/__snapshots__/test_element.ambr +++ b/posthog/api/test/__snapshots__/test_element.ambr @@ -35,6 +35,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", diff --git 
a/posthog/api/test/__snapshots__/test_feature_flag.ambr b/posthog/api/test/__snapshots__/test_feature_flag.ambr index 615477721a72f..2d11fc4500367 100644 --- a/posthog/api/test/__snapshots__/test_feature_flag.ambr +++ b/posthog/api/test/__snapshots__/test_feature_flag.ambr @@ -444,6 +444,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -645,6 +646,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1006,6 +1008,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1143,6 +1146,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1437,6 +1441,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1527,6 +1532,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1616,6 +1622,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1675,6 +1682,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", diff --git a/posthog/api/test/__snapshots__/test_insight.ambr b/posthog/api/test/__snapshots__/test_insight.ambr index 687de0f873767..5bdf7b792790b 100644 --- a/posthog/api/test/__snapshots__/test_insight.ambr +++ b/posthog/api/test/__snapshots__/test_insight.ambr @@ -674,6 +674,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -726,6 +727,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -854,6 +856,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1092,6 +1095,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1240,6 +1244,7 @@ "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1373,6 +1378,7 @@ "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1485,6 
+1491,7 @@ "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1614,6 +1621,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1701,6 +1709,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1787,6 +1796,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1846,6 +1856,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", diff --git a/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr b/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr index c16bf238a6045..38996ee675b74 100644 --- a/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr +++ b/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr @@ -84,6 +84,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -191,6 +192,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -278,6 +280,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -489,6 +492,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -617,6 +621,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -765,6 +770,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -852,6 +858,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1069,6 +1076,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1197,6 +1205,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1249,6 +1258,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1397,6 
+1407,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1671,6 +1682,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", diff --git a/posthog/api/test/__snapshots__/test_preflight.ambr b/posthog/api/test/__snapshots__/test_preflight.ambr index b5686cfb79935..b4c8a06aad816 100644 --- a/posthog/api/test/__snapshots__/test_preflight.ambr +++ b/posthog/api/test/__snapshots__/test_preflight.ambr @@ -46,6 +46,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", diff --git a/posthog/api/test/__snapshots__/test_survey.ambr b/posthog/api/test/__snapshots__/test_survey.ambr index 41c085776554e..97493427f7527 100644 --- a/posthog/api/test/__snapshots__/test_survey.ambr +++ b/posthog/api/test/__snapshots__/test_survey.ambr @@ -114,6 +114,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", diff --git a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr index da4204fc6df38..642602f396f8d 100644 --- a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr +++ b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr @@ -35,6 +35,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -164,6 +165,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -290,6 +292,7 @@ "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -509,6 +512,7 @@ "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -681,6 +685,7 @@ "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -863,6 +868,7 @@ "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1036,6 +1042,7 @@ "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1306,6 +1313,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1365,6 +1373,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", 
"posthog_team"."app_urls", "posthog_team"."name", @@ -1526,6 +1535,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1638,6 +1648,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1697,6 +1708,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1854,6 +1866,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1982,6 +1995,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -2246,6 +2260,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -2490,6 +2505,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -2624,6 +2640,7 @@ "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -2750,6 +2767,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -2873,6 +2891,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -2974,6 +2993,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -3126,6 +3146,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -3224,6 +3245,7 @@ "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -3353,6 +3375,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -3478,6 +3501,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -3614,6 +3638,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -3940,6 +3965,7 @@ SELECT "posthog_team"."id", 
"posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -4102,6 +4128,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -4242,6 +4269,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -4329,6 +4357,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -4492,6 +4521,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -4551,6 +4581,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -4676,6 +4707,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -4830,6 +4862,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -5261,6 +5294,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -5408,6 +5442,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -5495,6 +5530,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -5620,6 +5656,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -5706,6 +5743,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -5765,6 +5803,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -5890,6 +5929,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -6032,6 +6072,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -6196,6 +6237,7 @@ "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", 
"posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -6613,6 +6655,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -6770,6 +6813,7 @@ "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -6957,6 +7001,7 @@ "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -7129,6 +7174,7 @@ "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -7272,6 +7318,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -7363,6 +7410,7 @@ "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -7536,6 +7584,7 @@ "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -8182,6 +8231,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -8445,6 +8495,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -8608,6 +8659,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -8667,6 +8719,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -8792,6 +8845,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -8946,6 +9000,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -9071,6 +9126,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -9208,6 +9264,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -9350,6 +9407,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -9663,6 +9721,7 @@ SELECT "posthog_team"."id", 
"posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -9821,6 +9880,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -9929,6 +9989,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -10062,6 +10123,7 @@ "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -10371,6 +10433,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -10508,6 +10571,7 @@ "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -10695,6 +10759,7 @@ "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -10860,6 +10925,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -10965,6 +11031,7 @@ "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -11133,6 +11200,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -11319,6 +11387,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -11431,6 +11500,7 @@ "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -11599,6 +11669,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -11744,6 +11815,7 @@ "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -11962,6 +12034,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", diff --git a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr index 7677f829e61c5..eaa59d4e760a8 100644 --- a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr +++ b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr @@ -35,6 +35,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", 
"posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -171,6 +172,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -394,6 +396,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -501,6 +504,7 @@ "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", diff --git a/posthog/api/test/test_signup.py b/posthog/api/test/test_signup.py index d4e71415b4569..e62be1ffd4893 100644 --- a/posthog/api/test/test_signup.py +++ b/posthog/api/test/test_signup.py @@ -78,7 +78,7 @@ def test_api_sign_up(self, mock_capture): self.assertFalse(user.is_email_verified) # Assert that the team was properly created - self.assertEqual(team.name, "Default Project") + self.assertEqual(team.name, "Default project") # Assert that the org was properly created self.assertEqual(organization.name, "Hedgehogs United, LLC") diff --git a/posthog/api/test/test_team.py b/posthog/api/test/test_team.py index 6b336ee1f15c7..d23efe81cf7d8 100644 --- a/posthog/api/test/test_team.py +++ b/posthog/api/test/test_team.py @@ -87,7 +87,7 @@ def test_retrieve_project(self): def test_cant_retrieve_project_from_another_org(self): org = Organization.objects.create(name="New Org") - team = Team.objects.create(organization=org, name="Default Project") + team = Team.objects.create(organization=org, name="Default project") response = self.client.get(f"/api/projects/{team.pk}/") self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -131,9 +131,9 @@ def test_cant_create_team_without_license_on_selfhosted(self): def test_cant_create_a_second_project_without_license(self): self.organization_membership.level = OrganizationMembership.Level.ADMIN self.organization_membership.save() - response = self.client.post("/api/projects/", {"name": "Hedgebox", "is_demo": False}) - self.assertEqual(Team.objects.count(), 1) + + response = self.client.post("/api/projects/", {"name": "Hedgebox", "is_demo": False}) self.assertEqual(response.status_code, 403) response_data = response.json() self.assertDictContainsSubset( @@ -144,10 +144,10 @@ def test_cant_create_a_second_project_without_license(self): }, response_data, ) + self.assertEqual(Team.objects.count(), 1) # another request without the is_demo parameter response = self.client.post("/api/projects/", {"name": "Hedgebox"}) - self.assertEqual(Team.objects.count(), 1) self.assertEqual(response.status_code, 403) response_data = response.json() self.assertDictContainsSubset( @@ -158,6 +158,7 @@ def test_cant_create_a_second_project_without_license(self): }, response_data, ) + self.assertEqual(Team.objects.count(), 1) @freeze_time("2022-02-08") def test_update_project_timezone(self): @@ -188,7 +189,7 @@ def test_update_project_timezone(self): "type": "Team", }, ], - "name": "Default Project", + "name": "Default project", "short_id": None, "trigger": None, "type": None, @@ -231,7 +232,7 @@ def test_cannot_set_invalid_timezone_for_project(self): def test_cant_update_project_from_another_org(self): org = Organization.objects.create(name="New Org") - team = Team.objects.create(organization=org, 
name="Default Project") + team = Team.objects.create(organization=org, name="Default project") response = self.client.patch(f"/api/projects/{team.pk}/", {"timezone": "Africa/Accra"}) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -282,7 +283,7 @@ def test_delete_team_activity_log(self, mock_capture: MagicMock, mock_delete_bul "created_at": ANY, "detail": { "changes": None, - "name": "Default Project", + "name": "Default project", "short_id": None, "trigger": None, "type": None, @@ -458,7 +459,7 @@ def test_reset_token(self): "type": "Team", }, ], - "name": "Default Project", + "name": "Default project", "short_id": None, "trigger": None, "type": None, @@ -492,7 +493,7 @@ def test_update_primary_dashboard(self): self.assertEqual(response_data["primary_dashboard"], d.id) def test_cant_set_primary_dashboard_to_another_teams_dashboard(self): - team_2 = Team.objects.create(organization=self.organization, name="Default Project") + team_2 = Team.objects.create(organization=self.organization, name="Default project") d = Dashboard.objects.create(name="Test", team=team_2) response = self.client.patch("/api/projects/@current/", {"primary_dashboard": d.id}) @@ -565,7 +566,7 @@ def test_team_float_config_can_be_serialized_to_activity_log(self): "type": "Team", }, ], - "name": "Default Project", + "name": "Default project", "short_id": None, "trigger": None, "type": None, diff --git a/posthog/batch_exports/http.py b/posthog/batch_exports/http.py index 5e84d7f446b3d..dd833f521c4f3 100644 --- a/posthog/batch_exports/http.py +++ b/posthog/batch_exports/http.py @@ -28,11 +28,9 @@ BatchExportSchema, BatchExportServiceError, BatchExportServiceRPCError, - BatchExportServiceScheduleNotFound, BatchExportWithNoEndNotAllowedError, backfill_export, - batch_export_delete_schedule, - cancel_running_batch_export_backfill, + disable_and_delete_export, pause_batch_export, sync_batch_export, unpause_batch_export, @@ -43,7 +41,6 @@ from posthog.hogql.printer import prepare_ast_for_printing, print_prepared_ast from posthog.models import ( BatchExport, - BatchExportBackfill, BatchExportDestination, BatchExportRun, Team, @@ -436,23 +433,7 @@ def perform_destroy(self, instance: BatchExport): since we are deleting, we assume that we can recover from this state by finishing the delete operation by calling instance.save(). """ - temporal = sync_connect() - - instance.deleted = True - - try: - batch_export_delete_schedule(temporal, str(instance.pk)) - except BatchExportServiceScheduleNotFound as e: - logger.warning( - "The Schedule %s could not be deleted as it was not found", - e.schedule_id, - ) - - instance.save() - - for backfill in BatchExportBackfill.objects.filter(batch_export=instance): - if backfill.status == BatchExportBackfill.Status.RUNNING: - cancel_running_batch_export_backfill(temporal, backfill.workflow_id) + disable_and_delete_export(instance) class BatchExportOrganizationViewSet(BatchExportViewSet): diff --git a/posthog/batch_exports/service.py b/posthog/batch_exports/service.py index 4930665d13f6d..f3d5715220bf3 100644 --- a/posthog/batch_exports/service.py +++ b/posthog/batch_exports/service.py @@ -3,6 +3,7 @@ from dataclasses import asdict, dataclass, fields from uuid import UUID +import structlog import temporalio from asgiref.sync import async_to_sync from temporalio.client import ( @@ -32,6 +33,8 @@ update_schedule, ) +logger = structlog.get_logger(__name__) + class BatchExportField(typing.TypedDict): """A field to be queried from ClickHouse. 
@@ -291,6 +294,27 @@ def unpause_batch_export( backfill_export(temporal, batch_export_id, batch_export.team_id, start_at, end_at) +def disable_and_delete_export(instance: BatchExport): + """Mark a BatchExport as deleted and delete its Temporal Schedule (including backfills).""" + temporal = sync_connect() + + instance.deleted = True + + try: + batch_export_delete_schedule(temporal, str(instance.pk)) + except BatchExportServiceScheduleNotFound as e: + logger.warning( + "The Schedule %s could not be deleted as it was not found", + e.schedule_id, + ) + + instance.save() + + for backfill in BatchExportBackfill.objects.filter(batch_export=instance): + if backfill.status == BatchExportBackfill.Status.RUNNING: + cancel_running_batch_export_backfill(temporal, backfill.workflow_id) + + def batch_export_delete_schedule(temporal: Client, schedule_id: str) -> None: """Delete a Temporal Schedule.""" try: diff --git a/posthog/clickhouse/test/__snapshots__/test_schema.ambr b/posthog/clickhouse/test/__snapshots__/test_schema.ambr index b40f78c801e09..74540ceca8b78 100644 --- a/posthog/clickhouse/test/__snapshots__/test_schema.ambr +++ b/posthog/clickhouse/test/__snapshots__/test_schema.ambr @@ -1713,7 +1713,7 @@ sumIf(1, event='$autocapture') as autocapture_count FROM posthog_test.sharded_events - WHERE `$session_id` IS NOT NULL AND `$session_id` != '' AND toStartOfDay(timestamp) >= '2024-03-08' + WHERE `$session_id` IS NOT NULL AND `$session_id` != '' GROUP BY `$session_id`, team_id ''' diff --git a/posthog/hogql/database/database.py b/posthog/hogql/database/database.py index 72fef04af9dd8..e78df97c56823 100644 --- a/posthog/hogql/database/database.py +++ b/posthog/hogql/database/database.py @@ -44,6 +44,7 @@ RawSessionReplayEventsTable, SessionReplayEventsTable, ) +from posthog.hogql.database.schema.sessions import RawSessionsTable, SessionsTable from posthog.hogql.database.schema.static_cohort_people import StaticCohortPeople from posthog.hogql.errors import HogQLException from posthog.hogql.parser import parse_expr @@ -72,6 +73,7 @@ class Database(BaseModel): log_entries: LogEntriesTable = LogEntriesTable() console_logs_log_entries: ReplayConsoleLogsLogEntriesTable = ReplayConsoleLogsLogEntriesTable() batch_export_log_entries: BatchExportLogEntriesTable = BatchExportLogEntriesTable() + sessions: SessionsTable = SessionsTable() raw_session_replay_events: RawSessionReplayEventsTable = RawSessionReplayEventsTable() raw_person_distinct_ids: RawPersonDistinctIdsTable = RawPersonDistinctIdsTable() @@ -79,6 +81,7 @@ class Database(BaseModel): raw_groups: RawGroupsTable = RawGroupsTable() raw_cohort_people: RawCohortPeople = RawCohortPeople() raw_person_overrides: RawPersonOverridesTable = RawPersonOverridesTable() + raw_sessions: RawSessionsTable = RawSessionsTable() # system tables numbers: NumbersTable = NumbersTable() @@ -94,6 +97,7 @@ class Database(BaseModel): "cohortpeople", "person_static_cohort", "log_entries", + "sessions", ] _warehouse_table_names: List[str] = [] diff --git a/posthog/hogql/database/schema/channel_type.py b/posthog/hogql/database/schema/channel_type.py index 702681aeeb29f..5dee575fc59a3 100644 --- a/posthog/hogql/database/schema/channel_type.py +++ b/posthog/hogql/database/schema/channel_type.py @@ -41,8 +41,29 @@ def create_initial_domain_type(name: str): def create_initial_channel_type(name: str): return ExpressionField( name=name, - expr=parse_expr( - """ + expr=create_channel_type_expr( + campaign=ast.Call(name="toString", args=[ast.Field(chain=["properties", 
"$initial_utm_campaign"])]), + medium=ast.Call(name="toString", args=[ast.Field(chain=["properties", "$initial_utm_medium"])]), + source=ast.Call(name="toString", args=[ast.Field(chain=["properties", "$initial_utm_source"])]), + referring_domain=ast.Call( + name="toString", args=[ast.Field(chain=["properties", "$initial_referring_domain"])] + ), + gclid=ast.Call(name="toString", args=[ast.Field(chain=["properties", "$initial_gclid"])]), + gad_source=ast.Call(name="toString", args=[ast.Field(chain=["properties", "$initial_gad_source"])]), + ), + ) + + +def create_channel_type_expr( + campaign: ast.Expr, + medium: ast.Expr, + source: ast.Expr, + referring_domain: ast.Expr, + gclid: ast.Expr, + gad_source: ast.Expr, +) -> ast.Expr: + return parse_expr( + """ multiIf( match({campaign}, 'cross-network'), 'Cross Network', @@ -99,16 +120,13 @@ def create_initial_channel_type(name: str): ) ) )""", - start=None, - placeholders={ - "campaign": ast.Call(name="toString", args=[ast.Field(chain=["properties", "$initial_utm_campaign"])]), - "medium": ast.Call(name="toString", args=[ast.Field(chain=["properties", "$initial_utm_medium"])]), - "source": ast.Call(name="toString", args=[ast.Field(chain=["properties", "$initial_utm_source"])]), - "referring_domain": ast.Call( - name="toString", args=[ast.Field(chain=["properties", "$initial_referring_domain"])] - ), - "gclid": ast.Call(name="toString", args=[ast.Field(chain=["properties", "$initial_gclid"])]), - "gad_source": ast.Call(name="toString", args=[ast.Field(chain=["properties", "$initial_gad_source"])]), - }, - ), + start=None, + placeholders={ + "campaign": campaign, + "medium": medium, + "source": source, + "referring_domain": referring_domain, + "gclid": gclid, + "gad_source": gad_source, + }, ) diff --git a/posthog/hogql/database/schema/sessions.py b/posthog/hogql/database/schema/sessions.py new file mode 100644 index 0000000000000..2a4865798eeb8 --- /dev/null +++ b/posthog/hogql/database/schema/sessions.py @@ -0,0 +1,158 @@ +from typing import Dict, List, cast + +from posthog.hogql.database.models import ( + StringDatabaseField, + DateTimeDatabaseField, + IntegerDatabaseField, + Table, + FieldOrTable, + StringArrayDatabaseField, + DatabaseField, + LazyTable, +) +from posthog.hogql.database.schema.channel_type import create_channel_type_expr +from posthog.schema import HogQLQueryModifiers + + +SESSIONS_COMMON_FIELDS: Dict[str, FieldOrTable] = { + "session_id": StringDatabaseField(name="session_id"), + "team_id": IntegerDatabaseField(name="team_id"), + "distinct_id": StringDatabaseField(name="distinct_id"), + "min_timestamp": DateTimeDatabaseField(name="min_timestamp"), + "max_timestamp": DateTimeDatabaseField(name="max_timestamp"), + "urls": StringArrayDatabaseField(name="urls"), + "entry_url": DatabaseField(name="entry_url"), + "exit_url": DatabaseField(name="exit_url"), + "initial_utm_source": DatabaseField(name="initial_utm_source"), + "initial_utm_campaign": DatabaseField(name="initial_utm_campaign"), + "initial_utm_medium": DatabaseField(name="initial_utm_medium"), + "initial_utm_term": DatabaseField(name="initial_utm_term"), + "initial_utm_content": DatabaseField(name="initial_utm_content"), + "initial_referring_domain": DatabaseField(name="initial_referring_domain"), + "initial_gclid": DatabaseField(name="initial_gclid"), + "initial_gad_source": DatabaseField(name="initial_gad_source"), + "event_count_map": DatabaseField(name="event_count_map"), + "pageview_count": IntegerDatabaseField(name="pageview_count"), + "autocapture_count": 
IntegerDatabaseField(name="autocapture_count"), +} + + +class RawSessionsTable(Table): + fields: Dict[str, FieldOrTable] = SESSIONS_COMMON_FIELDS + + def to_printed_clickhouse(self, context): + return "sessions" + + def to_printed_hogql(self): + return "raw_sessions" + + def avoid_asterisk_fields(self) -> List[str]: + # our clickhouse driver can't return aggregate states + return [ + "entry_url", + "exit_url", + "initial_utm_source", + "initial_utm_campaign", + "initial_utm_medium", + "initial_utm_term", + "initial_utm_content", + "initial_referring_domain", + "initial_gclid", + "initial_gad_source", + ] + + +def select_from_sessions_table(requested_fields: Dict[str, List[str | int]]): + from posthog.hogql import ast + + table_name = "raw_sessions" + + aggregate_fields = { + "distinct_id": ast.Call(name="any", args=[ast.Field(chain=[table_name, "distinct_id"])]), + "min_timestamp": ast.Call(name="min", args=[ast.Field(chain=[table_name, "min_timestamp"])]), + "max_timestamp": ast.Call(name="max", args=[ast.Field(chain=[table_name, "max_timestamp"])]), + "urls": ast.Call( + name="arrayDistinct", + args=[ + ast.Call( + name="arrayFlatten", + args=[ast.Call(name="groupArray", args=[ast.Field(chain=[table_name, "urls"])])], + ) + ], + ), + "entry_url": ast.Call(name="argMinMerge", args=[ast.Field(chain=[table_name, "entry_url"])]), + "exit_url": ast.Call(name="argMaxMerge", args=[ast.Field(chain=[table_name, "exit_url"])]), + "initial_utm_source": ast.Call(name="argMinMerge", args=[ast.Field(chain=[table_name, "initial_utm_source"])]), + "initial_utm_campaign": ast.Call( + name="argMinMerge", args=[ast.Field(chain=[table_name, "initial_utm_campaign"])] + ), + "initial_utm_medium": ast.Call(name="argMinMerge", args=[ast.Field(chain=[table_name, "initial_utm_medium"])]), + "initial_utm_term": ast.Call(name="argMinMerge", args=[ast.Field(chain=[table_name, "initial_utm_term"])]), + "initial_utm_content": ast.Call( + name="argMinMerge", args=[ast.Field(chain=[table_name, "initial_utm_content"])] + ), + "initial_referring_domain": ast.Call( + name="argMinMerge", args=[ast.Field(chain=[table_name, "initial_referring_domain"])] + ), + "initial_gclid": ast.Call(name="argMinMerge", args=[ast.Field(chain=[table_name, "initial_gclid"])]), + "initial_gad_source": ast.Call(name="argMinMerge", args=[ast.Field(chain=[table_name, "initial_gad_source"])]), + "event_count_map": ast.Call( + name="sumMap", + args=[ast.Field(chain=[table_name, "event_count_map"])], + ), + "pageview_count": ast.Call(name="sum", args=[ast.Field(chain=[table_name, "pageview_count"])]), + "autocapture_count": ast.Call(name="sum", args=[ast.Field(chain=[table_name, "autocapture_count"])]), + "duration": ast.Call( + name="dateDiff", + args=[ + ast.Constant(value="second"), + ast.Call(name="min", args=[ast.Field(chain=[table_name, "min_timestamp"])]), + ast.Call(name="max", args=[ast.Field(chain=[table_name, "max_timestamp"])]), + ], + ), + "channel_type": create_channel_type_expr( + campaign=ast.Call(name="argMinMerge", args=[ast.Field(chain=[table_name, "initial_utm_campaign"])]), + medium=ast.Call(name="argMinMerge", args=[ast.Field(chain=[table_name, "initial_utm_medium"])]), + source=ast.Call(name="argMinMerge", args=[ast.Field(chain=[table_name, "initial_utm_source"])]), + referring_domain=ast.Call( + name="argMinMerge", args=[ast.Field(chain=[table_name, "initial_referring_domain"])] + ), + gclid=ast.Call(name="argMinMerge", args=[ast.Field(chain=[table_name, "initial_gclid"])]), + gad_source=ast.Call(name="argMinMerge", 
args=[ast.Field(chain=[table_name, "initial_gad_source"])]), + ), + } + + select_fields: List[ast.Expr] = [] + group_by_fields: List[ast.Expr] = [ast.Field(chain=[table_name, "session_id"])] + + for name, chain in requested_fields.items(): + if name in aggregate_fields: + select_fields.append(ast.Alias(alias=name, expr=aggregate_fields[name])) + else: + select_fields.append( + ast.Alias(alias=name, expr=ast.Field(chain=cast(list[str | int], [table_name]) + chain)) + ) + group_by_fields.append(ast.Field(chain=cast(list[str | int], [table_name]) + chain)) + + return ast.SelectQuery( + select=select_fields, + select_from=ast.JoinExpr(table=ast.Field(chain=[table_name])), + group_by=group_by_fields, + ) + + +class SessionsTable(LazyTable): + fields: Dict[str, FieldOrTable] = { + **SESSIONS_COMMON_FIELDS, + "duration": IntegerDatabaseField(name="duration"), + "channel_type": StringDatabaseField(name="channel_type"), + } + + def lazy_select(self, requested_fields: Dict[str, List[str | int]], modifiers: HogQLQueryModifiers): + return select_from_sessions_table(requested_fields) + + def to_printed_clickhouse(self, context): + return "sessions" + + def to_printed_hogql(self): + return "sessions" diff --git a/posthog/hogql/database/schema/test/test_sessions.py b/posthog/hogql/database/schema/test/test_sessions.py new file mode 100644 index 0000000000000..dc3ba50b5be60 --- /dev/null +++ b/posthog/hogql/database/schema/test/test_sessions.py @@ -0,0 +1,57 @@ +from posthog.hogql import ast +from posthog.hogql.parser import parse_select +from posthog.hogql.query import execute_hogql_query +from posthog.test.base import ( + APIBaseTest, + ClickhouseTestMixin, + _create_event, +) + + +class TestReferringDomainType(ClickhouseTestMixin, APIBaseTest): + def test_select_star(self): + session_id = "session_test_select_star" + + _create_event( + event="$pageview", + team=self.team, + distinct_id="d1", + properties={"$current_url": "https://example.com", "$session_id": session_id}, + ) + + response = execute_hogql_query( + parse_select( + "select * from sessions where session_id = {session_id}", + placeholders={"session_id": ast.Constant(value=session_id)}, + ), + self.team, + ) + + self.assertEqual( + len(response.results or []), + 1, + ) + + def test_channel_type(self): + session_id = "session_test_channel_type" + + _create_event( + event="$pageview", + team=self.team, + distinct_id="d1", + properties={"gad_source": "1", "$session_id": session_id}, + ) + + response = execute_hogql_query( + parse_select( + "select channel_type from sessions where session_id = {session_id}", + placeholders={"session_id": ast.Constant(value=session_id)}, + ), + self.team, + ) + + result = (response.results or [])[0] + self.assertEqual( + result[0], + "Paid Search", + ) diff --git a/posthog/hogql/database/test/__snapshots__/test_database.ambr b/posthog/hogql/database/test/__snapshots__/test_database.ambr index 7823b80094700..21c60457a1fd3 100644 --- a/posthog/hogql/database/test/__snapshots__/test_database.ambr +++ b/posthog/hogql/database/test/__snapshots__/test_database.ambr @@ -535,6 +535,44 @@ "type": "string" } ], + "sessions": [ + { + "key": "session_id", + "type": "string" + }, + { + "key": "distinct_id", + "type": "string" + }, + { + "key": "min_timestamp", + "type": "datetime" + }, + { + "key": "max_timestamp", + "type": "datetime" + }, + { + "key": "urls", + "type": "array" + }, + { + "key": "pageview_count", + "type": "integer" + }, + { + "key": "autocapture_count", + "type": "integer" + }, + { + "key": "duration", + 
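Tying the lazy table together: a query against sessions is rewritten by lazy_select / select_from_sessions_table into a GROUP BY session_id over raw_sessions, with every requested field replaced by its aggregate; duration and channel_type exist only in this rewritten form. A usage sketch in the spirit of the tests above (team stands for any posthog Team instance):

from posthog.hogql.parser import parse_select
from posthog.hogql.query import execute_hogql_query

# duration and channel_type never exist on raw_sessions: the lazy table
# computes them per session (dateDiff over min/max timestamps, and
# create_channel_type_expr over the merged initial_* attribution state).
query = parse_select("select session_id, duration, channel_type from sessions")
# response = execute_hogql_query(query, team)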
"type": "integer" + }, + { + "key": "channel_type", + "type": "string" + } + ], "raw_session_replay_events": [ { "key": "session_id", @@ -770,6 +808,36 @@ "type": "integer" } ], + "raw_sessions": [ + { + "key": "session_id", + "type": "string" + }, + { + "key": "distinct_id", + "type": "string" + }, + { + "key": "min_timestamp", + "type": "datetime" + }, + { + "key": "max_timestamp", + "type": "datetime" + }, + { + "key": "urls", + "type": "array" + }, + { + "key": "pageview_count", + "type": "integer" + }, + { + "key": "autocapture_count", + "type": "integer" + } + ], "numbers": [ { "key": "number", @@ -1310,6 +1378,44 @@ "type": "string" } ], + "sessions": [ + { + "key": "session_id", + "type": "string" + }, + { + "key": "distinct_id", + "type": "string" + }, + { + "key": "min_timestamp", + "type": "datetime" + }, + { + "key": "max_timestamp", + "type": "datetime" + }, + { + "key": "urls", + "type": "array" + }, + { + "key": "pageview_count", + "type": "integer" + }, + { + "key": "autocapture_count", + "type": "integer" + }, + { + "key": "duration", + "type": "integer" + }, + { + "key": "channel_type", + "type": "string" + } + ], "raw_session_replay_events": [ { "key": "session_id", @@ -1545,6 +1651,36 @@ "type": "integer" } ], + "raw_sessions": [ + { + "key": "session_id", + "type": "string" + }, + { + "key": "distinct_id", + "type": "string" + }, + { + "key": "min_timestamp", + "type": "datetime" + }, + { + "key": "max_timestamp", + "type": "datetime" + }, + { + "key": "urls", + "type": "array" + }, + { + "key": "pageview_count", + "type": "integer" + }, + { + "key": "autocapture_count", + "type": "integer" + } + ], "numbers": [ { "key": "number", diff --git a/posthog/hogql/functions/mapping.py b/posthog/hogql/functions/mapping.py index a70bd37605253..5edf1a68a826a 100644 --- a/posthog/hogql/functions/mapping.py +++ b/posthog/hogql/functions/mapping.py @@ -612,6 +612,7 @@ class HogQLFunctionMeta: "argMax": HogQLFunctionMeta("argMax", 2, 2, aggregate=True), "argMaxIf": HogQLFunctionMeta("argMaxIf", 3, 3, aggregate=True), "argMinMerge": HogQLFunctionMeta("argMinMerge", 1, 1, aggregate=True), + "argMaxMerge": HogQLFunctionMeta("argMaxMerge", 1, 1, aggregate=True), "avgWeighted": HogQLFunctionMeta("avgWeighted", 2, 2, aggregate=True), "avgWeightedIf": HogQLFunctionMeta("avgWeightedIf", 3, 3, aggregate=True), # "topK": HogQLFunctionMeta("topK", 1, 1, aggregate=True), @@ -652,6 +653,7 @@ class HogQLFunctionMeta: "deltaSumTimestampIf": HogQLFunctionMeta("deltaSumTimestampIf", 3, 3, aggregate=True), "sumMap": HogQLFunctionMeta("sumMap", 1, 2, aggregate=True), "sumMapIf": HogQLFunctionMeta("sumMapIf", 2, 3, aggregate=True), + "sumMapMerge": HogQLFunctionMeta("sumMapMerge", 1, 1, aggregate=True), "minMap": HogQLFunctionMeta("minMap", 1, 2, aggregate=True), "minMapIf": HogQLFunctionMeta("minMapIf", 2, 3, aggregate=True), "maxMap": HogQLFunctionMeta("maxMap", 1, 2, aggregate=True), diff --git a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr index 8962eb4593b78..f6eb3748afb2b 100644 --- a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr +++ b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr @@ -93,9 +93,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + 
allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_action_filtering_with_cohort_poe_v2 @@ -180,9 +180,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_breakdown_by_group_props_person_on_events @@ -249,9 +249,9 @@ ORDER BY day_start ASC, breakdown_value ASC) GROUP BY breakdown_value ORDER BY sum(count) DESC, breakdown_value ASC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_breakdown_by_group_props_person_on_events.2 @@ -349,9 +349,9 @@ ORDER BY day_start ASC, breakdown_value ASC) GROUP BY breakdown_value ORDER BY sum(count) DESC, breakdown_value ASC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_breakdown_filtering_with_properties_in_new_format @@ -402,9 +402,9 @@ ORDER BY day_start ASC, breakdown_value ASC) GROUP BY breakdown_value ORDER BY sum(count) DESC, breakdown_value ASC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_breakdown_filtering_with_properties_in_new_format.2 @@ -441,9 +441,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_breakdown_weekly_active_users_aggregated @@ -499,9 +499,9 @@ ORDER BY d.timestamp ASC) WHERE and(ifNull(greaterOrEquals(timestamp, toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-11 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(timestamp, assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-11 23:59:59', 6, 'UTC'))), 0)) GROUP BY breakdown_value - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_breakdown_weekly_active_users_aggregated_materialized @@ -557,9 +557,9 @@ ORDER BY d.timestamp ASC) WHERE and(ifNull(greaterOrEquals(timestamp, toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-11 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(timestamp, assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-11 23:59:59', 6, 'UTC'))), 0)) GROUP BY breakdown_value - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_breakdown_weekly_active_users_daily_based_on_action @@ -692,9 +692,9 @@ ORDER BY day_start ASC, breakdown_value ASC) GROUP BY breakdown_value ORDER BY sum(count) DESC, breakdown_value ASC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + 
allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_breakdown_with_filter_groups_person_on_events @@ -761,9 +761,9 @@ ORDER BY day_start ASC, breakdown_value ASC) GROUP BY breakdown_value ORDER BY sum(count) DESC, breakdown_value ASC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_breakdown_with_filter_groups_person_on_events_v2 @@ -850,9 +850,9 @@ ORDER BY day_start ASC, breakdown_value ASC) GROUP BY breakdown_value ORDER BY sum(count) DESC, breakdown_value ASC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling @@ -910,9 +910,9 @@ ORDER BY day_start ASC, breakdown_value ASC) GROUP BY breakdown_value ORDER BY sum(count) DESC, breakdown_value ASC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.2 @@ -970,9 +970,9 @@ ORDER BY day_start ASC, breakdown_value ASC) GROUP BY breakdown_value ORDER BY sum(count) DESC, breakdown_value ASC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_filter_events_by_precalculated_cohort @@ -1054,9 +1054,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_filter_events_by_precalculated_cohort_poe_v2 @@ -1120,9 +1120,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_filtering_by_multiple_groups_person_on_events @@ -1162,9 +1162,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_filtering_by_multiple_groups_person_on_events.1 @@ -1235,9 +1235,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_mau_with_breakdown_filtering_and_prop_filter @@ -1342,9 +1342,9 @@ ORDER BY day_start ASC, breakdown_value ASC) GROUP BY breakdown_value ORDER BY sum(count) DESC, breakdown_value ASC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: 
TestTrends.test_mau_with_breakdown_filtering_and_prop_filter_poe_v2 @@ -1423,9 +1423,9 @@ ORDER BY day_start ASC, breakdown_value ASC) GROUP BY breakdown_value ORDER BY sum(count) DESC, breakdown_value ASC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_non_deterministic_timezones @@ -1449,9 +1449,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_person_filtering_in_cohort_in_action @@ -1546,9 +1546,9 @@ ORDER BY day_start ASC, breakdown_value ASC) GROUP BY breakdown_value ORDER BY sum(count) DESC, breakdown_value ASC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_person_filtering_in_cohort_in_action_poe_v2 @@ -1641,9 +1641,9 @@ ORDER BY day_start ASC, breakdown_value ASC) GROUP BY breakdown_value ORDER BY sum(count) DESC, breakdown_value ASC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_person_property_filtering @@ -1685,9 +1685,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_person_property_filtering_clashing_with_event_property @@ -1729,9 +1729,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_person_property_filtering_clashing_with_event_property.1 @@ -1755,9 +1755,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_person_property_filtering_clashing_with_event_property_materialized @@ -1799,9 +1799,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_person_property_filtering_clashing_with_event_property_materialized.1 @@ -1825,9 +1825,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_person_property_filtering_materialized @@ -1869,9 +1869,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS 
readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_same_day_with_person_on_events_v2 @@ -1909,9 +1909,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_same_day_with_person_on_events_v2.2 @@ -1941,9 +1941,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_same_day_with_person_on_events_v2_latest_override @@ -1987,9 +1987,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_same_day_with_person_on_events_v2_latest_override.2 @@ -2033,9 +2033,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_same_day_with_person_on_events_v2_latest_override.4 @@ -2079,9 +2079,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_timezones_daily @@ -2105,9 +2105,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_timezones_daily.1 @@ -2138,9 +2138,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_timezones_daily.2 @@ -2184,9 +2184,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_timezones_daily.3 @@ -2210,9 +2210,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_timezones_daily.4 @@ -2270,9 +2270,9 @@ ORDER BY day_start ASC, breakdown_value ASC) GROUP BY breakdown_value ORDER BY sum(count) DESC, breakdown_value ASC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + 
LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_timezones_daily_minus_utc @@ -2296,9 +2296,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_timezones_daily_minus_utc.1 @@ -2329,9 +2329,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_timezones_daily_minus_utc.2 @@ -2375,9 +2375,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_timezones_daily_minus_utc.3 @@ -2401,9 +2401,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_timezones_daily_minus_utc.4 @@ -2461,9 +2461,9 @@ ORDER BY day_start ASC, breakdown_value ASC) GROUP BY breakdown_value ORDER BY sum(count) DESC, breakdown_value ASC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_timezones_daily_plus_utc @@ -2487,9 +2487,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_timezones_daily_plus_utc.1 @@ -2520,9 +2520,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_timezones_daily_plus_utc.2 @@ -2566,9 +2566,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_timezones_daily_plus_utc.3 @@ -2592,9 +2592,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_timezones_daily_plus_utc.4 @@ -2652,9 +2652,9 @@ ORDER BY day_start ASC, breakdown_value ASC) GROUP BY breakdown_value ORDER BY sum(count) DESC, breakdown_value ASC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + 
allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_timezones_hourly_relative_from @@ -2685,9 +2685,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_timezones_hourly_relative_from.1 @@ -2711,9 +2711,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_timezones_hourly_relative_from_minus_utc @@ -2744,9 +2744,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_timezones_hourly_relative_from_minus_utc.1 @@ -2770,9 +2770,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_timezones_hourly_relative_from_plus_utc @@ -2803,9 +2803,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_timezones_hourly_relative_from_plus_utc.1 @@ -2829,9 +2829,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_timezones_weekly @@ -2855,9 +2855,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_timezones_weekly.1 @@ -2881,9 +2881,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_timezones_weekly_minus_utc @@ -2907,9 +2907,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_timezones_weekly_minus_utc.1 @@ -2933,9 +2933,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_timezones_weekly_plus_utc @@ -2959,9 +2959,9 
@@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_timezones_weekly_plus_utc.1 @@ -2985,9 +2985,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trend_breakdown_user_props_with_filter_with_partial_property_pushdowns @@ -3078,9 +3078,9 @@ ORDER BY day_start ASC, breakdown_value ASC) GROUP BY breakdown_value ORDER BY sum(count) DESC, breakdown_value ASC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trend_breakdown_user_props_with_filter_with_partial_property_pushdowns.2 @@ -3171,9 +3171,9 @@ ORDER BY day_start ASC, breakdown_value ASC) GROUP BY breakdown_value ORDER BY sum(count) DESC, breakdown_value ASC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_aggregate_by_distinct_id @@ -3197,9 +3197,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_aggregate_by_distinct_id.1 @@ -3241,9 +3241,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_aggregate_by_distinct_id.2 @@ -3330,9 +3330,9 @@ ORDER BY day_start ASC, breakdown_value ASC) GROUP BY breakdown_value ORDER BY sum(count) DESC, breakdown_value ASC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_aggregate_by_distinct_id.4 @@ -3369,9 +3369,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_aggregate_by_distinct_id.5 @@ -3408,9 +3408,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_aggregate_by_distinct_id.6 @@ -3461,9 +3461,9 @@ ORDER BY day_start ASC, breakdown_value ASC) GROUP BY breakdown_value ORDER BY sum(count) DESC, breakdown_value ASC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 
SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_any_event_total_count @@ -3487,9 +3487,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_any_event_total_count.1 @@ -3513,9 +3513,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_breakdown_cumulative @@ -3578,9 +3578,9 @@ ORDER BY day_start ASC, breakdown_value ASC)) GROUP BY breakdown_value ORDER BY sum(count) DESC, breakdown_value ASC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_breakdown_cumulative_poe_v2 @@ -3642,9 +3642,9 @@ ORDER BY day_start ASC, breakdown_value ASC)) GROUP BY breakdown_value ORDER BY sum(count) DESC, breakdown_value ASC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_breakdown_with_session_property_single_aggregate_math_and_breakdown @@ -3684,9 +3684,9 @@ GROUP BY e__session.id, breakdown_value) GROUP BY breakdown_value - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_breakdown_with_session_property_single_aggregate_math_and_breakdown.2 @@ -3726,9 +3726,9 @@ GROUP BY e__session.id, breakdown_value) GROUP BY breakdown_value - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_compare_day_interval_relative_range @@ -3752,9 +3752,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_compare_day_interval_relative_range.1 @@ -3778,9 +3778,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_compare_day_interval_relative_range.2 @@ -3804,9 +3804,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_count_per_user_average_aggregated @@ -3826,9 +3826,9 @@ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, 
person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id) WHERE and(equals(e.team_id, 2), equals(e.event, 'viewed video'), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), minus(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC')), toIntervalDay(0))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-07 23:59:59', 6, 'UTC')))) GROUP BY e__pdi.person_id)) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_count_per_user_average_aggregated_poe_v2 @@ -3847,9 +3847,9 @@ GROUP BY person_overrides.old_person_id) AS e__override ON equals(e.person_id, e__override.old_person_id) WHERE and(equals(e.team_id, 2), equals(e.event, 'viewed video'), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), minus(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC')), toIntervalDay(0))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-07 23:59:59', 6, 'UTC')))) GROUP BY ifNull(nullIf(e__override.override_person_id, '00000000-0000-0000-0000-000000000000'), e.person_id))) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_count_per_user_average_aggregated_with_event_property_breakdown_with_sampling @@ -3887,9 +3887,9 @@ GROUP BY e__pdi.person_id, breakdown_value) GROUP BY breakdown_value) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_count_per_user_average_daily @@ -3928,9 +3928,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_count_per_user_average_daily_poe_v2 @@ -3968,9 +3968,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_groups_per_day @@ -3994,9 +3994,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_groups_per_day_cumulative @@ -4024,9 +4024,9 @@ GROUP BY day_start ORDER BY day_start ASC)) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_per_day_cumulative @@ -4054,9 +4054,9 @@ GROUP BY day_start ORDER BY day_start ASC)) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 
SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_per_day_dau_cumulative @@ -4091,9 +4091,9 @@ GROUP BY day_start ORDER BY day_start ASC)) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_person_breakdown_with_session_property_single_aggregate_math_and_breakdown @@ -4169,9 +4169,9 @@ GROUP BY e__session.id, breakdown_value) GROUP BY breakdown_value - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_with_hogql_math @@ -4195,9 +4195,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_with_session_property_single_aggregate_math @@ -4214,9 +4214,9 @@ GROUP BY id) AS e__session ON equals(e.`$session_id`, e__session.id) WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) GROUP BY e__session.id) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_with_session_property_single_aggregate_math.1 @@ -4233,9 +4233,9 @@ GROUP BY id) AS e__session ON equals(e.`$session_id`, e__session.id) WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) GROUP BY e__session.id) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_with_session_property_total_volume_math @@ -4271,9 +4271,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_with_session_property_total_volume_math.1 @@ -4309,9 +4309,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_with_session_property_total_volume_math_with_breakdowns @@ -4382,9 +4382,9 @@ ORDER BY day_start ASC, breakdown_value ASC) GROUP BY breakdown_value ORDER BY sum(count) DESC, breakdown_value ASC - 
LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_trends_with_session_property_total_volume_math_with_breakdowns.2 @@ -4455,9 +4455,9 @@ ORDER BY day_start ASC, breakdown_value ASC) GROUP BY breakdown_value ORDER BY sum(count) DESC, breakdown_value ASC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_weekly_active_users_aggregated_range_narrower_than_week @@ -4487,9 +4487,9 @@ e.actor_id ORDER BY d.timestamp ASC) WHERE and(ifNull(greaterOrEquals(timestamp, toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-11 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(timestamp, assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 23:59:59', 6, 'UTC'))), 0)) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_weekly_active_users_aggregated_range_wider_than_week @@ -4519,9 +4519,9 @@ e.actor_id ORDER BY d.timestamp ASC) WHERE and(ifNull(greaterOrEquals(timestamp, toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(timestamp, assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-08 23:59:59', 6, 'UTC'))), 0)) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_weekly_active_users_aggregated_range_wider_than_week_with_sampling @@ -4551,9 +4551,9 @@ e.actor_id ORDER BY d.timestamp ASC) WHERE and(ifNull(greaterOrEquals(timestamp, toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(timestamp, assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-08 23:59:59', 6, 'UTC'))), 0)) - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_weekly_active_users_daily @@ -4597,9 +4597,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_weekly_active_users_daily_minus_utc @@ -4643,9 +4643,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_weekly_active_users_daily_plus_utc @@ -4689,9 +4689,9 @@ GROUP BY day_start ORDER BY day_start ASC) ORDER BY sum(count) DESC - LIMIT 100 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1 + LIMIT 10000 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1 ''' # --- # name: TestTrends.test_weekly_active_users_filtering @@ -4746,9 +4746,9 @@ GROUP BY day_start 
   ORDER BY day_start ASC)
   ORDER BY sum(count) DESC
-  LIMIT 100 SETTINGS readonly=2,
-            max_execution_time=60,
-            allow_experimental_object_type=1
+  LIMIT 10000 SETTINGS readonly=2,
+              max_execution_time=60,
+              allow_experimental_object_type=1
   '''
# ---
# name: TestTrends.test_weekly_active_users_filtering_materialized
@@ -4803,9 +4803,9 @@
   GROUP BY day_start
   ORDER BY day_start ASC)
   ORDER BY sum(count) DESC
-  LIMIT 100 SETTINGS readonly=2,
-            max_execution_time=60,
-            allow_experimental_object_type=1
+  LIMIT 10000 SETTINGS readonly=2,
+              max_execution_time=60,
+              allow_experimental_object_type=1
   '''
# ---
# name: TestTrends.test_weekly_active_users_hourly
@@ -4849,9 +4849,9 @@
   GROUP BY day_start
   ORDER BY day_start ASC)
   ORDER BY sum(count) DESC
-  LIMIT 100 SETTINGS readonly=2,
-            max_execution_time=60,
-            allow_experimental_object_type=1
+  LIMIT 10000 SETTINGS readonly=2,
+              max_execution_time=60,
+              allow_experimental_object_type=1
   '''
# ---
# name: TestTrends.test_weekly_active_users_weekly
@@ -4895,9 +4895,9 @@
   GROUP BY day_start
   ORDER BY day_start ASC)
   ORDER BY sum(count) DESC
-  LIMIT 100 SETTINGS readonly=2,
-            max_execution_time=60,
-            allow_experimental_object_type=1
+  LIMIT 10000 SETTINGS readonly=2,
+              max_execution_time=60,
+              allow_experimental_object_type=1
   '''
# ---
# name: TestTrends.test_weekly_active_users_weekly_minus_utc
@@ -4941,9 +4941,9 @@
   GROUP BY day_start
   ORDER BY day_start ASC)
   ORDER BY sum(count) DESC
-  LIMIT 100 SETTINGS readonly=2,
-            max_execution_time=60,
-            allow_experimental_object_type=1
+  LIMIT 10000 SETTINGS readonly=2,
+              max_execution_time=60,
+              allow_experimental_object_type=1
   '''
# ---
# name: TestTrends.test_weekly_active_users_weekly_plus_utc
@@ -4987,8 +4987,8 @@
   GROUP BY day_start
   ORDER BY day_start ASC)
   ORDER BY sum(count) DESC
-  LIMIT 100 SETTINGS readonly=2,
-            max_execution_time=60,
-            allow_experimental_object_type=1
+  LIMIT 10000 SETTINGS readonly=2,
+              max_execution_time=60,
+              allow_experimental_object_type=1
   '''
# ---
diff --git a/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py b/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py
index 240c90c98c683..104e232a01406 100644
--- a/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py
+++ b/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py
@@ -3,6 +3,8 @@
 from unittest.mock import patch
 from django.test import override_settings
 from freezegun import freeze_time
+from posthog.hogql import ast
+from posthog.hogql.constants import MAX_SELECT_RETURNED_ROWS
 from posthog.hogql.modifiers import create_default_modifiers_for_team
 from posthog.hogql_queries.insights.trends.trends_query_runner import TrendsQueryRunner
 from posthog.models.cohort.cohort import Cohort
@@ -1115,7 +1117,7 @@ def test_breakdown_values_limit(self):
     def test_breakdown_values_world_map_limit(self):
         PropertyDefinition.objects.create(team=self.team, name="breakdown_value", property_type="String")
-        for value in list(range(30)):
+        for value in list(range(250)):
             _create_event(
                 team=self.team,
                 event="$pageview",
@@ -1124,7 +1126,7 @@ def test_breakdown_values_world_map_limit(self):
                 properties={"breakdown_value": f"{value}"},
             )
-        response = self._run_trends_query(
+        query_runner = self._create_query_runner(
             "2020-01-09",
             "2020-01-20",
             IntervalType.day,
@@ -1132,8 +1134,11 @@
             TrendsFilter(display=ChartDisplayType.WorldMap),
             BreakdownFilter(breakdown="breakdown_value", breakdown_type=BreakdownType.event),
         )
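+        # The world map display sets no explicit limit, so the built query is expected to
+        # fall back to the global HogQL maximum (MAX_SELECT_RETURNED_ROWS, 10000) rather
+        # than the default 100 rows: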
+        query = query_runner.to_queries()[0]
+        assert isinstance(query, ast.SelectQuery) and query.limit == ast.Constant(value=MAX_SELECT_RETURNED_ROWS)
-        assert len(response.results) == 30
+        response = query_runner.calculate()
+        assert len(response.results) == 250
     def test_previous_period_with_number_display(self):
         self._create_test_events()
diff --git a/posthog/hogql_queries/insights/trends/trends_query_runner.py b/posthog/hogql_queries/insights/trends/trends_query_runner.py
index f8ef1f579eebd..ac741cc45f729 100644
--- a/posthog/hogql_queries/insights/trends/trends_query_runner.py
+++ b/posthog/hogql_queries/insights/trends/trends_query_runner.py
@@ -18,7 +18,7 @@
 from posthog.caching.utils import is_stale
 from posthog.hogql import ast
-from posthog.hogql.constants import LimitContext
+from posthog.hogql.constants import LimitContext, MAX_SELECT_RETURNED_ROWS
 from posthog.hogql.printer import to_printed_hogql
 from posthog.hogql.query import execute_hogql_query
 from posthog.hogql.timings import HogQLTimings
@@ -142,8 +142,13 @@ def to_queries(self) -> List[ast.SelectQuery | ast.SelectUnionQuery]:
                 timings=self.timings,
                 modifiers=self.modifiers,
             )
+            query = query_builder.build_query()
-            queries.append(query_builder.build_query())
+            # HogQL applies a default LIMIT of 100 when a query sets none; raise it to the
+            # 10000 maximum so views with many series, such as the world map's per-country
+            # breakdown, aren't silently truncated.
+            if isinstance(query, ast.SelectQuery) and query.limit is None:
+                query.limit = ast.Constant(value=MAX_SELECT_RETURNED_ROWS)
+            queries.append(query)
         return queries
diff --git a/posthog/hogql_queries/web_analytics/stats_table.py b/posthog/hogql_queries/web_analytics/stats_table.py
index a7ced15c87b33..f15505365ba66 100644
--- a/posthog/hogql_queries/web_analytics/stats_table.py
+++ b/posthog/hogql_queries/web_analytics/stats_table.py
@@ -1,5 +1,6 @@
 from posthog.hogql import ast
 from posthog.hogql.constants import LimitContext
+from posthog.hogql.database.schema.channel_type import create_channel_type_expr
 from posthog.hogql.parser import parse_select, parse_expr
 from posthog.hogql_queries.insights.paginators import HogQLHasMorePaginator
 from posthog.hogql_queries.web_analytics.ctes import (
@@ -273,72 +274,17 @@ def to_channel_query(self):
     (SELECT
-        multiIf(
-            match(initial_utm_campaign, 'cross-network'),
-            'Cross Network',
-
-            (
-                match(initial_utm_medium, '^(.*cp.*|ppc|retargeting|paid.*)$') OR
-                initial_gclid IS NOT NULL OR
-                initial_gad_source IS NOT NULL
-            ),
-            coalesce(
-                hogql_lookupPaidSourceType(initial_utm_source),
-                hogql_lookupPaidDomainType(initial_referring_domain),
-                if(
-                    match(initial_utm_campaign, '^(.*(([^a-df-z]|^)shop|shopping).*)$'),
-                    'Paid Shopping',
-                    NULL
-                ),
-                hogql_lookupPaidMediumType(initial_utm_medium),
-                multiIf (
-                    initial_gad_source = '1',
-                    'Paid Search',
-
-                    match(initial_utm_campaign, '^(.*video.*)$'),
-                    'Paid Video',
-
-                    'Paid Other'
-                )
-            ),
-
-            (
-                initial_referring_domain = '$direct'
-                AND (initial_utm_medium IS NULL OR initial_utm_medium = '')
-                AND (initial_utm_source IS NULL OR initial_utm_source IN ('', '(direct)', 'direct'))
-            ),
-            'Direct',
-
-            coalesce(
-                hogql_lookupOrganicSourceType(initial_utm_source),
-                hogql_lookupOrganicDomainType(initial_referring_domain),
-                if(
-                    match(initial_utm_campaign, '^(.*(([^a-df-z]|^)shop|shopping).*)$'),
-                    'Organic Shopping',
-                    NULL
-                ),
-                hogql_lookupOrganicMediumType(initial_utm_medium),
-                multiIf(
-                    match(initial_utm_campaign, '^(.*video.*)$'),
-                    'Organic Video',
-
-                    match(initial_utm_medium, 'push$'),
-                    'Push',
-
-                    'Other'
-                )
-            )
-        ) AS
breakdown_value, + {channel_type} AS breakdown_value, count() as total_pageviews, uniq(pid) as unique_visitors FROM (SELECT - person.properties.$initial_utm_campaign AS initial_utm_campaign, - person.properties.$initial_utm_medium AS initial_utm_medium, - person.properties.$initial_utm_source AS initial_utm_source, - person.properties.$initial_referring_domain AS initial_referring_domain, - person.properties.$initial_gclid AS initial_gclid, - person.properties.$initial_gad_source AS initial_gad_source, + toString(person.properties.$initial_utm_campaign) AS initial_utm_campaign, + toString(person.properties.$initial_utm_medium) AS initial_utm_medium, + toString(person.properties.$initial_utm_source) AS initial_utm_source, + toString(person.properties.$initial_referring_domain) AS initial_referring_domain, + toString(person.properties.$initial_gclid) AS initial_gclid, + toString(person.properties.$initial_gad_source) AS initial_gad_source, person_id AS pid FROM events SAMPLE {sample_rate} @@ -361,6 +307,16 @@ def to_channel_query(self): "counts_where": self.events_where(), "where_breakdown": self.where_breakdown(), "sample_rate": self._sample_ratio, + "channel_type": create_channel_type_expr( + campaign=ast.Call(name="toString", args=[ast.Field(chain=["initial_utm_campaign"])]), + medium=ast.Call(name="toString", args=[ast.Field(chain=["initial_utm_medium"])]), + source=ast.Call(name="toString", args=[ast.Field(chain=["initial_utm_source"])]), + referring_domain=ast.Call( + name="toString", args=[ast.Field(chain=["initial_referring_domain"])] + ), + gclid=ast.Call(name="toString", args=[ast.Field(chain=["initial_gclid"])]), + gad_source=ast.Call(name="toString", args=[ast.Field(chain=["initial_gad_source"])]), + ), }, ) diff --git a/posthog/hogql_queries/web_analytics/test/test_web_overview.py b/posthog/hogql_queries/web_analytics/test/test_web_overview.py index e4fc03121ab1b..63a26ffea9233 100644 --- a/posthog/hogql_queries/web_analytics/test/test_web_overview.py +++ b/posthog/hogql_queries/web_analytics/test/test_web_overview.py @@ -1,4 +1,5 @@ from freezegun import freeze_time +from parameterized import parameterized from posthog.hogql_queries.web_analytics.web_overview import WebOverviewQueryRunner from posthog.schema import WebOverviewQuery, DateRange @@ -35,20 +36,25 @@ def _create_events(self, data, event="$pageview"): ) return person_result - def _run_web_overview_query(self, date_from, date_to, compare=True): + def _run_web_overview_query(self, date_from, date_to, use_sessions_table=False, compare=True): query = WebOverviewQuery( dateRange=DateRange(date_from=date_from, date_to=date_to), properties=[], compare=compare, + useSessionsTable=use_sessions_table, ) runner = WebOverviewQueryRunner(team=self.team, query=query) return runner.calculate() - def test_no_crash_when_no_data(self): - results = self._run_web_overview_query("2023-12-08", "2023-12-15").results + @parameterized.expand([(True,), (False,)]) + def test_no_crash_when_no_data(self, use_sessions_table): + results = self._run_web_overview_query( + "2023-12-08", "2023-12-15", use_sessions_table=use_sessions_table + ).results self.assertEqual(5, len(results)) - def test_increase_in_users(self): + @parameterized.expand([(True,), (False,)]) + def test_increase_in_users(self, use_sessions_table): self._create_events( [ ("p1", [("2023-12-02", "s1a"), ("2023-12-03", "s1a"), ("2023-12-12", "s1b")]), @@ -56,7 +62,9 @@ def test_increase_in_users(self): ] ) - results = self._run_web_overview_query("2023-12-08", "2023-12-15").results + 
results = self._run_web_overview_query(
+            "2023-12-08", "2023-12-15", use_sessions_table=use_sessions_table
+        ).results
         visitors = results[0]
         self.assertEqual("visitors", visitors.key)
@@ -88,7 +96,8 @@ def test_increase_in_users(self, use_sessions_table):
         self.assertEqual(0, bounce.previous)
         self.assertEqual(None, bounce.changeFromPreviousPct)
-    def test_all_time(self):
+    @parameterized.expand([(True,), (False,)])
+    def test_all_time(self, use_sessions_table):
         self._create_events(
             [
                 ("p1", [("2023-12-02", "s1a"), ("2023-12-03", "s1a"), ("2023-12-12", "s1b")]),
@@ -96,7 +105,9 @@
             ]
         )
-        results = self._run_web_overview_query("all", "2023-12-15", compare=False).results
+        results = self._run_web_overview_query(
+            "all", "2023-12-15", compare=False, use_sessions_table=use_sessions_table
+        ).results
         visitors = results[0]
         self.assertEqual("visitors", visitors.key)
@@ -128,11 +139,14 @@
         self.assertEqual(None, bounce.previous)
         self.assertEqual(None, bounce.changeFromPreviousPct)
-    def test_filter_test_accounts(self):
+    @parameterized.expand([(True,), (False,)])
+    def test_filter_test_accounts(self, use_sessions_table):
         # Create 1 test account
         self._create_events([("test", [("2023-12-02", "s1"), ("2023-12-03", "s1")])])
-        results = self._run_web_overview_query("2023-12-01", "2023-12-03").results
+        results = self._run_web_overview_query(
+            "2023-12-01", "2023-12-03", use_sessions_table=use_sessions_table
+        ).results
         visitors = results[0]
         self.assertEqual(0, visitors.value)
@@ -149,3 +163,25 @@
         bounce = results[4]
         self.assertEqual("bounce rate", bounce.key)
         self.assertEqual(None, bounce.value)
+
+    @parameterized.expand([(True,), (False,)])
+    def test_correctly_counts_pageviews_in_long_running_session(self, use_sessions_table):
+        # important for the sessions-table path: raw_sessions stores one row per day, so this
+        # three-day session would be counted three times if the query didn't collapse them
+        self._create_events(
+            [
+                ("p1", [("2023-12-01", "s1"), ("2023-12-02", "s1"), ("2023-12-03", "s1")]),
+            ]
+        )
+
+        results = self._run_web_overview_query(
+            "2023-12-01", "2023-12-03", use_sessions_table=use_sessions_table
+        ).results
+
+        visitors = results[0]
+        self.assertEqual(1, visitors.value)
+
+        views = results[1]
+        self.assertEqual(3, views.value)
+
+        sessions = results[2]
+        self.assertEqual(1, sessions.value)
diff --git a/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py b/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py
index e20a2810274a9..da4f98edcbf32 100644
--- a/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py
+++ b/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py
@@ -101,6 +101,22 @@ def session_having(self, include_previous_period: Optional[bool] = None):
             self.team,
         )
+    def sessions_table_properties(self, include_previous_period: Optional[bool] = None):
+        properties = [
+            parse_expr(
+                "sessions.min_timestamp >= {date_from}",
+                placeholders={
+                    "date_from": self.query_date_range.previous_period_date_from_as_hogql()
+                    if include_previous_period
+                    else self.query_date_range.date_from_as_hogql(),
+                },
+            )
+        ]
+        return property_to_expr(
+            properties,
+            self.team,
+        )
+
     def events_where(self):
         properties = [self.events_where_data_range(), self.query.properties, self._test_account_filters]
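The web_overview.py changes that follow gate the new code path behind useSessionsTable and push the bounce definition into SQL. As a rough Python sketch of the rule the query's is_bounce column encodes (the 30-second threshold and the two counts mirror the SQL below; the helper itself is illustrative, not part of the patch):

def is_bounce(duration_s: float, pageview_count: int, autocapture_count: int) -> bool:
    # a bounce is a short session with a single pageview and no autocaptured interactions
    return duration_s < 30 and pageview_count == 1 and autocapture_count == 0

sessions = [(10.0, 1, 0), (45.0, 3, 2), (5.0, 1, 1)]
bounce_rate = sum(is_bounce(*s) for s in sessions) / len(sessions)  # 1/3, matching avg(is_bounce)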
diff --git a/posthog/hogql_queries/web_analytics/web_overview.py b/posthog/hogql_queries/web_analytics/web_overview.py
index 2019803faf78a..38388315c8f0b 100644
--- a/posthog/hogql_queries/web_analytics/web_overview.py
+++ b/posthog/hogql_queries/web_analytics/web_overview.py
@@ -19,6 +19,9 @@ class WebOverviewQueryRunner(WebAnalyticsQueryRunner):
     query_type = WebOverviewQuery
     def to_query(self) -> ast.SelectQuery | ast.SelectUnionQuery:
+        if self.query.useSessionsTable:
+            return self.to_query_with_session_table()
+
         with self.timings.measure("date_expr"):
             start = self.query_date_range.previous_period_date_from_as_hogql()
             mid = self.query_date_range.date_from_as_hogql()
@@ -168,6 +171,113 @@
             },
         )
+    def to_query_with_session_table(self) -> ast.SelectQuery | ast.SelectUnionQuery:
+        with self.timings.measure("date_expr"):
+            start = self.query_date_range.previous_period_date_from_as_hogql()
+            mid = self.query_date_range.date_from_as_hogql()
+            end = self.query_date_range.date_to_as_hogql()
+
+        if self.query.compare:
+            return parse_select(
+                """
+SELECT
+    uniq(if(min_timestamp >= {mid} AND min_timestamp < {end}, person_id, NULL)) AS unique_users,
+    uniq(if(min_timestamp >= {start} AND min_timestamp < {mid}, person_id, NULL)) AS previous_unique_users,
+    sumIf(filtered_pageview_count, min_timestamp >= {mid} AND min_timestamp < {end}) AS current_pageviews,
+    sumIf(filtered_pageview_count, min_timestamp >= {start} AND min_timestamp < {mid}) AS previous_pageviews,
+    uniq(if(min_timestamp >= {mid} AND min_timestamp < {end}, session_id, NULL)) AS unique_sessions,
+    uniq(if(min_timestamp >= {start} AND min_timestamp < {mid}, session_id, NULL)) AS previous_unique_sessions,
+    avg(if(min_timestamp >= {mid}, duration, NULL)) AS avg_duration_s,
+    avg(if(min_timestamp < {mid}, duration, NULL)) AS prev_avg_duration_s,
+    avg(if(min_timestamp >= {mid}, is_bounce, NULL)) AS bounce_rate,
+    avg(if(min_timestamp < {mid}, is_bounce, NULL)) AS prev_bounce_rate
+FROM (
+    SELECT
+        any(events.person_id) as person_id,
+        events.`$session_id` as session_id,
+        min(sessions.min_timestamp) as min_timestamp,
+        any(sessions.duration) as duration,
+        any(sessions.pageview_count) as session_pageview_count,
+        any(sessions.autocapture_count) as session_autocapture_count,
+        count() as filtered_pageview_count,
+        and(
+            duration < 30,
+            session_pageview_count = 1,
+            session_autocapture_count = 0
+        ) as is_bounce
+    FROM events
+    JOIN sessions
+    ON events.`$session_id` = sessions.session_id
+    WHERE and(
+        `$session_id` IS NOT NULL,
+        event = '$pageview',
+        timestamp >= {start},
+        timestamp < {end},
+        {event_properties}
+    )
+    GROUP BY `$session_id`
+    HAVING and(
+        min_timestamp >= {start},
+        min_timestamp < {end}
+    )
+)
+
+                """,
+                placeholders={
+                    "start": start,
+                    "mid": mid,
+                    "end": end,
+                    "event_properties": self.event_properties(),
+                },
+            )
+        else:
+            return parse_select(
+                """
+SELECT
+    uniq(person_id) AS unique_users,
+    NULL as previous_unique_users,
+    sum(filtered_pageview_count) AS current_pageviews,
+    NULL as previous_pageviews,
+    uniq(session_id) AS unique_sessions,
+    NULL as previous_unique_sessions,
+    avg(duration) AS avg_duration_s,
+    NULL as prev_avg_duration_s,
+    avg(is_bounce) AS bounce_rate,
+    NULL as prev_bounce_rate
+FROM (
+    SELECT
+        any(events.person_id) as person_id,
+        events.`$session_id` as session_id,
+        min(sessions.min_timestamp) as min_timestamp,
+        any(sessions.duration) as duration,
+        any(sessions.pageview_count) as session_pageview_count,
+        any(sessions.autocapture_count) as session_autocapture_count,
+        count() as filtered_pageview_count,
+        and(
+            duration < 30,
+            session_pageview_count = 1,
+            session_autocapture_count = 0
+        ) as is_bounce
+    FROM events
+    JOIN sessions
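+    -- note: the `sessions` view collapses the per-day raw_sessions rows, so a session that
+    -- spans several days joins as a single row here; the HAVING clause below then keeps only
+    -- sessions whose rolled-up min_timestamp falls inside the queried window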
+    ON events.`$session_id` = sessions.session_id
+    WHERE and(
+        `$session_id` IS NOT NULL,
+        event = '$pageview',
+        timestamp >= {mid},
+        timestamp < {end},
+        {event_properties}
+    )
+    GROUP BY `$session_id`
+    HAVING and(
+        min_timestamp >= {mid},
+        min_timestamp < {end}
+    )
+)
+                """,
+                placeholders={"mid": mid, "end": end, "event_properties": self.event_properties()},
+            )
+
     def calculate(self):
         response = execute_hogql_query(
             query_type="overview_stats_pages_query",
diff --git a/posthog/management/commands/migrate_team.py b/posthog/management/commands/migrate_team.py
new file mode 100644
index 0000000000000..15a1f4e72b4f5
--- /dev/null
+++ b/posthog/management/commands/migrate_team.py
@@ -0,0 +1,270 @@
+import datetime as dt
+import logging
+
+from django.db import transaction
+from django.core.management.base import BaseCommand, CommandError
+
+from posthog.batch_exports.models import BATCH_EXPORT_INTERVALS
+from posthog.batch_exports.service import (
+    backfill_export,
+    sync_batch_export,
+    disable_and_delete_export,
+)
+from posthog.models import (
+    BatchExport,
+    BatchExportBackfill,
+    BatchExportDestination,
+    BatchExportRun,
+    Team,
+)
+from posthog.temporal.common.client import sync_connect
+
+
+logger = logging.getLogger(__name__)
+logger.setLevel(logging.INFO)
+
+EXPORT_NAME = "PostHog HTTP Migration"
+VALID_INTERVALS = {i[0] for i in BATCH_EXPORT_INTERVALS}
+REGION_URLS = {
+    "us": "https://app.posthog.com/batch",
+    "eu": "https://eu.posthog.com/batch",
+}
+
+
+class Command(BaseCommand):
+    help = (
+        "Creates an HTTP batch export for a team to migrate data to another PostHog instance, "
+        "or another team within the same instance."
+    )
+
+    def add_arguments(self, parser):
+        parser.add_argument("--team-id", default=None, type=int, help="Team ID to migrate from (on this instance)")
+        parser.add_argument("--interval", default=None, type=str, help="Interval to use for the batch export")
+        parser.add_argument(
+            "--start-at",
+            default=None,
+            type=str,
+            help="Timestamp to start the backfill from in UTC, 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'",
+        )
+        parser.add_argument(
+            # argparse's type=bool treats any non-empty string (even "False") as True, so
+            # expose this as a flag instead
+            "--delete-existing",
+            action="store_true",
+            help="Delete existing batch export if it exists",
+        )
+        parser.add_argument("--dest-token", default=None, type=str, help="Destination Project API Key (token)")
+        parser.add_argument("--dest-region", default=None, type=str, help="Destination region")
+        parser.add_argument(
+            "--end-days-from-now",
+            default=30,
+            type=int,
+            help="Number of days from now to automatically end the ongoing export at, the default is usually fine",
+        )
+
+    def handle(self, **options):
+        team_id = options["team_id"]
+        interval = options["interval"]
+        start_at = options["start_at"]
+        dest_token = options["dest_token"]
+        dest_region = options["dest_region"]
+        verbose = options["verbosity"] > 1
+
+        create_args = [
+            interval,
+            start_at,
+            dest_token,
+            dest_region,
+        ]
+        create_requested = any(create_args)
+
+        if not team_id:
+            raise CommandError("source Team ID is required")
+
+        team = Team.objects.select_related("organization").get(id=team_id)
+
+        display(
+            "Team",
+            name=team.name,
+            organization=team.organization.name,
+        )
+
+        try:
+            existing_export = BatchExport.objects.get(
+                team=team, destination__type="HTTP", name=EXPORT_NAME, deleted=False
+            )
+
+            display_existing(existing_export=existing_export, verbose=verbose)
+
+            if options["delete_existing"]:
+                result = input("Enter [y] to continue deleting the existing migration (Ctrl+C to cancel) ")
+                if result.lower() != "y":
+                    raise
CommandError("Didn't receive 'y', exiting") + print() # noqa: T201 + + disable_and_delete_export(existing_export) + existing_export = None + display("Deleted existing batch export and backfill") + except BatchExport.DoesNotExist: + existing_export = None + display("No existing migration was found") + except BatchExport.MultipleObjectsReturned: + raise CommandError( + "More than one existing migration found! This should never happen if the management command is used, we don't know enough to proceed" + ) + + if not create_requested: + # User didn't provide any arguments to create a migration, so they must have just wanted + # to check the status and/or delete the existing migration. + return + elif existing_export: + display( + "Existing migration job already exists and it wasn't deleted, exiting without creating a new batch export" + ) + return + + end_days_from_now = options["end_days_from_now"] + + create_migration( + team_id=team_id, + interval=interval, + start_at=start_at, + dest_token=dest_token, + dest_region=dest_region, + end_days_from_now=end_days_from_now, + ) + + +def display_existing(*, existing_export: BatchExport, verbose: bool): + existing_backfill = BatchExportBackfill.objects.get(batch_export=existing_export) + most_recent_run = BatchExportRun.objects.filter(batch_export=existing_export).order_by("-created_at").first() + + if verbose: + display( + "Existing migration batch export (verbose details)", + batch_export_id=existing_export.id, + paused=existing_export.paused, + interval=existing_export.interval, + created_at=existing_export.created_at, + last_updated_at=existing_export.last_updated_at, + ) + display( + "Existing migration backfill (verbose details)", + backfill_id=existing_backfill.id, + status=existing_backfill.status, + start_at=existing_backfill.start_at, + created_at=existing_backfill.created_at, + last_updated_at=existing_backfill.last_updated_at, + ) + + if not most_recent_run: + display("No batch export runs found, is the migration brand new?") + else: + most_recent_completed_run = ( + BatchExportRun.objects.filter(batch_export=existing_export, status=BatchExportRun.Status.COMPLETED) + .order_by("-finished_at") + .first() + ) + + if most_recent_completed_run: + data_start_at = existing_backfill.start_at + data_end_at = most_recent_completed_run.data_interval_end + display( + "Found an existing migration, range of data migrated:", + start=data_start_at, + end=data_end_at, + interval=existing_export.interval, + ) + if existing_export.paused: + display("The batch export backfill is still catching up to realtime") + else: + display( + "The batch export is unpaused, meaning the primary backfill completed and this is now in realtime export mode", + ) + + if not most_recent_completed_run or verbose: + display( + "Most recent run (verbose details)", + run_id=most_recent_run.id, + status=most_recent_run.status, + error=most_recent_run.latest_error, + data_interval_start=most_recent_run.data_interval_start, + data_interval_end=most_recent_run.data_interval_end, + created_at=most_recent_run.created_at, + last_updated_at=most_recent_run.last_updated_at, + ) + + +def create_migration( + *, team_id: int, interval: str, start_at: str, dest_token: str, dest_region: str, end_days_from_now: int +): + if interval not in VALID_INTERVALS: + raise CommandError("invalid interval, choices are: %s" % VALID_INTERVALS) + + if not dest_token.startswith("phc_"): + raise CommandError("invalid destination token, must start with 'phc_'") + + dest_region = dest_region.lower() + if 
dest_region not in REGION_URLS: + raise CommandError("invalid destination region, choices are: 'us', 'eu'") + url = REGION_URLS[dest_region] + + try: + start_at_datetime = parse_to_utc(start_at) + except ValueError as e: + raise CommandError("couldn't parse start_at: %s" % e) + + display( + "Creating migration", + interval=interval, + start_at=start_at_datetime, + dest_token=dest_token, + dest_region=dest_region, + url=url, + ) + result = input("Enter [y] to continue creating a new migration (Ctrl+C to cancel) ") + if result.lower() != "y": + raise CommandError("Didn't receive 'y', exiting") + print() # noqa: T201 + + now = dt.datetime.now(dt.timezone.utc) + # This is a precaution so we don't accidentally leave the export running indefinitely. + end_at = now + dt.timedelta(days=end_days_from_now) + + destination = BatchExportDestination( + type=BatchExportDestination.Destination.HTTP, + config={"url": url, "token": dest_token}, + ) + batch_export = BatchExport( + team_id=team_id, + destination=destination, + name=EXPORT_NAME, + interval=interval, + paused=True, + end_at=end_at, + ) + sync_batch_export(batch_export, created=True) + + with transaction.atomic(): + destination.save() + batch_export.save() + + temporal = sync_connect() + backfill_id = backfill_export(temporal, str(batch_export.pk), team_id, start_at_datetime, end_at=None) + display("Backfill started", batch_export_id=batch_export.id, backfill_id=backfill_id) + + +def display(message, **kwargs): + print(message) # noqa: T201 + for key, value in kwargs.items(): + if isinstance(value, dt.datetime): + value = value.strftime("%Y-%m-%d %H:%M:%S") + print(f" {key} = {value}") # noqa: T201 + print() # noqa: T201 + + +def parse_to_utc(date_str: str) -> dt.datetime: + try: + parsed_datetime = dt.datetime.strptime(date_str, "%Y-%m-%d") + except ValueError: + try: + parsed_datetime = dt.datetime.strptime(date_str, "%Y-%m-%d %H:%M:%S") + except ValueError: + raise ValueError("Invalid date format. 
Expected 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.") + + utc_datetime = parsed_datetime.replace(tzinfo=dt.timezone.utc) + return utc_datetime diff --git a/posthog/migrations/0396_projects_and_environments.py b/posthog/migrations/0396_projects_and_environments.py new file mode 100644 index 0000000000000..c571200b4ec51 --- /dev/null +++ b/posthog/migrations/0396_projects_and_environments.py @@ -0,0 +1,59 @@ +# Generated by Django 4.1.13 on 2024-03-08 22:47 + +import django.core.validators +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0395_alter_batchexportbackfill_end_at"), + ] + + operations = [ + migrations.AlterField( + model_name="team", + name="name", + field=models.CharField( + default="Default project", + max_length=200, + validators=[django.core.validators.MinLengthValidator(1, "Project must have a name!")], + ), + ), + migrations.CreateModel( + name="Project", + fields=[ + ("id", models.BigIntegerField(primary_key=True, serialize=False, verbose_name="ID")), + ( + "name", + models.CharField( + default="Default project", + max_length=200, + validators=[django.core.validators.MinLengthValidator(1, "Project must have a name!")], + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ( + "organization", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="projects", + related_query_name="project", + to="posthog.organization", + ), + ), + ], + ), + migrations.AddField( + model_name="team", + name="project", + field=models.ForeignKey( + null=True, + blank=False, + on_delete=django.db.models.deletion.CASCADE, + related_name="teams", + related_query_name="team", + to="posthog.project", + ), + ), + ] diff --git a/posthog/models/__init__.py b/posthog/models/__init__.py index d9634fbbdd4c1..9bbaa713e53bf 100644 --- a/posthog/models/__init__.py +++ b/posthog/models/__init__.py @@ -56,6 +56,7 @@ PluginLogEntry, PluginSourceFile, ) +from .project import Project from .property import Property from .property_definition import PropertyDefinition from .sharing_configuration import SharingConfiguration @@ -122,6 +123,7 @@ "PluginConfig", "PluginLogEntry", "PluginSourceFile", + "Project", "Property", "PropertyDefinition", "RetentionFilter", diff --git a/posthog/models/filters/test/__snapshots__/test_filter.ambr b/posthog/models/filters/test/__snapshots__/test_filter.ambr index 534870c348300..1bd4315507dde 100644 --- a/posthog/models/filters/test/__snapshots__/test_filter.ambr +++ b/posthog/models/filters/test/__snapshots__/test_filter.ambr @@ -4,6 +4,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -63,6 +64,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -122,6 +124,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -181,6 +184,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -240,6 +244,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", 
"posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", diff --git a/posthog/models/organization.py b/posthog/models/organization.py index 254ba7ae64d9b..8740a0f34c453 100644 --- a/posthog/models/organization.py +++ b/posthog/models/organization.py @@ -60,11 +60,11 @@ def bootstrap( **kwargs, ) -> Tuple["Organization", Optional["OrganizationMembership"], "Team"]: """Instead of doing the legwork of creating an organization yourself, delegate the details with bootstrap.""" - from .team import Team # Avoiding circular import + from .project import Project # Avoiding circular import with transaction.atomic(): organization = Organization.objects.create(**kwargs) - team = Team.objects.create(organization=organization, **(team_fields or {})) + _, team = Project.objects.create_with_team(organization=organization, team_fields=team_fields) organization_membership: Optional[OrganizationMembership] = None if user is not None: organization_membership = OrganizationMembership.objects.create( diff --git a/posthog/models/project.py b/posthog/models/project.py new file mode 100644 index 0000000000000..c4ead260fb780 --- /dev/null +++ b/posthog/models/project.py @@ -0,0 +1,43 @@ +from typing import TYPE_CHECKING, Optional, Tuple +from django.db import models +from django.db import transaction +from django.core.validators import MinLengthValidator + +if TYPE_CHECKING: + from .team import Team + + +class ProjectManager(models.Manager): + def create_with_team(self, team_fields: Optional[dict] = None, **kwargs) -> Tuple["Project", "Team"]: + from .team import Team + + with transaction.atomic(): + common_id = Team.objects.increment_id_sequence() + project = self.create(id=common_id, **kwargs) + team = Team.objects.create( + id=common_id, organization=project.organization, project=project, **(team_fields or {}) + ) + return project, team + + +class Project(models.Model): + """DO NOT USE YET - you probably mean the `Team` model instead. + + `Project` is part of the environemnts feature, which is a work in progress. + """ + + id: models.BigIntegerField = models.BigIntegerField(primary_key=True, verbose_name="ID") + organization: models.ForeignKey = models.ForeignKey( + "posthog.Organization", + on_delete=models.CASCADE, + related_name="projects", + related_query_name="project", + ) + name: models.CharField = models.CharField( + max_length=200, + default="Default project", + validators=[MinLengthValidator(1, "Project must have a name!")], + ) + created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) + + objects: ProjectManager = ProjectManager() diff --git a/posthog/models/sessions/sql.py b/posthog/models/sessions/sql.py index 1850827acd75c..ea9ef27034c72 100644 --- a/posthog/models/sessions/sql.py +++ b/posthog/models/sessions/sql.py @@ -158,7 +158,7 @@ def source_column(column_name: str) -> str: sumIf(1, event='$autocapture') as autocapture_count FROM {database}.sharded_events -WHERE `$session_id` IS NOT NULL AND `$session_id` != '' AND toStartOfDay(timestamp) >= '2024-03-08' +WHERE `$session_id` IS NOT NULL AND `$session_id` != '' GROUP BY `$session_id`, team_id """.format( table_name=f"{TABLE_BASE_NAME}_mv", @@ -188,6 +188,7 @@ def source_column(column_name: str) -> str: ) ) + # Distributed engine tables are only created if CLICKHOUSE_REPLICATED # This table is responsible for writing to sharded_sessions based on a sharding key. 
diff --git a/posthog/models/team/team.py b/posthog/models/team/team.py index c815e75811456..93a486cdc98fc 100644 --- a/posthog/models/team/team.py +++ b/posthog/models/team/team.py @@ -13,8 +13,9 @@ MaxValueValidator, MinValueValidator, ) -from django.db import models +from django.db import models, connection from django.db.models.signals import post_delete, post_save +from django.db import transaction from zoneinfo import ZoneInfo from posthog.clickhouse.query_tagging import tag_queries from posthog.cloud_utils import is_cloud @@ -98,9 +99,23 @@ def create_with_data(self, user: Any = None, default_dashboards: bool = True, ** return team def create(self, *args, **kwargs) -> "Team": - if kwargs.get("organization") is None and kwargs.get("organization_id") is None: - raise ValueError("Creating organization-less projects is prohibited") - return super().create(*args, **kwargs) + from ..project import Project + + with transaction.atomic(): + if "id" not in kwargs: + kwargs["id"] = self.increment_id_sequence() + if kwargs.get("project") is None and kwargs.get("project_id") is None: + # If a parent project is not provided for this team, ensure there is one + # This should be removed once environments are fully rolled out + project_kwargs = {} + if organization := kwargs.get("organization"): + project_kwargs["organization"] = organization + elif organization_id := kwargs.get("organization_id"): + project_kwargs["organization_id"] = organization_id + if name := kwargs.get("name"): + project_kwargs["name"] = name + kwargs["project"] = Project.objects.create(id=kwargs["id"], **project_kwargs) + return super().create(*args, **kwargs) def get_team_from_token(self, token: Optional[str]) -> Optional["Team"]: if not token: @@ -125,6 +140,15 @@ def get_team_from_cache_or_token(self, token: Optional[str]) -> Optional["Team"] except Team.DoesNotExist: return None + def increment_id_sequence(self) -> int: + """Increment the `Team.id` field's sequence and return the latest value. 
+
+        Use only when actually needed to avoid wasting sequence values."""
+        cursor = connection.cursor()
+        cursor.execute("SELECT nextval('posthog_team_id_seq')")
+        result = cursor.fetchone()
+        return result[0]
+
 
 def get_default_data_attributes() -> List[str]:
     return ["data-attr"]
@@ -146,6 +170,14 @@ class Team(UUIDClassicModel):
         related_name="teams",
         related_query_name="team",
     )
+    project: models.ForeignKey = models.ForeignKey(
+        "posthog.Project",
+        on_delete=models.CASCADE,
+        related_name="teams",
+        related_query_name="team",
+        null=True,
+        blank=False,
+    )
     api_token: models.CharField = models.CharField(
         max_length=200,
         unique=True,
@@ -155,7 +187,7 @@ class Team(UUIDClassicModel):
     app_urls: ArrayField = ArrayField(models.CharField(max_length=200, null=True), default=list, blank=True)
     name: models.CharField = models.CharField(
         max_length=200,
-        default="Default Project",
+        default="Default project",
         validators=[MinLengthValidator(1, "Project must have a name!")],
     )
     slack_incoming_webhook: models.CharField = models.CharField(max_length=500, null=True, blank=True)
diff --git a/posthog/models/test/test_project.py b/posthog/models/test/test_project.py
new file mode 100644
index 0000000000000..d6bfe0ed3a36a
--- /dev/null
+++ b/posthog/models/test/test_project.py
@@ -0,0 +1,73 @@
+from unittest import mock
+from posthog.models.project import Project
+from posthog.models.team.team import Team
+from posthog.test.base import BaseTest
+
+
+class TestProject(BaseTest):
+    def test_create_project_with_team_no_team_fields(self):
+        project, team = Project.objects.create_with_team(
+            organization=self.organization,
+            name="Test project",
+        )
+
+        self.assertEqual(project.id, team.id)
+        self.assertEqual(project.name, "Test project")
+        self.assertEqual(project.organization, self.organization)
+
+        self.assertEqual(
+            team.name,
+            "Default project",  # TODO: When Environments are rolled out, ensure this says "Default environment"
+        )
+        self.assertEqual(team.organization, self.organization)
+        self.assertEqual(team.project, project)
+
+    def test_create_project_with_team_with_team_fields(self):
+        project, team = Project.objects.create_with_team(
+            organization=self.organization,
+            name="Test project",
+            team_fields={"name": "Test team", "access_control": True},
+        )
+
+        self.assertEqual(project.id, team.id)
+        self.assertEqual(project.name, "Test project")
+        self.assertEqual(project.organization, self.organization)
+
+        self.assertEqual(team.name, "Test team")
+        self.assertEqual(team.organization, self.organization)
+        self.assertEqual(team.project, project)
+        self.assertEqual(team.access_control, True)
+
+    def test_create_project_with_team_uses_team_id_sequence(self):
+        expected_common_id = Team.objects.increment_id_sequence() + 1
+
+        project, team = Project.objects.create_with_team(
+            organization=self.organization,
+            name="Test project",
+            team_fields={"name": "Test team", "access_control": True},
+        )
+
+        self.assertEqual(project.id, expected_common_id)
+        self.assertEqual(project.name, "Test project")
+        self.assertEqual(project.organization, self.organization)
+
+        self.assertEqual(team.id, expected_common_id)
+        self.assertEqual(team.name, "Test team")
+        self.assertEqual(team.organization, self.organization)
+        self.assertEqual(team.project, project)
+        self.assertEqual(team.access_control, True)
+
+    @mock.patch("posthog.models.team.team.Team.objects.create", side_effect=Exception)
+    def test_create_project_with_team_does_not_create_if_team_fails(self, mock_create):
+        initial_team_count = Team.objects.count()
+
initial_project_count = Project.objects.count() + + with self.assertRaises(Exception): + Project.objects.create_with_team( + organization=self.organization, + name="Test project", + team_fields={"name": "Test team", "access_control": True}, + ) + + self.assertEqual(Team.objects.count(), initial_team_count) + self.assertEqual(Project.objects.count(), initial_project_count) diff --git a/posthog/schema.py b/posthog/schema.py index d82c6c21b48ff..f97c682232543 100644 --- a/posthog/schema.py +++ b/posthog/schema.py @@ -1643,6 +1643,7 @@ class WebAnalyticsQueryBase(BaseModel): dateRange: Optional[DateRange] = None properties: List[Union[EventPropertyFilter, PersonPropertyFilter]] sampling: Optional[Sampling] = None + useSessionsTable: Optional[bool] = None class WebOverviewQuery(BaseModel): @@ -1655,6 +1656,7 @@ class WebOverviewQuery(BaseModel): properties: List[Union[EventPropertyFilter, PersonPropertyFilter]] response: Optional[WebOverviewQueryResponse] = None sampling: Optional[Sampling] = None + useSessionsTable: Optional[bool] = None class WebStatsTableQuery(BaseModel): @@ -1671,6 +1673,7 @@ class WebStatsTableQuery(BaseModel): properties: List[Union[EventPropertyFilter, PersonPropertyFilter]] response: Optional[WebStatsTableQueryResponse] = None sampling: Optional[Sampling] = None + useSessionsTable: Optional[bool] = None class WebTopClicksQuery(BaseModel): @@ -1682,6 +1685,7 @@ class WebTopClicksQuery(BaseModel): properties: List[Union[EventPropertyFilter, PersonPropertyFilter]] response: Optional[WebTopClicksQueryResponse] = None sampling: Optional[Sampling] = None + useSessionsTable: Optional[bool] = None class AnyResponseType( diff --git a/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr b/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr index e681525e1bf50..fad3c08168d0b 100644 --- a/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr +++ b/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr @@ -4,6 +4,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -63,6 +64,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -122,6 +124,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -181,6 +184,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -240,6 +244,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -330,6 +335,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -458,6 +464,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -627,6 +634,7 @@ SELECT 
"posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -779,6 +787,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -838,6 +847,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -897,6 +907,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -956,6 +967,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1015,6 +1027,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1074,6 +1087,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1164,6 +1178,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1412,6 +1427,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1502,6 +1518,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1850,6 +1867,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -1940,6 +1958,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -2290,6 +2309,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -2391,6 +2411,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -2743,6 +2764,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -2833,6 +2855,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -3216,6 +3239,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + 
"posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -3306,6 +3330,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -3401,6 +3426,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -3730,6 +3756,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -3820,6 +3847,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -3872,6 +3900,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -4496,6 +4525,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -4586,6 +4616,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -4928,6 +4959,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -5018,6 +5050,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -5373,6 +5406,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -5463,6 +5497,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", diff --git a/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr b/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr index bf055e2fda0ee..d4ed303a0dda1 100644 --- a/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr +++ b/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr @@ -75,6 +75,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -331,6 +332,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", diff --git a/posthog/temporal/data_imports/external_data_job.py b/posthog/temporal/data_imports/external_data_job.py index ae003af973787..c4e44a36cb867 100644 --- a/posthog/temporal/data_imports/external_data_job.py +++ b/posthog/temporal/data_imports/external_data_job.py @@ -37,7 +37,7 @@ class 
CreateExternalDataJobInputs: @activity.defn -async def create_external_data_job_model(inputs: CreateExternalDataJobInputs) -> Tuple[str, list[str]]: +async def create_external_data_job_model(inputs: CreateExternalDataJobInputs) -> Tuple[str, list[Tuple[str, str]]]: run = await sync_to_async(create_external_data_job)( team_id=inputs.team_id, external_data_source_id=inputs.external_data_source_id, @@ -105,7 +105,7 @@ async def update_external_data_job_model(inputs: UpdateExternalDataJobStatusInpu class ValidateSchemaInputs: run_id: str team_id: int - schemas: list[str] + schemas: list[Tuple[str, str]] @activity.defn @@ -133,7 +133,7 @@ class ExternalDataJobInputs: team_id: int source_id: uuid.UUID run_id: str - schemas: list[str] + schemas: list[Tuple[str, str]] @activity.defn @@ -153,6 +153,8 @@ async def run_external_data_job(inputs: ExternalDataJobInputs) -> None: dataset_name=model.folder_path, ) + endpoints = [schema[1] for schema in inputs.schemas] + source = None if model.pipeline.source_type == ExternalDataSource.Type.STRIPE: from posthog.temporal.data_imports.pipelines.stripe.helpers import stripe_source @@ -162,7 +164,7 @@ async def run_external_data_job(inputs: ExternalDataJobInputs) -> None: raise ValueError(f"Stripe secret key not found for job {model.id}") source = stripe_source( api_key=stripe_secret_key, - endpoints=tuple(inputs.schemas), + endpoints=tuple(endpoints), team_id=inputs.team_id, job_id=inputs.run_id, ) @@ -181,7 +183,7 @@ async def run_external_data_job(inputs: ExternalDataJobInputs) -> None: source = hubspot( api_key=hubspot_access_code, refresh_token=refresh_token, - endpoints=tuple(inputs.schemas), + endpoints=tuple(endpoints), ) elif model.pipeline.source_type == ExternalDataSource.Type.POSTGRES: from posthog.temporal.data_imports.pipelines.postgres import postgres_source @@ -201,7 +203,7 @@ async def run_external_data_job(inputs: ExternalDataJobInputs) -> None: database=database, sslmode="prefer" if settings.TEST or settings.DEBUG else "require", schema=schema, - table_names=inputs.schemas, + table_names=endpoints, ) else: diff --git a/posthog/temporal/data_imports/pipelines/pipeline.py b/posthog/temporal/data_imports/pipelines/pipeline.py index ad6d53aa3a9e6..5297f2e39ac29 100644 --- a/posthog/temporal/data_imports/pipelines/pipeline.py +++ b/posthog/temporal/data_imports/pipelines/pipeline.py @@ -16,7 +16,7 @@ class PipelineInputs: source_id: UUID run_id: str - schemas: list[str] + schemas: list[tuple[str, str]] dataset_name: str job_type: str team_id: int diff --git a/posthog/temporal/tests/external_data/test_external_data_job.py b/posthog/temporal/tests/external_data/test_external_data_job.py index c92b81772a190..29555192bb90e 100644 --- a/posthog/temporal/tests/external_data/test_external_data_job.py +++ b/posthog/temporal/tests/external_data/test_external_data_job.py @@ -1,6 +1,6 @@ import uuid from unittest import mock - +from typing import Optional import pytest from asgiref.sync import sync_to_async from django.test import override_settings @@ -32,6 +32,7 @@ from posthog.temporal.data_imports.pipelines.schemas import ( PIPELINE_TYPE_SCHEMA_DEFAULT_MAPPING, ) +from posthog.models import Team from posthog.temporal.data_imports.pipelines.pipeline import DataImportPipeline from temporalio.testing import WorkflowEnvironment from temporalio.common import RetryPolicy @@ -118,6 +119,15 @@ async def postgres_connection(postgres_config, setup_postgres_test_db): await connection.close() +async def _create_schema(schema_name: str, source: ExternalDataSource, 
team: Team, table_id: Optional[str] = None): + return await sync_to_async(ExternalDataSchema.objects.create)( + name=schema_name, + team_id=team.id, + source_id=source.pk, + table_id=table_id, + ) + + @pytest.mark.django_db(transaction=True) @pytest.mark.asyncio async def test_create_external_job_activity(activity_environment, team, **kwargs): @@ -232,7 +242,9 @@ async def setup_job_1(): new_job = await sync_to_async(ExternalDataJob.objects.filter(id=new_job.id).prefetch_related("pipeline").get)() - schemas = ["Customer"] + customer_schema = await _create_schema("Customer", new_source, team) + schemas = [(customer_schema.id, "Customer")] + inputs = ExternalDataJobInputs( team_id=team.id, run_id=new_job.pk, @@ -262,7 +274,9 @@ async def setup_job_2(): new_job = await sync_to_async(ExternalDataJob.objects.filter(id=new_job.id).prefetch_related("pipeline").get)() - schemas = ["Customer", "Invoice"] + customer_schema = await _create_schema("Customer", new_source, team) + invoice_schema = await _create_schema("Invoice", new_source, team) + schemas = [(customer_schema.id, "Customer"), (invoice_schema.id, "Invoice")] inputs = ExternalDataJobInputs( team_id=team.id, run_id=new_job.pk, @@ -350,7 +364,8 @@ async def setup_job_1(): new_job = await sync_to_async(ExternalDataJob.objects.filter(id=new_job.id).prefetch_related("pipeline").get)() - schemas = ["Customer"] + customer_schema = await _create_schema("Customer", new_source, team) + schemas = [(customer_schema.id, "Customer")] inputs = ExternalDataJobInputs( team_id=team.id, run_id=new_job.pk, @@ -414,7 +429,8 @@ async def setup_job_1(): new_job = await sync_to_async(ExternalDataJob.objects.filter(id=new_job.id).prefetch_related("pipeline").get)() - schemas = ["Customer"] + customer_schema = await _create_schema("Customer", new_source, team) + schemas = [(customer_schema.id, "Customer")] inputs = ExternalDataJobInputs( team_id=team.id, run_id=new_job.pk, @@ -476,15 +492,26 @@ async def test_validate_schema_and_update_table_activity(activity_environment, t rows_synced=0, ) + test_1_schema = await _create_schema("test-1", new_source, team) + test_2_schema = await _create_schema("test-2", new_source, team) + test_3_schema = await _create_schema("test-3", new_source, team) + test_4_schema = await _create_schema("test-4", new_source, team) + test_5_schema = await _create_schema("test-5", new_source, team) + schemas = [ + (test_1_schema.id, "test-1"), + (test_2_schema.id, "test-2"), + (test_3_schema.id, "test-3"), + (test_4_schema.id, "test-4"), + (test_5_schema.id, "test-5"), + ] + with mock.patch( "posthog.warehouse.models.table.DataWarehouseTable.get_columns" ) as mock_get_columns, override_settings(**AWS_BUCKET_MOCK_SETTINGS): mock_get_columns.return_value = {"id": "string"} await activity_environment.run( validate_schema_activity, - ValidateSchemaInputs( - run_id=new_job.pk, team_id=team.id, schemas=["test-1", "test-2", "test-3", "test-4", "test-5"] - ), + ValidateSchemaInputs(run_id=new_job.pk, team_id=team.id, schemas=schemas), ) assert mock_get_columns.call_count == 10 @@ -504,6 +531,7 @@ async def test_validate_schema_and_update_table_activity_with_existing(activity_ status="running", source_type="Stripe", job_inputs={"stripe_secret_key": "test-key"}, + prefix="stripe_", ) old_job: ExternalDataJob = await sync_to_async(ExternalDataJob.objects.create)( @@ -521,7 +549,7 @@ async def test_validate_schema_and_update_table_activity_with_existing(activity_ url_pattern = await sync_to_async(old_job.url_pattern_by_schema)("test-1") - await 
sync_to_async(DataWarehouseTable.objects.create)( + existing_table = await sync_to_async(DataWarehouseTable.objects.create)( credential=old_credential, name="stripe_test-1", format="Parquet", @@ -537,15 +565,26 @@ async def test_validate_schema_and_update_table_activity_with_existing(activity_ rows_synced=0, ) + test_1_schema = await _create_schema("test-1", new_source, team, table_id=existing_table.id) + test_2_schema = await _create_schema("test-2", new_source, team) + test_3_schema = await _create_schema("test-3", new_source, team) + test_4_schema = await _create_schema("test-4", new_source, team) + test_5_schema = await _create_schema("test-5", new_source, team) + schemas = [ + (test_1_schema.id, "test-1"), + (test_2_schema.id, "test-2"), + (test_3_schema.id, "test-3"), + (test_4_schema.id, "test-4"), + (test_5_schema.id, "test-5"), + ] + with mock.patch( "posthog.warehouse.models.table.DataWarehouseTable.get_columns" ) as mock_get_columns, override_settings(**AWS_BUCKET_MOCK_SETTINGS): mock_get_columns.return_value = {"id": "string"} await activity_environment.run( validate_schema_activity, - ValidateSchemaInputs( - run_id=new_job.pk, team_id=team.id, schemas=["test-1", "test-2", "test-3", "test-4", "test-5"] - ), + ValidateSchemaInputs(run_id=new_job.pk, team_id=team.id, schemas=schemas), ) assert mock_get_columns.call_count == 10 @@ -595,9 +634,13 @@ async def test_validate_schema_and_update_table_activity_half_run(activity_envir }, ] + broken_schema = await _create_schema("broken_schema", new_source, team) + test_schema = await _create_schema("test_schema", new_source, team) + schemas = [(broken_schema.id, "broken_schema"), (test_schema.id, "test_schema")] + await activity_environment.run( validate_schema_activity, - ValidateSchemaInputs(run_id=new_job.pk, team_id=team.id, schemas=["broken_schema", "test_schema"]), + ValidateSchemaInputs(run_id=new_job.pk, team_id=team.id, schemas=schemas), ) assert mock_get_columns.call_count == 1 @@ -626,15 +669,26 @@ async def test_create_schema_activity(activity_environment, team, **kwargs): rows_synced=0, ) + test_1_schema = await _create_schema("test-1", new_source, team) + test_2_schema = await _create_schema("test-2", new_source, team) + test_3_schema = await _create_schema("test-3", new_source, team) + test_4_schema = await _create_schema("test-4", new_source, team) + test_5_schema = await _create_schema("test-5", new_source, team) + schemas = [ + (test_1_schema.id, "test-1"), + (test_2_schema.id, "test-2"), + (test_3_schema.id, "test-3"), + (test_4_schema.id, "test-4"), + (test_5_schema.id, "test-5"), + ] + with mock.patch( "posthog.warehouse.models.table.DataWarehouseTable.get_columns" ) as mock_get_columns, override_settings(**AWS_BUCKET_MOCK_SETTINGS): mock_get_columns.return_value = {"id": "string"} await activity_environment.run( validate_schema_activity, - ValidateSchemaInputs( - run_id=new_job.pk, team_id=team.id, schemas=["test-1", "test-2", "test-3", "test-4", "test-5"] - ), + ValidateSchemaInputs(run_id=new_job.pk, team_id=team.id, schemas=schemas), ) assert mock_get_columns.call_count == 10 @@ -802,7 +856,8 @@ async def setup_job_1(): new_job = await sync_to_async(ExternalDataJob.objects.filter(id=new_job.id).prefetch_related("pipeline").get)() - schemas = ["posthog_test"] + posthog_test_schema = await _create_schema("posthog_test", new_source, team) + schemas = [(posthog_test_schema.id, "posthog_test")] inputs = ExternalDataJobInputs( team_id=team.id, run_id=new_job.pk, diff --git 
a/posthog/test/__snapshots__/test_feature_flag.ambr b/posthog/test/__snapshots__/test_feature_flag.ambr index 7e048cf5536d2..2c2169fe4c5f2 100644 --- a/posthog/test/__snapshots__/test_feature_flag.ambr +++ b/posthog/test/__snapshots__/test_feature_flag.ambr @@ -95,6 +95,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -260,6 +261,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", @@ -589,6 +591,7 @@ SELECT "posthog_team"."id", "posthog_team"."uuid", "posthog_team"."organization_id", + "posthog_team"."project_id", "posthog_team"."api_token", "posthog_team"."app_urls", "posthog_team"."name", diff --git a/posthog/test/base.py b/posthog/test/base.py index 20ad2848c607d..8e914dd2b59d5 100644 --- a/posthog/test/base.py +++ b/posthog/test/base.py @@ -59,6 +59,7 @@ TRUNCATE_PERSON_STATIC_COHORT_TABLE_SQL, ) from posthog.models.person.util import bulk_create_persons, create_person +from posthog.models.project import Project from posthog.models.sessions.sql import ( DROP_SESSION_TABLE_SQL, DROP_SESSION_MATERIALIZED_VIEW_SQL, @@ -92,18 +93,20 @@ def _setup_test_data(klass): klass.organization = Organization.objects.create(name=klass.CONFIG_ORGANIZATION_NAME) - klass.team = Team.objects.create( + klass.project, klass.team = Project.objects.create_with_team( organization=klass.organization, - api_token=klass.CONFIG_API_TOKEN, - test_account_filters=[ - { - "key": "email", - "value": "@posthog.com", - "operator": "not_icontains", - "type": "person", - } - ], - has_completed_onboarding_for={"product_analytics": True}, + team_fields=dict( + api_token=klass.CONFIG_API_TOKEN, + test_account_filters=[ + { + "key": "email", + "value": "@posthog.com", + "operator": "not_icontains", + "type": "person", + } + ], + has_completed_onboarding_for={"product_analytics": True}, + ), ) if klass.CONFIG_EMAIL: klass.user = User.objects.create_and_join(klass.organization, klass.CONFIG_EMAIL, klass.CONFIG_PASSWORD) @@ -206,6 +209,7 @@ class PostHogTestCase(SimpleTestCase): # Test data definition stubs organization: Organization = None # type: ignore + project: Project = None # type: ignore team: Team = None # type: ignore user: User = None # type: ignore organization_membership: OrganizationMembership = None # type: ignore diff --git a/posthog/test/test_team.py b/posthog/test/test_team.py index ac95e5c8cc7e7..25f73dcfa87a9 100644 --- a/posthog/test/test_team.py +++ b/posthog/test/test_team.py @@ -12,6 +12,7 @@ User, ) from posthog.models.instance_setting import override_instance_config +from posthog.models.project import Project from posthog.models.team import get_team_in_cache, util from posthog.plugins.test.mock import mocked_plugin_requests_get from posthog.utils import PersonOnEventsMode @@ -45,7 +46,7 @@ def test_save_updates_cache(self): self.assertEqual(cached_team.api_token, api_token) self.assertEqual(cached_team.uuid, str(team.uuid)) self.assertEqual(cached_team.id, team.id) - self.assertEqual(cached_team.name, "Default Project") + self.assertEqual(cached_team.name, "Default project") team.name = "New name" team.session_recording_opt_in = True @@ -165,3 +166,40 @@ def test_team_on_self_hosted_uses_instance_setting_to_determine_person_on_events team = Team.objects.create_with_data(organization=self.organization) 
         self.assertEqual(team.person_on_events_mode, PersonOnEventsMode.DISABLED)
         mock_feature_enabled.assert_not_called()
+
+    def test_each_team_gets_project_with_default_name_and_same_id(self):
+        # Can be removed once environments are fully rolled out
+        team = Team.objects.create_with_data(organization=self.organization)
+
+        project = Project.objects.filter(id=team.id).first()
+
+        assert project is not None
+        self.assertEqual(project.name, "Default project")
+
+    def test_each_team_gets_project_with_custom_name_and_same_id(self):
+        # Can be removed once environments are fully rolled out
+        team = Team.objects.create_with_data(organization=self.organization, name="Hogflix")
+
+        project = Project.objects.filter(id=team.id).first()
+
+        assert project is not None
+        self.assertEqual(project.organization, team.organization)
+        self.assertEqual(project.name, "Hogflix")
+
+    @mock.patch("posthog.models.project.Project.objects.create", side_effect=Exception)
+    def test_team_not_created_if_project_creation_fails(self, mock_create):
+        # Can be removed once environments are fully rolled out
+        initial_team_count = Team.objects.count()
+        initial_project_count = Project.objects.count()
+
+        with self.assertRaises(Exception):
+            Team.objects.create_with_data(organization=self.organization, name="Hogflix")
+
+        self.assertEqual(Team.objects.count(), initial_team_count)
+        self.assertEqual(Project.objects.count(), initial_project_count)
+
+    def test_increment_id_sequence(self):
+        initial = Team.objects.increment_id_sequence()
+        subsequent = Team.objects.increment_id_sequence()
+
+        self.assertEqual(subsequent, initial + 1)
diff --git a/posthog/utils.py b/posthog/utils.py
index 769d289c93854..111ed0237cbcd 100644
--- a/posthog/utils.py
+++ b/posthog/utils.py
@@ -1329,3 +1329,7 @@ def label_for_team_id_to_track(team_id: int) -> str:
         pass
 
     return "unknown"
+
+
+def camel_to_snake_case(name: str) -> str:
+    return re.sub(r"(?<!^)(?=[A-Z])", "_", name).lower()
diff --git a/posthog/warehouse/data_load/validate_schema.py b/posthog/warehouse/data_load/validate_schema.py
--- a/posthog/warehouse/data_load/validate_schema.py
+++ b/posthog/warehouse/data_load/validate_schema.py
@@ ... @@
-async def validate_schema_and_update_table(run_id: str, team_id: int, schemas: list[str]) -> None:
+async def validate_schema_and_update_table(run_id: str, team_id: int, schemas: list[Tuple[str, str]]) -> None:
     """
     Validates the schemas of data that has been synced by external data job.
@@ -65,9 +66,12 @@ async def validate_schema_and_update_table(run_id: str, team_id: int, schemas: l access_secret=settings.AIRBYTE_BUCKET_SECRET, ) - for _schema_name in schemas: + for _schema in schemas: + _schema_id = _schema[0] + _schema_name = _schema[1] + table_name = f"{job.pipeline.prefix or ''}{job.pipeline.source_type}_{_schema_name}".lower() - new_url_pattern = job.url_pattern_by_schema(_schema_name) + new_url_pattern = job.url_pattern_by_schema(camel_to_snake_case(_schema_name)) # Check try: @@ -92,11 +96,10 @@ async def validate_schema_and_update_table(run_id: str, team_id: int, schemas: l # create or update table_created = None if last_successful_job: - old_url_pattern = last_successful_job.url_pattern_by_schema(_schema_name) try: - table_created = await get_table_by_url_pattern_and_source( - team_id=job.team_id, source_id=job.pipeline.id, url_pattern=old_url_pattern - ) + table_created = await get_table_by_schema_id(_schema_id, team_id) + if not table_created: + raise DataWarehouseTable.DoesNotExist except Exception: table_created = None else: @@ -111,9 +114,7 @@ async def validate_schema_and_update_table(run_id: str, team_id: int, schemas: l await asave_datawarehousetable(table_created) # schema could have been deleted by this point - schema_model = await aget_schema_if_exists( - schema_name=_schema_name, team_id=job.team_id, source_id=job.pipeline.id - ) + schema_model = await aget_schema_by_id(schema_id=_schema_id, team_id=job.team_id) if schema_model: schema_model.table = table_created diff --git a/posthog/warehouse/models/external_data_schema.py b/posthog/warehouse/models/external_data_schema.py index 3d4423b24778e..ad668abc8589a 100644 --- a/posthog/warehouse/models/external_data_schema.py +++ b/posthog/warehouse/models/external_data_schema.py @@ -41,9 +41,14 @@ def aget_schema_if_exists(schema_name: str, team_id: int, source_id: uuid.UUID) return get_schema_if_exists(schema_name=schema_name, team_id=team_id, source_id=source_id) +@database_sync_to_async +def aget_schema_by_id(schema_id: str, team_id: int) -> ExternalDataSchema | None: + return ExternalDataSchema.objects.get(id=schema_id, team_id=team_id) + + def get_active_schemas_for_source_id(source_id: uuid.UUID, team_id: int): schemas = ExternalDataSchema.objects.filter(team_id=team_id, source_id=source_id, should_sync=True).values().all() - return [val["name"] for val in schemas] + return [(val["id"], val["name"]) for val in schemas] def get_all_schemas_for_source_id(source_id: uuid.UUID, team_id: int): diff --git a/posthog/warehouse/models/table.py b/posthog/warehouse/models/table.py index f8cd3cf73e127..c2c87e67f3e41 100644 --- a/posthog/warehouse/models/table.py +++ b/posthog/warehouse/models/table.py @@ -20,6 +20,7 @@ sane_repr, ) from posthog.warehouse.models.util import remove_named_tuples +from posthog.warehouse.models.external_data_schema import ExternalDataSchema from django.db.models import Q from .credential import DataWarehouseCredential from uuid import UUID @@ -154,6 +155,11 @@ def get_table_by_url_pattern_and_source(url_pattern: str, source_id: UUID, team_ ) +@database_sync_to_async +def get_table_by_schema_id(schema_id: str, team_id: int): + return ExternalDataSchema.objects.get(id=schema_id, team_id=team_id).table + + @database_sync_to_async def acreate_datawarehousetable(**kwargs): return DataWarehouseTable.objects.create(**kwargs) diff --git a/production.Dockerfile b/production.Dockerfile index 9b71e97f34b69..6d43498a5350b 100644 --- a/production.Dockerfile +++ b/production.Dockerfile @@ -21,9 
+21,9 @@ # # --------------------------------------------------------- # -FROM node:18.12.1-bullseye-slim AS frontend-build +FROM node:18.19.1-bullseye-slim AS frontend-build WORKDIR /code -SHELL ["/bin/bash", "-o", "pipefail", "-c"] +SHELL ["/bin/bash", "-e", "-o", "pipefail", "-c"] COPY package.json pnpm-lock.yaml ./ COPY patches/ patches/ @@ -42,9 +42,9 @@ RUN pnpm build # # --------------------------------------------------------- # -FROM node:18.12.1-bullseye-slim AS plugin-server-build +FROM node:18.19.1-bullseye-slim AS plugin-server-build WORKDIR /code/plugin-server -SHELL ["/bin/bash", "-o", "pipefail", "-c"] +SHELL ["/bin/bash", "-e", "-o", "pipefail", "-c"] # Compile and install Node.js dependencies. COPY ./plugin-server/package.json ./plugin-server/pnpm-lock.yaml ./plugin-server/tsconfig.json ./ @@ -85,7 +85,7 @@ RUN corepack enable && \ # FROM python:3.10.10-slim-bullseye AS posthog-build WORKDIR /code -SHELL ["/bin/bash", "-o", "pipefail", "-c"] +SHELL ["/bin/bash", "-e", "-o", "pipefail", "-c"] # Compile and install Python dependencies. # We install those dependencies on a custom folder that we will @@ -120,7 +120,7 @@ RUN SKIP_SERVICE_VERSION_REQUIREMENTS=1 SECRET_KEY='unsafe secret key for collec # FROM debian:bullseye-slim AS fetch-geoip-db WORKDIR /code -SHELL ["/bin/bash", "-o", "pipefail", "-c"] +SHELL ["/bin/bash", "-e", "-o", "pipefail", "-c"] # Fetch the GeoLite2-City database that will be used for IP geolocation within Django. RUN apt-get update && \ @@ -166,19 +166,19 @@ RUN set -ex \ && CC_OPT="$(DEB_BUILD_MAINT_OPTIONS="hardening=+all,-pie" DEB_CFLAGS_MAINT_APPEND="-Wp,-D_FORTIFY_SOURCE=2 -fPIC" dpkg-buildflags --get CFLAGS)" \ && LD_OPT="$(DEB_BUILD_MAINT_OPTIONS="hardening=+all,-pie" DEB_LDFLAGS_MAINT_APPEND="-Wl,--as-needed -pie" dpkg-buildflags --get LDFLAGS)" \ && CONFIGURE_ARGS_MODULES="--prefix=/usr \ - --statedir=/var/lib/unit \ - --control=unix:/var/run/control.unit.sock \ - --runstatedir=/var/run \ - --pid=/var/run/unit.pid \ - --logdir=/var/log \ - --log=/var/log/unit.log \ - --tmpdir=/var/tmp \ - --user=unit \ - --group=unit \ - --openssl \ - --libdir=/usr/lib/$DEB_HOST_MULTIARCH" \ + --statedir=/var/lib/unit \ + --control=unix:/var/run/control.unit.sock \ + --runstatedir=/var/run \ + --pid=/var/run/unit.pid \ + --logdir=/var/log \ + --log=/var/log/unit.log \ + --tmpdir=/var/tmp \ + --user=unit \ + --group=unit \ + --openssl \ + --libdir=/usr/lib/$DEB_HOST_MULTIARCH" \ && CONFIGURE_ARGS="$CONFIGURE_ARGS_MODULES \ - --njs" \ + --njs" \ && make -j $NCPU -C pkg/contrib .njs \ && export PKG_CONFIG_PATH=$(pwd)/pkg/contrib/njs/build \ && ./configure $CONFIGURE_ARGS --cc-opt="$CC_OPT" --ld-opt="$LD_OPT" --modulesdir=/usr/lib/unit/debug-modules --debug \ @@ -200,8 +200,8 @@ RUN set -ex \ && cd \ && rm -rf /usr/src/unit \ && for f in /usr/sbin/unitd /usr/lib/unit/modules/*.unit.so; do \ - ldd $f | awk '/=>/{print $(NF-1)}' | while read n; do dpkg-query -S $n; done | sed 's/^\([^:]\+\):.*$/\1/' | sort | uniq >> /requirements.apt; \ - done \ + ldd $f | awk '/=>/{print $(NF-1)}' | while read n; do dpkg-query -S $n; done | sed 's/^\([^:]\+\):.*$/\1/' | sort | uniq >> /requirements.apt; \ + done \ && apt-mark showmanual | xargs apt-mark auto > /dev/null \ && { [ -z "$savedAptMark" ] || apt-mark manual $savedAptMark; } \ && /bin/true \ @@ -209,13 +209,13 @@ RUN set -ex \ && mkdir -p /docker-entrypoint.d/ \ && groupadd --gid 998 unit \ && useradd \ - --uid 998 \ - --gid unit \ - --no-create-home \ - --home /nonexistent \ - --comment "unit user" \ - --shell 
/bin/false \ - unit \ + --uid 998 \ + --gid unit \ + --no-create-home \ + --home /nonexistent \ + --comment "unit user" \ + --shell /bin/false \ + unit \ && apt-get update \ && apt-get --no-install-recommends --no-install-suggests -y install curl $(cat /requirements.apt) \ && apt-get purge -y --auto-remove build-essential \ @@ -237,7 +237,7 @@ CMD ["unitd", "--no-daemon", "--control", "unix:/var/run/control.unit.sock"] # FROM unit-131-python-310 WORKDIR /code -SHELL ["/bin/bash", "-o", "pipefail", "-c"] +SHELL ["/bin/bash", "-e", "-o", "pipefail", "-c"] ENV PYTHONUNBUFFERED 1 # Install OS runtime dependencies.
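
Closing reviewer note: the warehouse refactor threads `(schema_id, schema_name)` tuples through the Temporal activities instead of bare schema names, and the new `camel_to_snake_case` helper normalizes names when deriving URL patterns. A minimal sketch of the intended behaviour; the regex body matches the reconstruction above and is the standard idiom (an assumption, since only the helper's name and call sites appear in the diff), and the UUIDs are purely illustrative:

```python
import re


def camel_to_snake_case(name: str) -> str:
    # Insert "_" before each uppercase letter that isn't at the start, then lowercase.
    # Assumed implementation -- the diff only shows the helper's name and usage.
    return re.sub(r"(?<!^)(?=[A-Z])", "_", name).lower()


# Schemas now travel as (id, name) tuples so downstream activities can resolve
# the ExternalDataSchema row by primary key instead of looking it up by name.
schemas: list[tuple[str, str]] = [
    ("9f0c1c1e-0000-4000-8000-000000000001", "BalanceTransaction"),  # illustrative UUID
    ("9f0c1c1e-0000-4000-8000-000000000002", "Customer"),
]

# run_external_data_job extracts the names to build the source's endpoint list.
endpoints = [schema[1] for schema in schemas]
assert endpoints == ["BalanceTransaction", "Customer"]
assert camel_to_snake_case("BalanceTransaction") == "balance_transaction"
```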