diff --git a/bin/copy-posthog-js b/bin/copy-posthog-js index 72fb6c6ec1fd6..ed66c58018772 100755 --- a/bin/copy-posthog-js +++ b/bin/copy-posthog-js @@ -8,3 +8,4 @@ cp node_modules/posthog-js/dist/array.js* frontend/dist/ cp node_modules/posthog-js/dist/array.full.js* frontend/dist/ cp node_modules/posthog-js/dist/recorder.js* frontend/dist/ cp node_modules/posthog-js/dist/recorder-v2.js* frontend/dist/ +cp node_modules/posthog-js/dist/surveys.js* frontend/dist/ diff --git a/frontend/__snapshots__/lemon-ui-icons--shelf-b.png b/frontend/__snapshots__/lemon-ui-icons--shelf-b.png index 708f32b5e4aa3..7e81f11adc7f0 100644 Binary files a/frontend/__snapshots__/lemon-ui-icons--shelf-b.png and b/frontend/__snapshots__/lemon-ui-icons--shelf-b.png differ diff --git a/frontend/__snapshots__/lemon-ui-icons--shelf-i.png b/frontend/__snapshots__/lemon-ui-icons--shelf-i.png index c9f4b8b51b6b0..7673cc4d1c96d 100644 Binary files a/frontend/__snapshots__/lemon-ui-icons--shelf-i.png and b/frontend/__snapshots__/lemon-ui-icons--shelf-i.png differ diff --git a/frontend/__snapshots__/scenes-app-recordings--recordings-play-list-no-pinned-recordings.png b/frontend/__snapshots__/scenes-app-recordings--recordings-play-list-no-pinned-recordings.png index 0dd5b12b4fe35..b79c4b316f022 100644 Binary files a/frontend/__snapshots__/scenes-app-recordings--recordings-play-list-no-pinned-recordings.png and b/frontend/__snapshots__/scenes-app-recordings--recordings-play-list-no-pinned-recordings.png differ diff --git a/frontend/src/lib/constants.tsx b/frontend/src/lib/constants.tsx index 1842e4f2adb4f..099091755bc87 100644 --- a/frontend/src/lib/constants.tsx +++ b/frontend/src/lib/constants.tsx @@ -166,6 +166,7 @@ export const FEATURE_FLAGS = { SESSION_REPLAY_CORS_PROXY: 'session-replay-cors-proxy', // owner: #team-monitoring HOGQL_INSIGHTS: 'hogql-insights', // owner: @mariusandra WEBHOOKS_DENYLIST: 'webhooks-denylist', // owner: #team-pipeline + SURVEYS_SITE_APP_DEPRECATION: 'surveys-site-app-deprecation', // owner: @neilkakkar } as const export type FeatureFlagKey = (typeof FEATURE_FLAGS)[keyof typeof FEATURE_FLAGS] diff --git a/frontend/src/lib/lemon-ui/icons/icons.tsx b/frontend/src/lib/lemon-ui/icons/icons.tsx index d68e9c0934827..3ef6f11d75e1e 100644 --- a/frontend/src/lib/lemon-ui/icons/icons.tsx +++ b/frontend/src/lib/lemon-ui/icons/icons.tsx @@ -1927,6 +1927,27 @@ export function IconMonitor(props: LemonIconProps): JSX.Element { ) } +/** Material Bold icon. */ +export function IconBold(props: LemonIconProps): JSX.Element { + return ( + + + + ) +} + +/** Material Italic icon. */ +export function IconItalic(props: LemonIconProps): JSX.Element { + return ( + + + + ) +} + /** Material CellPhone icon. 
*/ export function IconPhone(props: LemonIconProps): JSX.Element { return ( diff --git a/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.test.ts b/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.test.ts index ab120558a72c5..523179c5e4071 100644 --- a/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.test.ts +++ b/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.test.ts @@ -354,7 +354,6 @@ describe('filtersToQueryNode', () => { funnel_correlation_person_entity: { a: 1 }, funnel_correlation_person_converted: 'true', funnel_custom_steps: [1, 2, 3], - funnel_advanced: true, layout: FunnelLayout.horizontal, funnel_step: 1, entrance_period_start: 'abc', @@ -371,7 +370,6 @@ describe('filtersToQueryNode', () => { funnel_from_step: 1, funnel_to_step: 2, funnel_step_reference: FunnelStepReference.total, - funnel_step_breakdown: 1, breakdown_attribution_type: BreakdownAttributionType.AllSteps, breakdown_attribution_value: 1, bin_count: 'auto', @@ -384,14 +382,7 @@ describe('filtersToQueryNode', () => { funnel_to_step: 1, }, ], - funnel_correlation_person_entity: { a: 1 }, - funnel_correlation_person_converted: 'true', - funnel_custom_steps: [1, 2, 3], - funnel_advanced: true, layout: FunnelLayout.horizontal, - funnel_step: 1, - entrance_period_start: 'abc', - drop_off: true, hidden_legend_breakdowns: ['Chrome', 'Safari'], }, } @@ -465,9 +456,6 @@ describe('filtersToQueryNode', () => { funnel_filter: { a: 1 }, exclude_events: ['e', 'f'], step_limit: 1, - path_start_key: 'g', - path_end_key: 'h', - path_dropoff_key: 'i', path_replacements: true, local_path_cleaning_filters: [{ alias: 'home' }], edge_limit: 1, @@ -486,7 +474,6 @@ describe('filtersToQueryNode', () => { compare: true, show_legend: true, hidden_legend_keys: { 0: true, 10: true }, - stickiness_days: 2, shown_as: ShownAsValue.STICKINESS, display: ChartDisplayType.ActionsLineGraph, } @@ -499,7 +486,6 @@ describe('filtersToQueryNode', () => { compare: true, show_legend: true, hidden_legend_indexes: [0, 10], - stickiness_days: 2, shown_as: ShownAsValue.STICKINESS, display: ChartDisplayType.ActionsLineGraph, }, diff --git a/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.ts b/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.ts index 83aea93627782..5137fbf5b2116 100644 --- a/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.ts +++ b/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.ts @@ -191,7 +191,6 @@ export const filtersToQueryNode = (filters: Partial): InsightQueryNo funnel_from_step: filters.funnel_from_step, funnel_to_step: filters.funnel_to_step, funnel_step_reference: filters.funnel_step_reference, - funnel_step_breakdown: filters.funnel_step_breakdown, breakdown_attribution_type: filters.breakdown_attribution_type, breakdown_attribution_value: filters.breakdown_attribution_value, bin_count: filters.bin_count, @@ -199,14 +198,7 @@ export const filtersToQueryNode = (filters: Partial): InsightQueryNo funnel_window_interval: filters.funnel_window_interval, funnel_order_type: filters.funnel_order_type, exclusions: filters.exclusions, - funnel_correlation_person_entity: filters.funnel_correlation_person_entity, - funnel_correlation_person_converted: filters.funnel_correlation_person_converted, - funnel_custom_steps: filters.funnel_custom_steps, - funnel_advanced: filters.funnel_advanced, layout: filters.layout, - funnel_step: filters.funnel_step, - entrance_period_start: filters.entrance_period_start, - drop_off: 
filters.drop_off, hidden_legend_breakdowns: cleanHiddenLegendSeries(filters.hidden_legend_keys), funnel_aggregate_by_hogql: filters.funnel_aggregate_by_hogql, }) @@ -238,9 +230,6 @@ export const filtersToQueryNode = (filters: Partial): InsightQueryNo funnel_filter: filters.funnel_filter, exclude_events: filters.exclude_events, step_limit: filters.step_limit, - path_start_key: filters.path_start_key, - path_end_key: filters.path_end_key, - path_dropoff_key: filters.path_dropoff_key, path_replacements: filters.path_replacements, local_path_cleaning_filters: filters.local_path_cleaning_filters, edge_limit: filters.edge_limit, @@ -256,7 +245,6 @@ export const filtersToQueryNode = (filters: Partial): InsightQueryNo compare: filters.compare, show_legend: filters.show_legend, hidden_legend_indexes: cleanHiddenLegendIndexes(filters.hidden_legend_keys), - stickiness_days: filters.stickiness_days, shown_as: filters.shown_as, show_values_on_series: filters.show_values_on_series, }) diff --git a/frontend/src/queries/schema.json b/frontend/src/queries/schema.json index ec123459bd42a..7edaeb0039cf1 100644 --- a/frontend/src/queries/schema.json +++ b/frontend/src/queries/schema.json @@ -528,20 +528,6 @@ }, "EmptyPropertyFilter": { "additionalProperties": false, - "properties": { - "key": { - "not": {} - }, - "operator": { - "not": {} - }, - "type": { - "not": {} - }, - "value": { - "not": {} - } - }, "type": "object" }, "EntityType": { @@ -860,15 +846,7 @@ "enum": ["second", "minute", "hour", "day", "week", "month"], "type": "string" }, - "FunnelLayout": { - "enum": ["horizontal", "vertical"], - "type": "string" - }, - "FunnelPathType": { - "enum": ["funnel_path_before_step", "funnel_path_between_steps", "funnel_path_after_step"], - "type": "string" - }, - "FunnelStepRangeEntityFilter": { + "FunnelExclusion": { "additionalProperties": false, "properties": { "custom_name": { @@ -898,6 +876,14 @@ }, "type": "object" }, + "FunnelLayout": { + "enum": ["horizontal", "vertical"], + "type": "string" + }, + "FunnelPathType": { + "enum": ["funnel_path_before_step", "funnel_path_between_steps", "funnel_path_after_step"], + "type": "string" + }, "FunnelStepReference": { "enum": ["total", "previous"], "type": "string" @@ -908,7 +894,7 @@ }, "FunnelsFilter": { "additionalProperties": false, - "description": "`FunnelsFilterType` minus everything inherited from `FilterType` and `hidden_legend_keys` replaced by `hidden_legend_breakdowns`", + "description": "`FunnelsFilterType` minus everything inherited from `FilterType` and persons modal related params and `hidden_legend_keys` replaced by `hidden_legend_breakdowns`", "properties": { "bin_count": { "$ref": "#/definitions/BinCountValue" @@ -919,65 +905,21 @@ "breakdown_attribution_value": { "type": "number" }, - "drop_off": { - "type": "boolean" - }, - "entrance_period_start": { - "type": "string" - }, "exclusions": { "items": { - "$ref": "#/definitions/FunnelStepRangeEntityFilter" + "$ref": "#/definitions/FunnelExclusion" }, "type": "array" }, - "funnel_advanced": { - "type": "boolean" - }, "funnel_aggregate_by_hogql": { "type": "string" }, - "funnel_correlation_person_converted": { - "enum": ["true", "false"], - "type": "string" - }, - "funnel_correlation_person_entity": { - "type": "object" - }, - "funnel_custom_steps": { - "items": { - "type": "number" - }, - "type": "array" - }, "funnel_from_step": { "type": "number" }, "funnel_order_type": { "$ref": "#/definitions/StepOrderValue" }, - "funnel_step": { - "type": "number" - }, - "funnel_step_breakdown": { - "anyOf": 
[ - { - "type": "string" - }, - { - "items": { - "type": "number" - }, - "type": "array" - }, - { - "type": "number" - }, - { - "type": "null" - } - ] - }, "funnel_step_reference": { "$ref": "#/definitions/FunnelStepReference" }, @@ -1486,7 +1428,7 @@ }, "PathsFilter": { "additionalProperties": false, - "description": "`PathsFilterType` minus everything inherited from `FilterType`", + "description": "`PathsFilterType` minus everything inherited from `FilterType` and persons modal related params", "properties": { "edge_limit": { "type": "number" @@ -1524,12 +1466,6 @@ "min_edge_weight": { "type": "number" }, - "path_dropoff_key": { - "type": "string" - }, - "path_end_key": { - "type": "string" - }, "path_groupings": { "items": { "type": "string" @@ -1539,9 +1475,6 @@ "path_replacements": { "type": "boolean" }, - "path_start_key": { - "type": "string" - }, "path_type": { "$ref": "#/definitions/PathType" }, @@ -2065,7 +1998,7 @@ }, "StickinessFilter": { "additionalProperties": false, - "description": "`StickinessFilterType` minus everything inherited from `FilterType` and `hidden_legend_keys` replaced by `hidden_legend_indexes`", + "description": "`StickinessFilterType` minus everything inherited from `FilterType` and persons modal related params and `hidden_legend_keys` replaced by `hidden_legend_indexes`", "properties": { "compare": { "type": "boolean" @@ -2087,9 +2020,6 @@ }, "shown_as": { "$ref": "#/definitions/ShownAsValue" - }, - "stickiness_days": { - "type": "number" } }, "type": "object" @@ -2283,6 +2213,9 @@ ], "description": "Property filters for all series" }, + "response": { + "$ref": "#/definitions/TrendsQueryResponse" + }, "samplingFactor": { "description": "Sampling rate", "type": ["number", "null"] @@ -2308,6 +2241,34 @@ }, "required": ["kind", "series"], "type": "object" + }, + "TrendsQueryResponse": { + "additionalProperties": false, + "properties": { + "is_cached": { + "type": "boolean" + }, + "last_refresh": { + "type": "string" + }, + "next_allowed_client_refresh": { + "type": "string" + }, + "result": { + "items": { + "type": "object" + }, + "type": "array" + }, + "timings": { + "items": { + "$ref": "#/definitions/QueryTiming" + }, + "type": "array" + } + }, + "required": ["result"], + "type": "object" } } } diff --git a/frontend/src/queries/schema.ts b/frontend/src/queries/schema.ts index 2d0ad2b90c137..54c296a3521a0 100644 --- a/frontend/src/queries/schema.ts +++ b/frontend/src/queries/schema.ts @@ -195,6 +195,7 @@ export interface ActionsNode extends EntityNode { kind: NodeKind.ActionsNode id: number } + export interface QueryTiming { /** Key. Shortened to 'k' to save on data. */ k: string @@ -374,6 +375,11 @@ export type TrendsFilter = Omit< TrendsFilterType & { hidden_legend_indexes?: number[] }, keyof FilterType | 'hidden_legend_keys' > + +export interface TrendsQueryResponse extends QueryResponse { + result: Record[] +} + export interface TrendsQuery extends InsightsQueryBase { kind: NodeKind.TrendsQuery /** Granularity of the response. 
Can be one of `hour`, `day`, `week` or `month` */ @@ -384,13 +390,22 @@ export interface TrendsQuery extends InsightsQueryBase { trendsFilter?: TrendsFilter /** Breakdown of the events and actions */ breakdown?: BreakdownFilter + response?: TrendsQueryResponse } -/** `FunnelsFilterType` minus everything inherited from `FilterType` and - * `hidden_legend_keys` replaced by `hidden_legend_breakdowns` */ +/** `FunnelsFilterType` minus everything inherited from `FilterType` and persons modal related params + * and `hidden_legend_keys` replaced by `hidden_legend_breakdowns` */ export type FunnelsFilter = Omit< FunnelsFilterType & { hidden_legend_breakdowns?: string[] }, - keyof FilterType | 'hidden_legend_keys' + | keyof FilterType + | 'hidden_legend_keys' + | 'funnel_step_breakdown' + | 'funnel_correlation_person_entity' + | 'funnel_correlation_person_converted' + | 'entrance_period_start' + | 'drop_off' + | 'funnel_step' + | 'funnel_custom_steps' > export interface FunnelsQuery extends InsightsQueryBase { kind: NodeKind.FunnelsQuery @@ -412,19 +427,22 @@ export interface RetentionQuery extends InsightsQueryBase { retentionFilter?: RetentionFilter } -/** `PathsFilterType` minus everything inherited from `FilterType` */ -export type PathsFilter = Omit +/** `PathsFilterType` minus everything inherited from `FilterType` and persons modal related params */ +export type PathsFilter = Omit< + PathsFilterType, + keyof FilterType | 'path_start_key' | 'path_end_key' | 'path_dropoff_key' +> export interface PathsQuery extends InsightsQueryBase { kind: NodeKind.PathsQuery /** Properties specific to the paths insight */ pathsFilter?: PathsFilter } -/** `StickinessFilterType` minus everything inherited from `FilterType` and - * `hidden_legend_keys` replaced by `hidden_legend_indexes` */ +/** `StickinessFilterType` minus everything inherited from `FilterType` and persons modal related params + * and `hidden_legend_keys` replaced by `hidden_legend_indexes` */ export type StickinessFilter = Omit< StickinessFilterType & { hidden_legend_indexes?: number[] }, - keyof FilterType | 'hidden_legend_keys' + keyof FilterType | 'hidden_legend_keys' | 'stickiness_days' > export interface StickinessQuery extends InsightsQueryBase { kind: NodeKind.StickinessQuery diff --git a/frontend/src/queries/utils.ts b/frontend/src/queries/utils.ts index 80514ecba711b..f7da1dacf865d 100644 --- a/frontend/src/queries/utils.ts +++ b/frontend/src/queries/utils.ts @@ -126,7 +126,7 @@ export function isLifecycleQuery(node?: Node | null): node is LifecycleQuery { } export function isQueryWithHogQLSupport(node?: Node | null): node is LifecycleQuery { - return isLifecycleQuery(node) + return isLifecycleQuery(node) || isTrendsQuery(node) } export function isInsightQueryWithDisplay(node?: Node | null): node is TrendsQuery | StickinessQuery { diff --git a/frontend/src/scenes/billing/billingLogic.ts b/frontend/src/scenes/billing/billingLogic.ts index 2fdc2aa0f56ee..9a0d1eff86ec7 100644 --- a/frontend/src/scenes/billing/billingLogic.ts +++ b/frontend/src/scenes/billing/billingLogic.ts @@ -77,7 +77,7 @@ export const billingLogic = kea([ return window.location.pathname.includes('/ingestion') ? urls.ingestion() + '/billing' : window.location.pathname.includes('/onboarding') - ? window.location.pathname + ? 
window.location.pathname + window.location.search : '' }, }, diff --git a/frontend/src/scenes/data-management/database/DatabaseTables.tsx b/frontend/src/scenes/data-management/database/DatabaseTables.tsx index d1f9b1ec50b01..1952603fd5845 100644 --- a/frontend/src/scenes/data-management/database/DatabaseTables.tsx +++ b/frontend/src/scenes/data-management/database/DatabaseTables.tsx @@ -88,7 +88,9 @@ export function DatabaseTables({ // TODO: Use `hogql` tag? query: `SELECT ${obj.columns .filter(({ table, fields, chain }) => !table && !fields && !chain) - .map(({ key }) => key)} FROM ${table} LIMIT 100`, + .map(({ key }) => key)} FROM ${ + table === 'numbers' ? 'numbers(0, 10)' : table + } LIMIT 100`, }, } return ( diff --git a/frontend/src/scenes/funnels/funnelDataLogic.ts b/frontend/src/scenes/funnels/funnelDataLogic.ts index bedb0d0172e58..078fba4da275a 100644 --- a/frontend/src/scenes/funnels/funnelDataLogic.ts +++ b/frontend/src/scenes/funnels/funnelDataLogic.ts @@ -4,7 +4,7 @@ import { FunnelResultType, FunnelVizType, FunnelStep, - FunnelStepRangeEntityFilter, + FunnelExclusion, FunnelStepReference, FunnelStepWithNestedBreakdown, InsightLogicProps, @@ -381,7 +381,7 @@ export const funnelDataLogic = kea([ // Exclusion filters exclusionDefaultStepRange: [ (s) => [s.querySource], - (querySource: FunnelsQuery): Omit => ({ + (querySource: FunnelsQuery): Omit => ({ funnel_from_step: 0, funnel_to_step: (querySource.series || []).length > 1 ? querySource.series.length - 1 : 1, }), diff --git a/frontend/src/scenes/funnels/funnelUtils.test.ts b/frontend/src/scenes/funnels/funnelUtils.test.ts index 16b92f99941aa..fac6a2b82f0cd 100644 --- a/frontend/src/scenes/funnels/funnelUtils.test.ts +++ b/frontend/src/scenes/funnels/funnelUtils.test.ts @@ -13,7 +13,7 @@ import { FunnelCorrelation, FunnelCorrelationResultsType, FunnelCorrelationType, - FunnelStepRangeEntityFilter, + FunnelExclusion, } from '~/types' import { dayjs } from 'lib/dayjs' @@ -175,7 +175,7 @@ describe('getClampedStepRangeFilter', () => { const stepRange = { funnel_from_step: 0, funnel_to_step: 1, - } as FunnelStepRangeEntityFilter + } as FunnelExclusion const filters = { funnel_from_step: 1, funnel_to_step: 2, @@ -193,7 +193,7 @@ describe('getClampedStepRangeFilter', () => { }) it('ensures step range is clamped to step range', () => { - const stepRange = {} as FunnelStepRangeEntityFilter + const stepRange = {} as FunnelExclusion const filters = { funnel_from_step: -1, funnel_to_step: 12, @@ -211,7 +211,7 @@ describe('getClampedStepRangeFilter', () => { }) it('returns undefined if the incoming filters are undefined', () => { - const stepRange = {} as FunnelStepRangeEntityFilter + const stepRange = {} as FunnelExclusion const filters = { funnel_from_step: undefined, funnel_to_step: undefined, diff --git a/frontend/src/scenes/funnels/funnelUtils.ts b/frontend/src/scenes/funnels/funnelUtils.ts index 46f50052b226d..8dfc6a0539e73 100644 --- a/frontend/src/scenes/funnels/funnelUtils.ts +++ b/frontend/src/scenes/funnels/funnelUtils.ts @@ -1,6 +1,6 @@ import { autoCaptureEventToDescription, clamp } from 'lib/utils' import { - FunnelStepRangeEntityFilter, + FunnelExclusion, FunnelStep, FunnelStepWithNestedBreakdown, BreakdownKeyType, @@ -225,9 +225,7 @@ export const isStepsEmpty = (filters: FunnelsFilterType): boolean => export const isStepsUndefined = (filters: FunnelsFilterType): boolean => typeof filters.events === 'undefined' && (typeof filters.actions === 'undefined' || filters.actions.length === 0) -export const 
deepCleanFunnelExclusionEvents = ( - filters: FunnelsFilterType -): FunnelStepRangeEntityFilter[] | undefined => { +export const deepCleanFunnelExclusionEvents = (filters: FunnelsFilterType): FunnelExclusion[] | undefined => { if (!filters.exclusions) { return undefined } @@ -255,9 +253,9 @@ export const getClampedStepRangeFilter = ({ stepRange, filters, }: { - stepRange?: FunnelStepRangeEntityFilter + stepRange?: FunnelExclusion filters: FunnelsFilterType -}): FunnelStepRangeEntityFilter => { +}): FunnelExclusion => { const maxStepIndex = Math.max((filters.events?.length || 0) + (filters.actions?.length || 0) - 1, 1) let funnel_from_step = findFirstNumber([stepRange?.funnel_from_step, filters.funnel_from_step]) @@ -282,9 +280,9 @@ export const getClampedStepRangeFilterDataExploration = ({ stepRange, query, }: { - stepRange?: FunnelStepRangeEntityFilter + stepRange?: FunnelExclusion query: FunnelsQuery -}): FunnelStepRangeEntityFilter => { +}): FunnelExclusion => { const maxStepIndex = Math.max(query.series.length || 0 - 1, 1) let funnel_from_step = findFirstNumber([stepRange?.funnel_from_step, query.funnelsFilter?.funnel_from_step]) diff --git a/frontend/src/scenes/insights/filters/ActionFilter/ActionFilter.tsx b/frontend/src/scenes/insights/filters/ActionFilter/ActionFilter.tsx index ea099b6ec9764..adcbb55787bb9 100644 --- a/frontend/src/scenes/insights/filters/ActionFilter/ActionFilter.tsx +++ b/frontend/src/scenes/insights/filters/ActionFilter/ActionFilter.tsx @@ -3,13 +3,7 @@ import React, { useEffect } from 'react' import { BindLogic, useActions, useValues } from 'kea' import { entityFilterLogic, toFilters, LocalFilter } from './entityFilterLogic' import { ActionFilterRow, MathAvailability } from './ActionFilterRow/ActionFilterRow' -import { - ActionFilter as ActionFilterType, - FilterType, - FunnelStepRangeEntityFilter, - InsightType, - Optional, -} from '~/types' +import { ActionFilter as ActionFilterType, FilterType, FunnelExclusion, InsightType, Optional } from '~/types' import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' import { RenameModal } from 'scenes/insights/filters/ActionFilter/RenameModal' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' @@ -55,11 +49,7 @@ export interface ActionFilterProps { customRowSuffix?: | string | JSX.Element - | ((props: { - filter: ActionFilterType | FunnelStepRangeEntityFilter - index: number - onClose: () => void - }) => JSX.Element) + | ((props: { filter: ActionFilterType | FunnelExclusion; index: number; onClose: () => void }) => JSX.Element) /** Show nested arrows to the left of property filter buttons */ showNestedArrow?: boolean /** Which tabs to show for actions selector */ diff --git a/frontend/src/scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow.tsx b/frontend/src/scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow.tsx index 90e01a83d6df5..921f18b586f22 100644 --- a/frontend/src/scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow.tsx +++ b/frontend/src/scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow.tsx @@ -5,7 +5,7 @@ import { ActionFilter, EntityType, EntityTypes, - FunnelStepRangeEntityFilter, + FunnelExclusion, PropertyFilterValue, BaseMathType, PropertyMathType, @@ -89,11 +89,7 @@ export interface ActionFilterRowProps { customRowSuffix?: | string | JSX.Element - | ((props: { - filter: ActionFilterType | FunnelStepRangeEntityFilter - index: number - onClose: () => void - }) => JSX.Element) // Custom suffix element to 
show in each row + | ((props: { filter: ActionFilterType | FunnelExclusion; index: number; onClose: () => void }) => JSX.Element) // Custom suffix element to show in each row hasBreakdown: boolean // Whether the current graph has a breakdown filter applied showNestedArrow?: boolean // Show nested arrows to the left of property filter buttons actionsTaxonomicGroupTypes?: TaxonomicFilterGroupType[] // Which tabs to show for actions selector diff --git a/frontend/src/scenes/insights/filters/FunnelExclusionsFilter/ExclusionRowSuffix.tsx b/frontend/src/scenes/insights/filters/FunnelExclusionsFilter/ExclusionRowSuffix.tsx index 3c657491b1134..fbb1f61619023 100644 --- a/frontend/src/scenes/insights/filters/FunnelExclusionsFilter/ExclusionRowSuffix.tsx +++ b/frontend/src/scenes/insights/filters/FunnelExclusionsFilter/ExclusionRowSuffix.tsx @@ -1,7 +1,7 @@ import { Row, Select } from 'antd' import { useActions, useValues } from 'kea' import { ANTD_TOOLTIP_PLACEMENTS } from 'lib/utils' -import { FunnelStepRangeEntityFilter, ActionFilter as ActionFilterType, FunnelsFilterType } from '~/types' +import { FunnelExclusion, ActionFilter as ActionFilterType, FunnelsFilterType } from '~/types' import { insightLogic } from 'scenes/insights/insightLogic' import { LemonButton } from '@posthog/lemon-ui' import { IconDelete } from 'lib/lemon-ui/icons' @@ -10,7 +10,7 @@ import { FunnelsQuery } from '~/queries/schema' import { getClampedStepRangeFilterDataExploration } from 'scenes/funnels/funnelUtils' type ExclusionRowSuffixComponentBaseProps = { - filter: ActionFilterType | FunnelStepRangeEntityFilter + filter: ActionFilterType | FunnelExclusion index: number onClose?: () => void isVertical: boolean @@ -28,7 +28,7 @@ export function ExclusionRowSuffix({ ) const { updateInsightFilter } = useActions(funnelDataLogic(insightProps)) - const setOneEventExclusionFilter = (eventFilter: FunnelStepRangeEntityFilter, index: number): void => { + const setOneEventExclusionFilter = (eventFilter: FunnelExclusion, index: number): void => { const exclusions = ((insightFilter as FunnelsFilterType)?.exclusions || []).map((e, e_i) => e_i === index ? 
getClampedStepRangeFilterDataExploration({ diff --git a/frontend/src/scenes/insights/filters/FunnelExclusionsFilter/FunnelExclusionsFilter.tsx b/frontend/src/scenes/insights/filters/FunnelExclusionsFilter/FunnelExclusionsFilter.tsx index 9bb147c049967..97e93ecf02702 100644 --- a/frontend/src/scenes/insights/filters/FunnelExclusionsFilter/FunnelExclusionsFilter.tsx +++ b/frontend/src/scenes/insights/filters/FunnelExclusionsFilter/FunnelExclusionsFilter.tsx @@ -3,7 +3,7 @@ import { useActions, useValues } from 'kea' import useSize from '@react-hook/size' import { ActionFilter } from 'scenes/insights/filters/ActionFilter/ActionFilter' import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' -import { FunnelStepRangeEntityFilter, EntityTypes, FilterType } from '~/types' +import { FunnelExclusion, EntityTypes, FilterType } from '~/types' import { insightLogic } from 'scenes/insights/insightLogic' import { MathAvailability } from 'scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow' import { funnelDataLogic } from 'scenes/funnels/funnelDataLogic' @@ -22,7 +22,7 @@ export function FunnelExclusionsFilter(): JSX.Element { const isVerticalLayout = !!width && width < 450 // If filter container shrinks below 500px, initiate verticality const setFilters = (filters: Partial): void => { - const exclusions = (filters.events as FunnelStepRangeEntityFilter[]).map((e) => ({ + const exclusions = (filters.events as FunnelExclusion[]).map((e) => ({ ...e, funnel_from_step: e.funnel_from_step || exclusionDefaultStepRange.funnel_from_step, funnel_to_step: e.funnel_to_step || exclusionDefaultStepRange.funnel_to_step, diff --git a/frontend/src/scenes/insights/utils/cleanFilters.ts b/frontend/src/scenes/insights/utils/cleanFilters.ts index 918b31758b712..bd16e3ca1ba79 100644 --- a/frontend/src/scenes/insights/utils/cleanFilters.ts +++ b/frontend/src/scenes/insights/utils/cleanFilters.ts @@ -232,7 +232,6 @@ export function cleanFilters( ...(filters.funnel_window_interval ? { funnel_window_interval: filters.funnel_window_interval } : {}), ...(filters.funnel_order_type ? { funnel_order_type: filters.funnel_order_type } : {}), ...(filters.hidden_legend_keys ? { hidden_legend_keys: filters.hidden_legend_keys } : {}), - ...(filters.funnel_advanced ? { funnel_advanced: filters.funnel_advanced } : {}), ...(filters.funnel_aggregate_by_hogql ? 
{ funnel_aggregate_by_hogql: filters.funnel_aggregate_by_hogql } : {}), diff --git a/frontend/src/scenes/instance/SystemStatus/index.tsx b/frontend/src/scenes/instance/SystemStatus/index.tsx index 11adb42107c21..6bee341d1df3a 100644 --- a/frontend/src/scenes/instance/SystemStatus/index.tsx +++ b/frontend/src/scenes/instance/SystemStatus/index.tsx @@ -5,7 +5,7 @@ import { systemStatusLogic, InstanceStatusTabName } from './systemStatusLogic' import { useActions, useValues } from 'kea' import { PageHeader } from 'lib/components/PageHeader' import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' -import { IconOpenInNew } from 'lib/lemon-ui/icons' +import { IconInfo, IconOpenInNew } from 'lib/lemon-ui/icons' import { OverviewTab } from 'scenes/instance/SystemStatus/OverviewTab' import { InternalMetricsTab } from 'scenes/instance/SystemStatus/InternalMetricsTab' import { SceneExport } from 'scenes/sceneTypes' @@ -17,6 +17,7 @@ import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { FEATURE_FLAGS } from 'lib/constants' import { KafkaInspectorTab } from './KafkaInspectorTab' import { LemonTab, LemonTabs } from 'lib/lemon-ui/LemonTabs' +import { Tooltip } from 'lib/lemon-ui/Tooltip' export const scene: SceneExport = { component: SystemStatus, @@ -33,7 +34,11 @@ export function SystemStatus(): JSX.Element { let tabs = [ { key: 'overview', - label: 'System overview', + label: ( + System overview is cached for 60 seconds}> + System overview + + ), content: , }, ] as LemonTab[] diff --git a/frontend/src/scenes/notebooks/Marks/NotebookMarkLink.tsx b/frontend/src/scenes/notebooks/Marks/NotebookMarkLink.tsx index 39e5feaa4aead..52a66b633af6d 100644 --- a/frontend/src/scenes/notebooks/Marks/NotebookMarkLink.tsx +++ b/frontend/src/scenes/notebooks/Marks/NotebookMarkLink.tsx @@ -1,7 +1,6 @@ -import { Mark, mergeAttributes } from '@tiptap/core' +import { Mark, getMarkRange, mergeAttributes } from '@tiptap/core' import { linkPasteRule } from '../Nodes/utils' import { Plugin, PluginKey } from '@tiptap/pm/state' -import { router } from 'kea-router' export const NotebookMarkLink = Mark.create({ name: 'link', @@ -30,28 +29,26 @@ export const NotebookMarkLink = Mark.create({ }, addProseMirrorPlugins() { + const { editor, type: markType } = this return [ new Plugin({ key: new PluginKey('handleLinkClick'), props: { handleDOMEvents: { - click(view, event) { - if (event.button !== 0) { - return false - } - - const link = event.target as HTMLAnchorElement - - const href = link.href + click(_, event) { + if (event.metaKey) { + const link = event.target as HTMLAnchorElement + const href = link.href - if (link && href && !view.editable) { - event.preventDefault() - - if (isPostHogLink(href)) { - router.actions.push(link.pathname) - } else { + if (href) { + event.preventDefault() window.open(href, link.target) } + } else { + const range = getMarkRange(editor.state.selection.$anchor, markType) + if (range) { + editor.commands.setTextSelection(range) + } } }, }, diff --git a/frontend/src/scenes/notebooks/Notebook/Editor.tsx b/frontend/src/scenes/notebooks/Notebook/Editor.tsx index 0f4e83aa16485..e814d4a314023 100644 --- a/frontend/src/scenes/notebooks/Notebook/Editor.tsx +++ b/frontend/src/scenes/notebooks/Notebook/Editor.tsx @@ -30,6 +30,7 @@ import { SlashCommandsExtension } from './SlashCommands' import { BacklinkCommandsExtension } from './BacklinkCommands' import { NotebookNodeEarlyAccessFeature } from '../Nodes/NotebookNodeEarlyAccessFeature' import { NotebookNodeSurvey } from 
'../Nodes/NotebookNodeSurvey' +import { InlineMenu } from './InlineMenu' const CustomDocument = ExtensionDocument.extend({ content: 'heading block*', @@ -223,6 +224,7 @@ export function Editor({ <> {_editor && } + {_editor && } ) } diff --git a/frontend/src/scenes/notebooks/Notebook/InlineMenu.tsx b/frontend/src/scenes/notebooks/Notebook/InlineMenu.tsx new file mode 100644 index 0000000000000..7a2e837c16853 --- /dev/null +++ b/frontend/src/scenes/notebooks/Notebook/InlineMenu.tsx @@ -0,0 +1,65 @@ +import { LemonButton, LemonInput } from '@posthog/lemon-ui' +import { Editor } from '@tiptap/core' +import { BubbleMenu } from '@tiptap/react' +import { IconBold, IconDelete, IconItalic, IconLink, IconOpenInNew } from 'lib/lemon-ui/icons' + +export const InlineMenu = ({ editor }: { editor: Editor }): JSX.Element => { + const { href, target } = editor.getAttributes('link') + + const setLink = (href: string): void => { + editor.commands.setMark('link', { href: href }) + } + + const openLink = (): void => { + window.open(href, target) + } + + return ( + +
+ {editor.isActive('link') ? ( + <> + + } status="primary" size="small" /> + editor.chain().focus().unsetMark('link').run()} + icon={} + status="danger" + size="small" + /> + + ) : ( + <> + editor.chain().focus().toggleMark('bold').run()} + active={editor.isActive('bold')} + icon={} + size="small" + status={editor.isActive('bold') ? 'primary' : 'stealth'} + /> + editor.chain().focus().toggleMark('italic').run()} + active={editor.isActive('italic')} + icon={} + status={editor.isActive('italic') ? 'primary' : 'stealth'} + size="small" + /> + editor.chain().focus().setMark('link').run()} + icon={} + status="stealth" + size="small" + /> + + )} +
+
+ ) +} diff --git a/frontend/src/scenes/notebooks/Notebook/Notebook.scss b/frontend/src/scenes/notebooks/Notebook/Notebook.scss index 5433abc22cff6..03675b8da8b88 100644 --- a/frontend/src/scenes/notebooks/Notebook/Notebook.scss +++ b/frontend/src/scenes/notebooks/Notebook/Notebook.scss @@ -152,6 +152,24 @@ } } + .NotebookInlineMenu { + margin-bottom: -0.2rem; + box-shadow: 0px 4px 10px 0px rgba(0, 0, 0, 0.1); + + .LemonInput { + border: 0px; + min-height: 0px; + } + + .LemonButton { + min-height: 1.75rem; + + .LemonButton__icon { + font-size: 1rem; + } + } + } + .NotebookNodeSettings__widgets { &__content { max-height: calc(100vh - 220px); diff --git a/frontend/src/scenes/onboarding/Onboarding.tsx b/frontend/src/scenes/onboarding/Onboarding.tsx index 41b299c417f36..3cf1c4989e4c1 100644 --- a/frontend/src/scenes/onboarding/Onboarding.tsx +++ b/frontend/src/scenes/onboarding/Onboarding.tsx @@ -11,6 +11,10 @@ import { ProductKey } from '~/types' import { ProductAnalyticsSDKInstructions } from './sdks/product-analytics/ProductAnalyticsSDKInstructions' import { SessionReplaySDKInstructions } from './sdks/session-replay/SessionReplaySDKInstructions' import { OnboardingBillingStep } from './OnboardingBillingStep' +import { OnboardingOtherProductsStep } from './OnboardingOtherProductsStep' +import { teamLogic } from 'scenes/teamLogic' +import { OnboardingVerificationStep } from './OnboardingVerificationStep' +import { FeatureFlagsSDKInstructions } from './sdks/feature-flags/FeatureFlagsSDKInstructions' export const scene: SceneExport = { component: Onboarding, @@ -20,7 +24,7 @@ export const scene: SceneExport = { /** * Wrapper for custom onboarding content. This automatically includes the product intro and billing step. */ -const OnboardingWrapper = ({ children }: { children: React.ReactNode }): JSX.Element => { +const OnboardingWrapper = ({ children, onStart }: { children: React.ReactNode; onStart?: () => void }): JSX.Element => { const { currentOnboardingStepNumber, shouldShowBillingStep } = useValues(onboardingLogic) const { setAllOnboardingSteps } = useActions(onboardingLogic) const { product } = useValues(onboardingLogic) @@ -42,7 +46,8 @@ const OnboardingWrapper = ({ children }: { children: React.ReactNode }): JSX.Ele } const createAllSteps = (): void => { - const ProductIntro = + const ProductIntro = + const OtherProductsStep = let steps = [] if (Array.isArray(children)) { steps = [ProductIntro, ...children] @@ -53,6 +58,7 @@ const OnboardingWrapper = ({ children }: { children: React.ReactNode }): JSX.Ele const BillingStep = steps = [...steps, BillingStep] } + steps = [...steps, OtherProductsStep] setAllSteps(steps) } @@ -63,22 +69,36 @@ const ProductAnalyticsOnboarding = (): JSX.Element => { return ( + ) } const SessionReplayOnboarding = (): JSX.Element => { + const { updateCurrentTeam } = useActions(teamLogic) return ( - + { + updateCurrentTeam({ + session_recording_opt_in: true, + capture_console_log_opt_in: true, + capture_performance_opt_in: true, + }) + }} + > ) } const FeatureFlagsOnboarding = (): JSX.Element => { - return {/* */} + return ( + + + + ) } export function Onboarding(): JSX.Element | null { diff --git a/frontend/src/scenes/onboarding/OnboardingBillingStep.tsx b/frontend/src/scenes/onboarding/OnboardingBillingStep.tsx index 510f91ebcdf8e..6daba12e33fe6 100644 --- a/frontend/src/scenes/onboarding/OnboardingBillingStep.tsx +++ b/frontend/src/scenes/onboarding/OnboardingBillingStep.tsx @@ -50,7 +50,7 @@ export const OnboardingBillingStep = ({ product }: { product: 
BillingProductV2Ty

Subscribe successful

-

You're all ready to use PostHog.

+

You're all ready to use {product.name}.

diff --git a/frontend/src/scenes/onboarding/OnboardingOtherProductsStep.tsx b/frontend/src/scenes/onboarding/OnboardingOtherProductsStep.tsx new file mode 100644 index 0000000000000..56102c51a7646 --- /dev/null +++ b/frontend/src/scenes/onboarding/OnboardingOtherProductsStep.tsx @@ -0,0 +1,54 @@ +import { LemonButton, LemonCard } from '@posthog/lemon-ui' +import { OnboardingStep } from './OnboardingStep' +import { onboardingLogic } from './onboardingLogic' +import { useActions, useValues } from 'kea' +import { urls } from 'scenes/urls' + +export const OnboardingOtherProductsStep = (): JSX.Element => { + const { product, suggestedProducts } = useValues(onboardingLogic) + const { completeOnboarding } = useActions(onboardingLogic) + if (suggestedProducts.length === 0) { + completeOnboarding() + } + + return ( + } + > +
+ {suggestedProducts?.map((suggestedProduct) => ( + +
+
+ {suggestedProduct.name} +
+
+

{suggestedProduct.name}

+

{suggestedProduct.description}

+
+
+
+ completeOnboarding(urls.onboarding(suggestedProduct.type))} + > + Get started + +
+
+ ))} +
+
+ ) +} diff --git a/frontend/src/scenes/onboarding/OnboardingProductIntro.tsx b/frontend/src/scenes/onboarding/OnboardingProductIntro.tsx index 144f8d2d82128..4cdb535e242f7 100644 --- a/frontend/src/scenes/onboarding/OnboardingProductIntro.tsx +++ b/frontend/src/scenes/onboarding/OnboardingProductIntro.tsx @@ -9,7 +9,13 @@ import { ProductPricingModal } from 'scenes/billing/ProductPricingModal' import { IconArrowLeft, IconCheckCircleOutline, IconOpenInNew } from 'lib/lemon-ui/icons' import { urls } from 'scenes/urls' -export const OnboardingProductIntro = ({ product }: { product: BillingProductV2Type }): JSX.Element => { +export const OnboardingProductIntro = ({ + product, + onStart, +}: { + product: BillingProductV2Type + onStart?: () => void +}): JSX.Element => { const { currentAndUpgradePlans, isPricingModalOpen } = useValues(billingProductLogic({ product })) const { toggleIsPricingModalOpen } = useActions(billingProductLogic({ product })) const { setCurrentOnboardingStepNumber } = useActions(onboardingLogic) @@ -52,7 +58,10 @@ export const OnboardingProductIntro = ({ product }: { product: BillingProductV2T
setCurrentOnboardingStepNumber(currentOnboardingStepNumber + 1)} + onClick={() => { + onStart && onStart() + setCurrentOnboardingStepNumber(currentOnboardingStepNumber + 1) + }} > Get started diff --git a/frontend/src/scenes/onboarding/OnboardingStep.tsx b/frontend/src/scenes/onboarding/OnboardingStep.tsx index 12d523eb78330..b32c9fdc13a3d 100644 --- a/frontend/src/scenes/onboarding/OnboardingStep.tsx +++ b/frontend/src/scenes/onboarding/OnboardingStep.tsx @@ -9,16 +9,19 @@ export const OnboardingStep = ({ subtitle, children, showSkip = false, + onSkip, continueOverride, }: { title: string subtitle?: string children: React.ReactNode showSkip?: boolean + onSkip?: () => void continueOverride?: JSX.Element }): JSX.Element => { const { currentOnboardingStepNumber, totalOnboardingSteps } = useValues(onboardingLogic) const { setCurrentOnboardingStepNumber, completeOnboarding } = useActions(onboardingLogic) + const isLastStep = currentOnboardingStepNumber == totalOnboardingSteps return ( - currentOnboardingStepNumber == totalOnboardingSteps + onClick={() => { + onSkip && onSkip() + isLastStep ? completeOnboarding() : setCurrentOnboardingStepNumber(currentOnboardingStepNumber + 1) - } + }} status="muted" > - Skip for now + Skip {isLastStep ? 'and finish' : 'for now'} )} {continueOverride ? ( diff --git a/frontend/src/scenes/onboarding/OnboardingVerificationStep.tsx b/frontend/src/scenes/onboarding/OnboardingVerificationStep.tsx new file mode 100644 index 0000000000000..7b55f2f139bd2 --- /dev/null +++ b/frontend/src/scenes/onboarding/OnboardingVerificationStep.tsx @@ -0,0 +1,51 @@ +import { Spinner } from '@posthog/lemon-ui' +import { OnboardingStep } from './OnboardingStep' +import { useActions, useValues } from 'kea' +import { teamLogic } from 'scenes/teamLogic' +import { eventUsageLogic } from 'lib/utils/eventUsageLogic' +import { useInterval } from 'lib/hooks/useInterval' +import { BlushingHog } from 'lib/components/hedgehogs' +import { capitalizeFirstLetter } from 'lib/utils' + +export const OnboardingVerificationStep = ({ + listeningForName, + teamPropertyToVerify, +}: { + listeningForName: string + teamPropertyToVerify: string +}): JSX.Element => { + const { loadCurrentTeam } = useActions(teamLogic) + const { currentTeam } = useValues(teamLogic) + const { reportIngestionContinueWithoutVerifying } = useActions(eventUsageLogic) + + useInterval(() => { + if (!currentTeam?.[teamPropertyToVerify]) { + loadCurrentTeam() + } + }, 2000) + + return !currentTeam?.[teamPropertyToVerify] ? ( + { + reportIngestionContinueWithoutVerifying() + }} + continueOverride={<>} + > +
+ +
+
+ ) : ( + +
+ +
+
+ ) +} diff --git a/frontend/src/scenes/onboarding/onboardingLogic.tsx b/frontend/src/scenes/onboarding/onboardingLogic.tsx index 104779a1f4da4..3f38d75747981 100644 --- a/frontend/src/scenes/onboarding/onboardingLogic.tsx +++ b/frontend/src/scenes/onboarding/onboardingLogic.tsx @@ -4,27 +4,47 @@ import { urls } from 'scenes/urls' import type { onboardingLogicType } from './onboardingLogicType' import { billingLogic } from 'scenes/billing/billingLogic' +import { teamLogic } from 'scenes/teamLogic' export interface OnboardingLogicProps { productKey: ProductKey | null } + +export enum OnboardingStepKey { + PRODUCT_INTRO = 'product_intro', + SDKS = 'sdks', + BILLING = 'billing', + OTHER_PRODUCTS = 'other_products', + VERIFY = 'verify', +} + +export type OnboardingStepMap = Record + +const onboardingStepMap: OnboardingStepMap = { + [OnboardingStepKey.PRODUCT_INTRO]: 'OnboardingProductIntro', + [OnboardingStepKey.SDKS]: 'SDKs', + [OnboardingStepKey.BILLING]: 'OnboardingBillingStep', + [OnboardingStepKey.OTHER_PRODUCTS]: 'OnboardingOtherProductsStep', + [OnboardingStepKey.VERIFY]: 'OnboardingVerificationStep', +} + export type AllOnboardingSteps = JSX.Element[] export const onboardingLogic = kea({ props: {} as OnboardingLogicProps, path: ['scenes', 'onboarding', 'onboardingLogic'], connect: { - values: [billingLogic, ['billing']], - actions: [billingLogic, ['loadBillingSuccess']], + values: [billingLogic, ['billing'], teamLogic, ['currentTeam']], + actions: [billingLogic, ['loadBillingSuccess'], teamLogic, ['updateCurrentTeam']], }, actions: { setProduct: (product: BillingProductV2Type | null) => ({ product }), setProductKey: (productKey: string | null) => ({ productKey }), setCurrentOnboardingStepNumber: (currentOnboardingStepNumber: number) => ({ currentOnboardingStepNumber }), - completeOnboarding: true, + completeOnboarding: (redirectUri?: string) => ({ redirectUri }), setAllOnboardingSteps: (allOnboardingSteps: AllOnboardingSteps) => ({ allOnboardingSteps }), setStepKey: (stepKey: string) => ({ stepKey }), - setSubscribedDuringOnboarding: (subscribedDuringOnboarding) => ({ subscribedDuringOnboarding }), + setSubscribedDuringOnboarding: (subscribedDuringOnboarding: boolean) => ({ subscribedDuringOnboarding }), }, reducers: () => ({ productKey: [ @@ -51,6 +71,12 @@ export const onboardingLogic = kea({ setAllOnboardingSteps: (_, { allOnboardingSteps }) => allOnboardingSteps as AllOnboardingSteps, }, ], + stepKey: [ + '' as string, + { + setStepKey: (_, { stepKey }) => stepKey, + }, + ], onCompleteOnbardingRedirectUrl: [ urls.default() as string, { @@ -87,6 +113,17 @@ export const onboardingLogic = kea({ return !product?.subscribed || !hasAllAddons || subscribedDuringOnboarding }, ], + suggestedProducts: [ + (s) => [s.billing, s.product, s.currentTeam], + (billing, product, currentTeam) => + billing?.products?.filter( + (p) => + p.type !== product?.type && + !p.contact_support && + !p.inclusion_only && + !currentTeam?.has_completed_onboarding_for?.[p.type] + ) || [], + ], }, listeners: ({ actions, values }) => ({ loadBillingSuccess: () => { @@ -107,12 +144,86 @@ export const onboardingLogic = kea({ actions.setProduct(values.billing?.products.find((p) => p.type === values.productKey) || null) } }, - completeOnboarding: () => { - window.location.href = values.onCompleteOnbardingRedirectUrl + completeOnboarding: ({ redirectUri }) => { + if (values.productKey) { + // update the current team has_completed_onboarding_for field, only writing over the current product + actions.updateCurrentTeam({ + 
has_completed_onboarding_for: { + ...values.currentTeam?.has_completed_onboarding_for, + [values.productKey]: true, + }, + }) + } + window.location.href = redirectUri || values.onCompleteOnbardingRedirectUrl + }, + setAllOnboardingSteps: ({ allOnboardingSteps }) => { + // once we have the onboarding steps we need to make sure the step key is valid, + // and if so use it to set the step number. if not valid, remove it from the state. + // valid step keys are either numbers (used for unnamed steps) or keys from the onboardingStepMap. + // if it's a number, we try to convert it to a named step key using the onboardingStepMap. + let stepKey = values.stepKey + if (values.stepKey) { + if (parseInt(values.stepKey) > 0) { + // try to convert the step number to a step key + const stepName = allOnboardingSteps[parseInt(values.stepKey) - 1]?.type?.name + const newStepKey = Object.keys(onboardingStepMap).find((key) => onboardingStepMap[key] === stepName) + if (stepName && stepKey) { + stepKey = newStepKey || stepKey + actions.setStepKey(stepKey) + } + } + if (stepKey in onboardingStepMap) { + const stepIndex = allOnboardingSteps + .map((step) => step.type.name) + .indexOf(onboardingStepMap[stepKey as OnboardingStepKey]) + if (stepIndex > -1) { + actions.setCurrentOnboardingStepNumber(stepIndex + 1) + } else { + actions.setStepKey('') + actions.setCurrentOnboardingStepNumber(1) + } + } else if ( + // if it's a number, just use that and set the correct onboarding step number + parseInt(stepKey) > 1 && + allOnboardingSteps.length > 0 && + allOnboardingSteps[parseInt(stepKey) - 1] + ) { + actions.setCurrentOnboardingStepNumber(parseInt(stepKey)) + } + } + }, + setStepKey: ({ stepKey }) => { + // if the step key is invalid (doesn't exist in the onboardingStepMap or the allOnboardingSteps array) + // remove it from the state. Numeric step keys are also allowed, as long as they are a valid + // index for the allOnboardingSteps array. 
+ if ( + stepKey && + values.allOnboardingSteps.length > 0 && + (!values.allOnboardingSteps.find( + (step) => step.type.name === onboardingStepMap[stepKey as OnboardingStepKey] + ) || + !values.allOnboardingSteps[parseInt(stepKey) - 1]) + ) { + actions.setStepKey('') + } }, }), - urlToAction: ({ actions }) => ({ - '/onboarding/:productKey': ({ productKey }, { success, upgraded }) => { + actionToUrl: ({ values }) => ({ + setCurrentOnboardingStepNumber: () => { + // when the current step number changes, update the url to reflect the new step + const stepName = values.allOnboardingSteps[values.currentOnboardingStepNumber - 1]?.type?.name + const stepKey = + Object.keys(onboardingStepMap).find((key) => onboardingStepMap[key] === stepName) || + values.currentOnboardingStepNumber.toString() + if (stepKey) { + return [`/onboarding/${values.productKey}`, { step: stepKey }] + } else { + return [`/onboarding/${values.productKey}`] + } + }, + }), + urlToAction: ({ actions, values }) => ({ + '/onboarding/:productKey': ({ productKey }, { success, upgraded, step }) => { if (!productKey) { window.location.href = urls.default() return @@ -120,8 +231,14 @@ export const onboardingLogic = kea({ if (success || upgraded) { actions.setSubscribedDuringOnboarding(true) } - actions.setProductKey(productKey) - actions.setCurrentOnboardingStepNumber(1) + if (productKey !== values.productKey) { + actions.setProductKey(productKey) + } + if (step && (step in onboardingStepMap || parseInt(step) > 0)) { + actions.setStepKey(step) + } else { + actions.setCurrentOnboardingStepNumber(1) + } }, }), }) diff --git a/frontend/src/scenes/onboarding/sdks/SDKs.tsx b/frontend/src/scenes/onboarding/sdks/SDKs.tsx index 737dfceacac1b..9ac4884dd5d40 100644 --- a/frontend/src/scenes/onboarding/sdks/SDKs.tsx +++ b/frontend/src/scenes/onboarding/sdks/SDKs.tsx @@ -1,4 +1,4 @@ -import { LemonButton, LemonDivider, LemonSelect } from '@posthog/lemon-ui' +import { LemonButton, LemonCard, LemonDivider, LemonSelect } from '@posthog/lemon-ui' import { sdksLogic } from './sdksLogic' import { useActions, useValues } from 'kea' import { OnboardingStep } from '../OnboardingStep' @@ -7,6 +7,7 @@ import { onboardingLogic } from '../onboardingLogic' import { useEffect } from 'react' import React from 'react' import { SDKInstructionsMap } from '~/types' +import { InviteMembersButton } from '~/layout/navigation/TopBar/SitePopover' export function SDKs({ usersAction, @@ -32,7 +33,7 @@ export function SDKs({ >
-
+
{showSourceOptionsSelect && ( ))} + +

Need help with this step?

+

Invite a team member to help you get set up.

+ +
{selectedSDK && productKey && !!sdkInstructionMap[selectedSDK.key] && (
diff --git a/frontend/src/scenes/onboarding/sdks/feature-flags/FeatureFlagsSDKInstructions.tsx b/frontend/src/scenes/onboarding/sdks/feature-flags/FeatureFlagsSDKInstructions.tsx new file mode 100644 index 0000000000000..6374992792b3e --- /dev/null +++ b/frontend/src/scenes/onboarding/sdks/feature-flags/FeatureFlagsSDKInstructions.tsx @@ -0,0 +1,8 @@ +import { SDKInstructionsMap, SDKKey } from '~/types' +import { JSWebInstructions, NextJSInstructions, ReactInstructions } from '.' + +export const FeatureFlagsSDKInstructions: SDKInstructionsMap = { + [SDKKey.JS_WEB]: JSWebInstructions, + [SDKKey.NEXT_JS]: NextJSInstructions, + [SDKKey.REACT]: ReactInstructions, +} diff --git a/frontend/src/scenes/onboarding/sdks/feature-flags/index.tsx b/frontend/src/scenes/onboarding/sdks/feature-flags/index.tsx new file mode 100644 index 0000000000000..27d9e5388d04d --- /dev/null +++ b/frontend/src/scenes/onboarding/sdks/feature-flags/index.tsx @@ -0,0 +1,3 @@ +export * from './js-web' +export * from './next-js' +export * from './react' diff --git a/frontend/src/scenes/onboarding/sdks/feature-flags/js-web.tsx b/frontend/src/scenes/onboarding/sdks/feature-flags/js-web.tsx new file mode 100644 index 0000000000000..8ef2865c3b834 --- /dev/null +++ b/frontend/src/scenes/onboarding/sdks/feature-flags/js-web.tsx @@ -0,0 +1,42 @@ +import { JSSnippet } from 'lib/components/JSSnippet' +import { LemonDivider } from 'lib/lemon-ui/LemonDivider' +import { CodeSnippet, Language } from 'lib/components/CodeSnippet' +import { useValues } from 'kea' +import { teamLogic } from 'scenes/teamLogic' +import { JSInstallSnippet, SessionReplayFinalSteps } from '../shared-snippets' + +function JSSetupSnippet(): JSX.Element { + const { currentTeam } = useValues(teamLogic) + + return ( + + {[ + "import posthog from 'posthog-js'", + '', + `posthog.init('${currentTeam?.api_token}', { api_host: '${window.location.origin}' })`, + ].join('\n')} + + ) +} + +export function JSWebInstructions(): JSX.Element { + return ( + <> +

Option 1. Code snippet

+

+ Just add this snippet to your website within the <head> tag and we'll automatically + capture page views, sessions and all relevant interactions within your website. +

+ + +

Option 2. JavaScript Library

+

Install the package

+ +

Initialize

+ + +

Final steps

+ + + ) +} diff --git a/frontend/src/scenes/onboarding/sdks/feature-flags/next-js.tsx b/frontend/src/scenes/onboarding/sdks/feature-flags/next-js.tsx new file mode 100644 index 0000000000000..cda978ee12166 --- /dev/null +++ b/frontend/src/scenes/onboarding/sdks/feature-flags/next-js.tsx @@ -0,0 +1,98 @@ +import { Link } from 'lib/lemon-ui/Link' +import { CodeSnippet, Language } from 'lib/components/CodeSnippet' +import { useValues } from 'kea' +import { teamLogic } from 'scenes/teamLogic' +import { JSInstallSnippet, SessionReplayFinalSteps } from '../shared-snippets' + +function NextEnvVarsSnippet(): JSX.Element { + const { currentTeam } = useValues(teamLogic) + + return ( + + {[ + `NEXT_PUBLIC_POSTHOG_KEY=${currentTeam?.api_token}`, + `NEXT_PUBLIC_POSTHOG_HOST=${window.location.origin}`, + ].join('\n')} + + ) +} + +function NextPagesRouterCodeSnippet(): JSX.Element { + return ( + + {`// pages/_app.js +... +import posthog from 'posthog-js' // Import PostHog + +if (typeof window !== 'undefined') { // checks that we are client-side + posthog.init(process.env.NEXT_PUBLIC_POSTHOG_KEY, { + api_host: process.env.NEXT_PUBLIC_POSTHOG_HOST || 'https://app.posthog.com', + loaded: (posthog) => { + if (process.env.NODE_ENV === 'development') posthog.debug() // debug mode in development + }, + }) +} + +export default function App({ Component, pageProps }) { + const router = useRouter() + ...`} + + ) +} + +function NextAppRouterCodeSnippet(): JSX.Element { + return ( + + {`// app/providers.js +'use client' +... +import posthog from 'posthog-js' + +if (typeof window !== 'undefined') { + posthog.init(process.env.NEXT_PUBLIC_POSTHOG_KEY, { + api_host: process.env.NEXT_PUBLIC_POSTHOG_HOST, + }) +} +...`} + + ) +} + +export function NextJSInstructions(): JSX.Element { + return ( + <> +

Install posthog-js using your package manager

+ +

Add environment variables

+

+ Add your environment variables to your .env.local file and to your hosting provider (e.g. Vercel, + Netlify, AWS). You can find your project API key in your project settings. +

+

+ These values need to start with NEXT_PUBLIC_ to be accessible on the + client-side. +

+ + +

Initialize

+

With App router

+

If your Next.js app uses the app router, you can + integrate PostHog by creating a providers file in your app folder. This is because the posthog-js + library needs to be initialized on the client-side using the Next.js{' '} + + 'use client' directive + + .

+ +

With Pages router

+

+ If your Next.js app uses the pages router, you can + integrate PostHog at the root of your app (pages/_app.js). +

+ + + + ) +} diff --git a/frontend/src/scenes/onboarding/sdks/feature-flags/react.tsx b/frontend/src/scenes/onboarding/sdks/feature-flags/react.tsx new file mode 100644 index 0000000000000..86fdfc0f527c7 --- /dev/null +++ b/frontend/src/scenes/onboarding/sdks/feature-flags/react.tsx @@ -0,0 +1,64 @@ +import { CodeSnippet, Language } from 'lib/components/CodeSnippet' +import { useValues } from 'kea' +import { teamLogic } from 'scenes/teamLogic' +import { JSInstallSnippet, SessionReplayFinalSteps } from '../shared-snippets' + +function ReactEnvVarsSnippet(): JSX.Element { + const { currentTeam } = useValues(teamLogic) + + return ( + + {[ + `REACT_APP_POSTHOG_PUBLIC_KEY=${currentTeam?.api_token}`, + `REACT_APP_PUBLIC_POSTHOG_HOST=${window.location.origin}`, + ].join('\n')} + + ) +} + +function ReactSetupSnippet(): JSX.Element { + return ( + + {`// src/index.js +import React from 'react'; +import ReactDOM from 'react-dom/client'; +import App from './App'; + +import { PostHogProvider} from 'posthog-js/react' + +const options = { + api_host: process.env.REACT_APP_PUBLIC_POSTHOG_HOST, +} + +const root = ReactDOM.createRoot(document.getElementById('root')); +root.render( + + + + + +);`} + + ) +} + +export function ReactInstructions(): JSX.Element { + return ( + <> +

Install the package

+ +

Add environment variables

+ +

Initialize

+

+ Integrate PostHog at the root of your app (src/index.js for the default{' '} + create-react-app). +

+ + + + ) +} diff --git a/frontend/src/scenes/plugins/tabs/apps/AppView.tsx b/frontend/src/scenes/plugins/tabs/apps/AppView.tsx index 68065108e66c8..6c69ccb728b10 100644 --- a/frontend/src/scenes/plugins/tabs/apps/AppView.tsx +++ b/frontend/src/scenes/plugins/tabs/apps/AppView.tsx @@ -82,7 +82,7 @@ export function AppView({ } > - openReorderModal()} noPadding> + {orderedIndex ? ( ) : ( diff --git a/frontend/src/scenes/plugins/tabs/apps/InstalledAppsReorderModal.tsx b/frontend/src/scenes/plugins/tabs/apps/InstalledAppsReorderModal.tsx index 84b2c9505b107..1e5e0ad81b897 100644 --- a/frontend/src/scenes/plugins/tabs/apps/InstalledAppsReorderModal.tsx +++ b/frontend/src/scenes/plugins/tabs/apps/InstalledAppsReorderModal.tsx @@ -3,17 +3,27 @@ import { useValues, useActions } from 'kea' import { pluginsLogic } from 'scenes/plugins/pluginsLogic' import { LemonBadge, LemonButton } from '@posthog/lemon-ui' import { PluginTypeWithConfig } from 'scenes/plugins/types' -import { SortEndHandler, SortableContainer, SortableElement } from 'react-sortable-hoc' import { PluginImage } from 'scenes/plugins/plugin/PluginImage' +import { SortableContext, arrayMove, useSortable, verticalListSortingStrategy } from '@dnd-kit/sortable' +import { DndContext, DragEndEvent } from '@dnd-kit/core' +import { restrictToParentElement, restrictToVerticalAxis } from '@dnd-kit/modifiers' +import { CSS } from '@dnd-kit/utilities' const MinimalAppView = ({ plugin, order }: { plugin: PluginTypeWithConfig; order: number }): JSX.Element => { + const { attributes, listeners, setNodeRef, transform, transition, isDragging } = useSortable({ id: plugin.id }) + return (
@@ -22,36 +32,34 @@ const MinimalAppView = ({ plugin, order }: { plugin: PluginTypeWithConfig; order ) } -const SortableAppView = SortableElement(MinimalAppView) - -const SortableAppList = SortableContainer(({ children }: { children: React.ReactNode }) => { - return {children} -}) - export function InstalledAppsReorderModal(): JSX.Element { const { reorderModalOpen, sortableEnabledPlugins, temporaryOrder, pluginConfigsLoading } = useValues(pluginsLogic) const { closeReorderModal, setTemporaryOrder, cancelRearranging, savePluginOrders } = useActions(pluginsLogic) - const onSortEnd: SortEndHandler = ({ oldIndex, newIndex }) => { - const cloned = [...sortableEnabledPlugins] - const [removed] = cloned.splice(oldIndex, 1) - cloned.splice(newIndex, 0, removed) - - const newTemporaryOrder = cloned.reduce((acc, plugin, index) => { - return { - ...acc, - [plugin.id]: index + 1, - } - }, {}) - - setTemporaryOrder(newTemporaryOrder, removed.id) - } - const onClose = (): void => { cancelRearranging() closeReorderModal() } + const handleDragEnd = ({ active, over }: DragEndEvent): void => { + const itemIds = sortableEnabledPlugins.map((item) => item.id) + + if (over && active.id !== over.id) { + const oldIndex = itemIds.indexOf(Number(active.id)) + const newIndex = itemIds.indexOf(Number(over.id)) + const newOrder = arrayMove(sortableEnabledPlugins, oldIndex, newIndex) + + const newTemporaryOrder = newOrder.reduce((acc, plugin, index) => { + return { + ...acc, + [plugin.id]: index + 1, + } + }, {}) + + setTemporaryOrder(newTemporaryOrder, Number(active.id)) + } + } + return ( } > -
- - {sortableEnabledPlugins.map((plugin, index) => ( - - ))} - +
+ + + {sortableEnabledPlugins.map((item, index) => ( + + ))} + +
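The body of `MinimalAppView` is truncated in the hunk above. With `@dnd-kit`, the values returned by `useSortable` are usually applied roughly as in the sketch below; the exact markup and class names in this change are not shown here and are assumed.

```tsx
// Sketch: wiring useSortable's output into the sortable row (markup is illustrative)
const style = {
    transform: CSS.Transform.toString(transform), // a null transform becomes undefined
    transition,
    zIndex: isDragging ? 1 : undefined,
}

return (
    <div ref={setNodeRef} style={style} {...attributes} {...listeners} className="flex items-center gap-2 p-2">
        <span>{order}.</span>
        <PluginImage plugin={plugin} />
        <span>{plugin.name}</span>
    </div>
)
```

The `restrictToVerticalAxis` and `restrictToParentElement` modifiers imported above are presumably attached to the `DndContext`, which keeps the dragged row inside the modal and on a single axis, taking over the constraints previously handled by `react-sortable-hoc`.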
) diff --git a/frontend/src/scenes/products/Products.tsx b/frontend/src/scenes/products/Products.tsx index 1b155f0974394..703e904d00a88 100644 --- a/frontend/src/scenes/products/Products.tsx +++ b/frontend/src/scenes/products/Products.tsx @@ -76,6 +76,8 @@ function ProductCard({ product }: { product: BillingProductV2Type }): JSX.Elemen export function Products(): JSX.Element { const { featureFlags } = useValues(featureFlagLogic) const { billing } = useValues(billingLogic) + const { currentTeam } = useValues(teamLogic) + const isFirstProduct = Object.keys(currentTeam?.has_completed_onboarding_for || {}).length === 0 const products = billing?.products || [] useEffect(() => { @@ -87,19 +89,27 @@ export function Products(): JSX.Element { return (
-

Pick your first product.

+

Pick your {isFirstProduct ? 'first' : 'next'} product.

- Pick your first product to get started with. You can set up any others you'd like later. + Pick your {isFirstProduct ? 'first' : 'next'} product to get started with. You can set up any others + you'd like later.

{products.length > 0 ? ( -
- {products - .filter((product) => !product.contact_support && !product.inclusion_only) - .map((product) => ( - - ))} -
+ <> +
+ {products + .filter((product) => !product.contact_support && !product.inclusion_only) + .map((product) => ( + + ))} +
+
+ + None of these + +
+ ) : ( )} diff --git a/frontend/src/scenes/project/Settings/Survey.tsx b/frontend/src/scenes/project/Settings/Survey.tsx new file mode 100644 index 0000000000000..a33f4cdd9cb13 --- /dev/null +++ b/frontend/src/scenes/project/Settings/Survey.tsx @@ -0,0 +1,20 @@ +import { LemonDivider, Link } from '@posthog/lemon-ui' +import { SurveySettings as BasicSurveySettings } from 'scenes/surveys/SurveySettings' +import { urls } from 'scenes/urls' + +export function SurveySettings(): JSX.Element { + return ( + <> +

+ Surveys +

+

+ Get qualitative and quantitative data on how your users are doing. Surveys are found on the{' '} + surveys page. +

+ + + + + ) +} diff --git a/frontend/src/scenes/project/Settings/index.tsx b/frontend/src/scenes/project/Settings/index.tsx index f56c12247aff7..03622977b85f6 100644 --- a/frontend/src/scenes/project/Settings/index.tsx +++ b/frontend/src/scenes/project/Settings/index.tsx @@ -36,6 +36,9 @@ import { IngestionInfo } from './IngestionInfo' import { ExtraTeamSettings } from './ExtraTeamSettings' import { WeekStartConfig } from './WeekStartConfig' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' +import { SurveySettings } from './Survey' +import { FEATURE_FLAGS } from 'lib/constants' +import { featureFlagLogic } from 'lib/logic/featureFlagLogic' export const scene: SceneExport = { component: ProjectSettings, @@ -75,6 +78,7 @@ export function ProjectSettings(): JSX.Element { const { location } = useValues(router) const { user, hasAvailableFeature } = useValues(userLogic) const hasAdvancedPaths = user?.organization?.available_features?.includes(AvailableFeature.PATHS_ADVANCED) + const { featureFlags } = useValues(featureFlagLogic) useAnchor(location.hash) @@ -245,6 +249,7 @@ export function ProjectSettings(): JSX.Element { + {featureFlags[FEATURE_FLAGS.SURVEYS_SITE_APP_DEPRECATION] && } diff --git a/frontend/src/scenes/surveys/SurveySettings.tsx b/frontend/src/scenes/surveys/SurveySettings.tsx new file mode 100644 index 0000000000000..d371980f708af --- /dev/null +++ b/frontend/src/scenes/surveys/SurveySettings.tsx @@ -0,0 +1,64 @@ +import { useActions, useValues } from 'kea' +import { teamLogic } from 'scenes/teamLogic' +import { LemonSwitch, Link } from '@posthog/lemon-ui' +import { urls } from 'scenes/urls' +import { LemonDialog } from 'lib/lemon-ui/LemonDialog' + +export type SurveySettingsProps = { + inModal?: boolean +} + +export function SurveySettings({ inModal = false }: SurveySettingsProps): JSX.Element { + const { updateCurrentTeam } = useActions(teamLogic) + const { currentTeam } = useValues(teamLogic) + + return ( +
+
+ { + updateCurrentTeam({ + surveys_opt_in: checked, + }) + }} + label="Enable surveys popup" + bordered={!inModal} + fullWidth={inModal} + labelClassName={inModal ? 'text-base font-semibold' : ''} + checked={!!currentTeam?.surveys_opt_in} + /> + +

+ Please note your website needs to have the{' '} + PostHog snippet or the latest version of{' '} + + posthog-js + {' '} + directly installed. For more details, check out our{' '} + + docs + + . +

+
+
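In practice, the requirement called out above amounts to having a current posthog-js initialized on the site; a minimal sketch follows (the key and host are placeholders, not project-specific values):

```ts
import posthog from 'posthog-js'

// With a recent posthog-js version loaded and initialized, survey popups can be
// rendered by the library itself once the project-level toggle above is enabled.
posthog.init('<ph_project_api_key>', { api_host: '<ph_instance_address>' })
```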
+ ) +} + +export function openSurveysSettingsDialog(): void { + LemonDialog.open({ + title: 'Surveys settings', + content: , + width: 600, + primaryButton: { + children: 'Done', + }, + }) +} diff --git a/frontend/src/scenes/surveys/Surveys.tsx b/frontend/src/scenes/surveys/Surveys.tsx index 3f18b2df4e154..46032cf2d0709 100644 --- a/frontend/src/scenes/surveys/Surveys.tsx +++ b/frontend/src/scenes/surveys/Surveys.tsx @@ -16,6 +16,13 @@ import { ProductIntroduction } from 'lib/components/ProductIntroduction/ProductI import { userLogic } from 'scenes/userLogic' import { LemonSkeleton } from 'lib/lemon-ui/LemonSkeleton' import { dayjs } from 'lib/dayjs' +import { VersionCheckerBanner } from 'lib/components/VersionChecker/VersionCheckerBanner' +import { teamLogic } from 'scenes/teamLogic' +import { LemonBanner } from 'lib/lemon-ui/LemonBanner' +import { IconSettings } from 'lib/lemon-ui/icons' +import { openSurveysSettingsDialog } from './SurveySettings' +import { FEATURE_FLAGS } from 'lib/constants' +import { featureFlagLogic } from 'lib/logic/featureFlagLogic' export const scene: SceneExport = { component: Surveys, @@ -32,6 +39,10 @@ export function Surveys(): JSX.Element { const { nonArchivedSurveys, archivedSurveys, surveys, surveysLoading } = useValues(surveysLogic) const { deleteSurvey, updateSurvey } = useActions(surveysLogic) const { user } = useValues(userLogic) + const { featureFlags } = useValues(featureFlagLogic) + + const { currentTeam } = useValues(teamLogic) + const surveysPopupDisabled = currentTeam && !currentTeam?.surveys_opt_in const [tab, setSurveyTab] = useState(SurveysTabs.All) const shouldShowEmptyState = !surveysLoading && surveys.length === 0 @@ -61,6 +72,25 @@ export function Surveys(): JSX.Element { { key: SurveysTabs.Archived, label: 'Archived surveys' }, ]} /> + {featureFlags[FEATURE_FLAGS.SURVEYS_SITE_APP_DEPRECATION] && ( +
+ + + {surveysPopupDisabled ? ( + , + onClick: () => openSurveysSettingsDialog(), + children: 'Configure', + }} + > + Survey popups are currently disabled for this project. + + ) : null} +
+ )} {surveysLoading ? ( ) : ( diff --git a/frontend/src/scenes/surveys/surveyLogic.tsx b/frontend/src/scenes/surveys/surveyLogic.tsx index cdec952069295..3b8b55107870a 100644 --- a/frontend/src/scenes/surveys/surveyLogic.tsx +++ b/frontend/src/scenes/surveys/surveyLogic.tsx @@ -24,6 +24,8 @@ import { dayjs } from 'lib/dayjs' import { pluginsLogic } from 'scenes/plugins/pluginsLogic' import { eventUsageLogic } from 'lib/utils/eventUsageLogic' import { featureFlagLogic } from 'scenes/feature-flags/featureFlagLogic' +import { featureFlagLogic as enabledFlagLogic } from 'lib/logic/featureFlagLogic' +import { FEATURE_FLAGS } from 'lib/constants' export interface NewSurvey extends Pick< @@ -175,7 +177,12 @@ export const surveyLogic = kea([ 'reportSurveyViewed', ], ], - values: [pluginsLogic, ['installedPlugins', 'loading as pluginsLoading', 'enabledPlugins']], + values: [ + pluginsLogic, + ['installedPlugins', 'loading as pluginsLoading', 'enabledPlugins'], + enabledFlagLogic, + ['featureFlags as enabledFlags'], + ], })), actions({ editingSurvey: (editing: boolean) => ({ editing }), @@ -313,12 +320,15 @@ export const surveyLogic = kea([ }, ], showSurveyAppWarning: [ - (s) => [s.survey, s.enabledPlugins, s.pluginsLoading], - (survey: Survey, enabledPlugins: PluginType[], pluginsLoading: boolean): boolean => { - return !!( - survey.type !== SurveyType.API && - !pluginsLoading && - !enabledPlugins.find((plugin) => plugin.name === 'Surveys app') + (s) => [s.survey, s.enabledPlugins, s.pluginsLoading, s.enabledFlags], + (survey: Survey, enabledPlugins: PluginType[], pluginsLoading: boolean, enabledFlags): boolean => { + return ( + !enabledFlags[FEATURE_FLAGS.SURVEYS_SITE_APP_DEPRECATION] && + !!( + survey.type !== SurveyType.API && + !pluginsLoading && + !enabledPlugins.find((plugin) => plugin.name === 'Surveys app') + ) ) }, ], diff --git a/frontend/src/types.ts b/frontend/src/types.ts index 0d529adc829cd..516f61ea400be 100644 --- a/frontend/src/types.ts +++ b/frontend/src/types.ts @@ -343,6 +343,7 @@ export interface TeamType extends TeamBasicType { capture_console_log_opt_in: boolean capture_performance_opt_in: boolean autocapture_exceptions_opt_in: boolean + surveys_opt_in?: boolean autocapture_exceptions_errors_to_ignore: string[] test_account_filters: AnyPropertyFilter[] test_account_filters_default_checked: boolean @@ -586,10 +587,10 @@ export interface HogQLPropertyFilter extends BasePropertyFilter { } export interface EmptyPropertyFilter { - type?: undefined - value?: undefined - operator?: undefined - key?: undefined + type?: never + value?: never + operator?: never + key?: never } export type AnyPropertyFilter = @@ -786,8 +787,7 @@ export type EntityFilter = { order?: number } -// TODO: Separate FunnelStepRange and FunnelStepRangeEntity filter types -export interface FunnelStepRangeEntityFilter extends Partial { +export interface FunnelExclusion extends Partial { funnel_from_step?: number funnel_to_step?: number } @@ -1669,51 +1669,62 @@ export interface TrendsFilterType extends FilterType { // number of intervals, e.g. for a day interval, we may want to smooth over // 7 days to remove weekly variation. Smoothing is performed as a moving average. 
smoothing_intervals?: number + compare?: boolean + formula?: string + shown_as?: ShownAsValue + display?: ChartDisplayType + breakdown_histogram_bin_count?: number // trends breakdown histogram bin count + + // frontend only show_legend?: boolean // used to show/hide legend next to insights graph hidden_legend_keys?: Record // used to toggle visibilities in table and legend - compare?: boolean aggregation_axis_format?: AggregationAxisFormat // a fixed format like duration that needs calculation aggregation_axis_prefix?: string // a prefix to add to the aggregation axis e.g. £ aggregation_axis_postfix?: string // a postfix to add to the aggregation axis e.g. % - formula?: string - shown_as?: ShownAsValue - display?: ChartDisplayType show_values_on_series?: boolean show_percent_stack_view?: boolean - breakdown_histogram_bin_count?: number // trends breakdown histogram bin count } + export interface StickinessFilterType extends FilterType { compare?: boolean - show_legend?: boolean // used to show/hide legend next to insights graph - hidden_legend_keys?: Record // used to toggle visibilities in table and legend - stickiness_days?: number shown_as?: ShownAsValue display?: ChartDisplayType + + // frontend only + show_legend?: boolean // used to show/hide legend next to insights graph + hidden_legend_keys?: Record // used to toggle visibilities in table and legend show_values_on_series?: boolean + + // persons only + stickiness_days?: number } + export interface FunnelsFilterType extends FilterType { funnel_viz_type?: FunnelVizType // parameter sent to funnels API for time conversion code path funnel_from_step?: number // used in time to convert: initial step index to compute time to convert funnel_to_step?: number // used in time to convert: ending step index to compute time to convert - funnel_step_reference?: FunnelStepReference // whether conversion shown in graph should be across all steps or just from the previous step - funnel_step_breakdown?: string | number[] | number | null // used in steps breakdown: persons modal breakdown_attribution_type?: BreakdownAttributionType // funnels breakdown attribution type breakdown_attribution_value?: number // funnels breakdown attribution specific step value bin_count?: BinCountValue // used in time to convert: number of bins to show in histogram funnel_window_interval_unit?: FunnelConversionWindowTimeUnit // minutes, days, weeks, etc. 
for conversion window funnel_window_interval?: number | undefined // length of conversion window funnel_order_type?: StepOrderValue - exclusions?: FunnelStepRangeEntityFilter[] // used in funnel exclusion filters - funnel_correlation_person_entity?: Record // Funnel Correlation Persons Filter - funnel_correlation_person_converted?: 'true' | 'false' // Funnel Correlation Persons Converted - success or failure counts - funnel_custom_steps?: number[] // used to provide custom steps for which to get people in a funnel - primarily for correlation use - funnel_advanced?: boolean // used to toggle advanced options on or off + exclusions?: FunnelExclusion[] // used in funnel exclusion filters + funnel_aggregate_by_hogql?: string + + // frontend only layout?: FunnelLayout // used only for funnels - funnel_step?: number + funnel_step_reference?: FunnelStepReference // whether conversion shown in graph should be across all steps or just from the previous step + hidden_legend_keys?: Record // used to toggle visibilities in table and legend + + // persons only entrance_period_start?: string // this and drop_off is used for funnels time conversion date for the persons modal drop_off?: boolean - hidden_legend_keys?: Record // used to toggle visibilities in table and legend - funnel_aggregate_by_hogql?: string + funnel_step?: number + funnel_step_breakdown?: string | number[] | number | null // used in steps breakdown: persons modal + funnel_custom_steps?: number[] // used to provide custom steps for which to get people in a funnel - primarily for correlation use + funnel_correlation_person_entity?: Record // Funnel Correlation Persons Filter + funnel_correlation_person_converted?: 'true' | 'false' // Funnel Correlation Persons Converted - success or failure counts } export interface PathsFilterType extends FilterType { path_type?: PathType @@ -1726,14 +1737,16 @@ export interface PathsFilterType extends FilterType { funnel_filter?: Record // Funnel Filter used in Paths exclude_events?: string[] // Paths Exclusion type step_limit?: number // Paths Step Limit - path_start_key?: string // Paths People Start Key - path_end_key?: string // Paths People End Key - path_dropoff_key?: string // Paths People Dropoff Key path_replacements?: boolean local_path_cleaning_filters?: PathCleaningFilter[] edge_limit?: number | undefined // Paths edge limit min_edge_weight?: number | undefined // Paths max_edge_weight?: number | undefined // Paths + + // persons only + path_start_key?: string // Paths People Start Key + path_end_key?: string // Paths People End Key + path_dropoff_key?: string // Paths People Dropoff Key } export interface RetentionFilterType extends FilterType { retention_type?: RetentionType @@ -1745,6 +1758,8 @@ export interface RetentionFilterType extends FilterType { } export interface LifecycleFilterType extends FilterType { shown_as?: ShownAsValue + + // frontend only show_values_on_series?: boolean toggledLifecycles?: LifecycleToggle[] } diff --git a/latest_migrations.manifest b/latest_migrations.manifest index 233b3d446d5cb..0f95d248d4675 100644 --- a/latest_migrations.manifest +++ b/latest_migrations.manifest @@ -5,7 +5,7 @@ contenttypes: 0002_remove_content_type_name ee: 0015_add_verified_properties otp_static: 0002_throttling otp_totp: 0002_auto_20190420_0723 -posthog: 0350_add_notebook_text_content +posthog: 0351_team_surveys_opt_in sessions: 0001_initial social_django: 0010_uid_db_index two_factor: 0007_auto_20201201_1019 diff --git a/package.json b/package.json index 
9e0ac7481ede1..c1a3772e89de9 100644 --- a/package.json +++ b/package.json @@ -126,7 +126,7 @@ "md5": "^2.3.0", "monaco-editor": "^0.39.0", "papaparse": "^5.4.1", - "posthog-js": "1.79.1", + "posthog-js": "1.81.1", "posthog-js-lite": "2.0.0-alpha5", "prettier": "^2.8.8", "prop-types": "^15.7.2", diff --git a/playwright/e2e-vrt/layout/Navigation.spec.ts b/playwright/e2e-vrt/layout/Navigation.spec.ts index 5c0258c33e694..2af80117af5ad 100644 --- a/playwright/e2e-vrt/layout/Navigation.spec.ts +++ b/playwright/e2e-vrt/layout/Navigation.spec.ts @@ -6,12 +6,14 @@ test.describe('Navigation', () => { test('App Page With Side Bar Hidden (Mobile)', async ({ storyPage }) => { await storyPage.resizeToMobile() await storyPage.goto(toId('Layout/Navigation', 'App Page With Side Bar Hidden')) + await storyPage.mainAppContent.waitFor() await storyPage.expectFullPageScreenshot() }) test('App Page With Side Bar Shown (Mobile)', async ({ storyPage }) => { await storyPage.resizeToMobile() await storyPage.goto(toId('Layout/Navigation', 'App Page With Side Bar Shown')) + await storyPage.mainAppContent.waitFor() await storyPage.expectFullPageScreenshot() }) }) diff --git a/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Hidden-Mobile-1-chromium-linux.png b/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Hidden-Mobile-1-chromium-linux.png index 24af8b2279910..1be473f7dba29 100644 Binary files a/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Hidden-Mobile-1-chromium-linux.png and b/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Hidden-Mobile-1-chromium-linux.png differ diff --git a/plugin-server/src/config/config.ts b/plugin-server/src/config/config.ts index 02ee642c6648f..a6d9a373b4696 100644 --- a/plugin-server/src/config/config.ts +++ b/plugin-server/src/config/config.ts @@ -62,6 +62,7 @@ export function getDefaultConfig(): PluginsServerConfig { KAFKA_MAX_MESSAGE_BATCH_SIZE: isDevEnv() ? 0 : 900_000, KAFKA_FLUSH_FREQUENCY_MS: isTestEnv() ? 5 : 500, APP_METRICS_FLUSH_FREQUENCY_MS: isTestEnv() ? 5 : 20_000, + APP_METRICS_FLUSH_MAX_QUEUE_SIZE: isTestEnv() ? 5 : 1000, REDIS_URL: 'redis://127.0.0.1', POSTHOG_REDIS_PASSWORD: '', POSTHOG_REDIS_HOST: '', @@ -152,6 +153,7 @@ export function getDefaultConfig(): PluginsServerConfig { SESSION_RECORDING_REMOTE_FOLDER: 'session_recordings', SESSION_RECORDING_REDIS_PREFIX: '@posthog/replay/', SESSION_RECORDING_PARTITION_REVOKE_OPTIMIZATION: false, + SESSION_RECORDING_PARALLEL_CONSUMPTION: false, POSTHOG_SESSION_RECORDING_REDIS_HOST: undefined, POSTHOG_SESSION_RECORDING_REDIS_PORT: undefined, } diff --git a/plugin-server/src/main/ingestion-queues/session-recording/services/partition-locker.ts b/plugin-server/src/main/ingestion-queues/session-recording/services/partition-locker.ts index 62f3200c22cfb..cdc56f5efabdf 100644 --- a/plugin-server/src/main/ingestion-queues/session-recording/services/partition-locker.ts +++ b/plugin-server/src/main/ingestion-queues/session-recording/services/partition-locker.ts @@ -83,7 +83,7 @@ export class PartitionLocker { `PartitionLocker failed to claim keys. 
Waiting ${this.delay} before retrying...`, { id: this.consumerID, - blockingConsumers, + blockingConsumers: [...blockingConsumers], } ) await new Promise((r) => setTimeout(r, this.delay)) @@ -131,6 +131,7 @@ export class PartitionLocker { keys, }, }) + throw error } } } diff --git a/plugin-server/src/main/ingestion-queues/session-recording/services/realtime-manager.ts b/plugin-server/src/main/ingestion-queues/session-recording/services/realtime-manager.ts index 7571ed0835f53..e2c4d50d79bf9 100644 --- a/plugin-server/src/main/ingestion-queues/session-recording/services/realtime-manager.ts +++ b/plugin-server/src/main/ingestion-queues/session-recording/services/realtime-manager.ts @@ -66,6 +66,7 @@ export class RealtimeManager extends EventEmitter { ) this.pubsubRedis?.disconnect() + this.pubsubRedis = undefined } private async run(description: string, fn: (client: Redis) => Promise): Promise { diff --git a/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v2.ts b/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v2.ts index 939df4cf80f0f..f783b7390bc7e 100644 --- a/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v2.ts +++ b/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v2.ts @@ -13,6 +13,7 @@ import { PipelineEvent, PluginsServerConfig, RawEventMessage, RedisPool, TeamId import { BackgroundRefresher } from '../../../utils/background-refresher' import { PostgresRouter } from '../../../utils/db/postgres' import { status } from '../../../utils/status' +import { createRedisPool } from '../../../utils/utils' import { fetchTeamTokensWithRecordings } from '../../../worker/ingestion/team-manager' import { ObjectStorage } from '../../services/object_storage' import { addSentryBreadcrumbsEventListeners } from '../kafka-metrics' @@ -94,6 +95,7 @@ type PartitionMetrics = { } export class SessionRecordingIngesterV2 { + redisPool: RedisPool sessions: Record = {} offsetHighWaterMarker: OffsetHighWaterMarker realtimeManager: RealtimeManager @@ -104,28 +106,31 @@ export class SessionRecordingIngesterV2 { partitionLockInterval: NodeJS.Timer | null = null teamsRefresher: BackgroundRefresher> offsetsRefresher: BackgroundRefresher> - recordingConsumerConfig: PluginsServerConfig + config: PluginsServerConfig topic = KAFKA_SESSION_RECORDING_SNAPSHOT_ITEM_EVENTS + private promises: Set> = new Set() + constructor( - private serverConfig: PluginsServerConfig, + globalServerConfig: PluginsServerConfig, private postgres: PostgresRouter, - private objectStorage: ObjectStorage, - private redisPool: RedisPool + private objectStorage: ObjectStorage ) { - this.recordingConsumerConfig = sessionRecordingConsumerConfig(this.serverConfig) - this.realtimeManager = new RealtimeManager(this.redisPool, this.recordingConsumerConfig) - this.partitionLocker = new PartitionLocker( - this.redisPool, - this.recordingConsumerConfig.SESSION_RECORDING_REDIS_PREFIX - ) + // NOTE: globalServerConfig contains the default pluginServer values, typically not pointing at dedicated resources like kafka or redis + // We stil connect to some of the non-dedicated resources such as postgres or the Replay events kafka. 
+ this.config = sessionRecordingConsumerConfig(globalServerConfig) + this.redisPool = createRedisPool(this.config) + + this.realtimeManager = new RealtimeManager(this.redisPool, this.config) + this.partitionLocker = new PartitionLocker(this.redisPool, this.config.SESSION_RECORDING_REDIS_PREFIX) this.offsetHighWaterMarker = new OffsetHighWaterMarker( this.redisPool, - serverConfig.SESSION_RECORDING_REDIS_PREFIX + this.config.SESSION_RECORDING_REDIS_PREFIX ) - this.replayEventsIngester = new ReplayEventsIngester(this.serverConfig, this.offsetHighWaterMarker) + // NOTE: This is the only place where we need to use the shared server config + this.replayEventsIngester = new ReplayEventsIngester(globalServerConfig, this.offsetHighWaterMarker) this.teamsRefresher = new BackgroundRefresher(async () => { try { @@ -140,21 +145,21 @@ export class SessionRecordingIngesterV2 { this.offsetsRefresher = new BackgroundRefresher(async () => { const results = await Promise.all( - Object.keys(this.partitionAssignments).map(async (partition) => { + this.assignedTopicPartitions.map(async ({ partition }) => { return new Promise<[number, number]>((resolve, reject) => { if (!this.batchConsumer) { return reject('Not connected') } this.batchConsumer.consumer.queryWatermarkOffsets( KAFKA_SESSION_RECORDING_SNAPSHOT_ITEM_EVENTS, - parseInt(partition), + partition, (err, offsets) => { if (err) { status.error('🔥', 'Failed to query kafka watermark offsets', err) return reject() } - resolve([parseInt(partition), offsets.highOffset]) + resolve([partition, offsets.highOffset]) } ) }) @@ -168,6 +173,24 @@ export class SessionRecordingIngesterV2 { }, 5000) } + private get assignedTopicPartitions(): TopicPartition[] { + return Object.keys(this.partitionAssignments).map((partition) => ({ + partition: parseInt(partition), + topic: this.topic, + })) + } + + private scheduleWork(promise: Promise): Promise { + /** + * Helper to handle graceful shutdowns. Every time we do some work we add a promise to this array and remove it when finished. + * That way when shutting down we can wait for all promises to finish before exiting. 
+ */ + this.promises.add(promise) + promise.finally(() => this.promises.delete(promise)) + + return promise + } + public async consume(event: IncomingRecordingMessage, sentrySpan?: Sentry.Span): Promise { // we have to reset this counter once we're consuming messages since then we know we're not re-balancing // otherwise the consumer continues to report however many sessions were revoked at the last re-balance forever @@ -211,7 +234,7 @@ export class SessionRecordingIngesterV2 { const { partition, topic } = event.metadata const sessionManager = new SessionManager( - this.serverConfig, + this.config, this.objectStorage.s3, this.realtimeManager, this.offsetHighWaterMarker, @@ -312,12 +335,11 @@ export class SessionRecordingIngesterV2 { statsKey: `recordingingester.handleEachBatch`, logExecutionTime: true, func: async () => { - const transaction = Sentry.startTransaction({ name: `blobIngestion_handleEachBatch` }, {}) histogramKafkaBatchSize.observe(messages.length) const recordingMessages: IncomingRecordingMessage[] = [] - if (this.serverConfig.SESSION_RECORDING_PARTITION_REVOKE_OPTIMIZATION) { + if (this.config.SESSION_RECORDING_PARTITION_REVOKE_OPTIMIZATION) { await this.partitionLocker.claim(messages) } @@ -365,16 +387,14 @@ export class SessionRecordingIngesterV2 { }) await runInstrumentedFunction({ - statsKey: `recordingingester.handleEachBatch.consumeSerial`, + statsKey: `recordingingester.handleEachBatch.consumeBatch`, func: async () => { - for (const message of recordingMessages) { - const consumeSpan = transaction?.startChild({ - op: 'blobConsume', - }) - - await this.consume(message, consumeSpan) - // TODO: We could do this as batch of offsets for the whole lot... - consumeSpan?.finish() + if (this.config.SESSION_RECORDING_PARALLEL_CONSUMPTION) { + await Promise.all(recordingMessages.map((x) => this.consume(x))) + } else { + for (const message of recordingMessages) { + await this.consume(message) + } } }, }) @@ -397,8 +417,6 @@ export class SessionRecordingIngesterV2 { await this.flushAllReadySessions() }, }) - - transaction.finish() }, }) } @@ -411,8 +429,13 @@ export class SessionRecordingIngesterV2 { // Currently we can't reuse any files stored on disk, so we opt to delete them all try { - rmSync(bufferFileDir(this.serverConfig.SESSION_RECORDING_LOCAL_DIRECTORY), { recursive: true, force: true }) - mkdirSync(bufferFileDir(this.serverConfig.SESSION_RECORDING_LOCAL_DIRECTORY), { recursive: true }) + rmSync(bufferFileDir(this.config.SESSION_RECORDING_LOCAL_DIRECTORY), { + recursive: true, + force: true, + }) + mkdirSync(bufferFileDir(this.config.SESSION_RECORDING_LOCAL_DIRECTORY), { + recursive: true, + }) } catch (e) { status.error('🔥', 'Failed to recreate local buffer directory', e) captureException(e) @@ -424,18 +447,13 @@ export class SessionRecordingIngesterV2 { await this.replayEventsIngester.start() - if (this.serverConfig.SESSION_RECORDING_PARTITION_REVOKE_OPTIMIZATION) { + if (this.config.SESSION_RECORDING_PARTITION_REVOKE_OPTIMIZATION) { this.partitionLockInterval = setInterval(async () => { - await this.partitionLocker.claim( - Object.keys(this.partitionAssignments).map((partition) => ({ - partition: parseInt(partition), - topic: this.topic, - })) - ) + await this.partitionLocker.claim(this.assignedTopicPartitions) }, PARTITION_LOCK_INTERVAL_MS) } - const connectionConfig = createRdConnectionConfigFromEnvVars(this.recordingConsumerConfig) + const connectionConfig = createRdConnectionConfigFromEnvVars(this.config) // Create a node-rdkafka consumer that fetches batches of 
messages, runs // eachBatchWithContext, then commits offsets for the batch. @@ -448,15 +466,15 @@ export class SessionRecordingIngesterV2 { // the largest size of a message that can be fetched by the consumer. // the largest size our MSK cluster allows is 20MB // we only use 9 or 10MB but there's no reason to limit this 🤷️ - consumerMaxBytes: this.recordingConsumerConfig.KAFKA_CONSUMPTION_MAX_BYTES, - consumerMaxBytesPerPartition: this.recordingConsumerConfig.KAFKA_CONSUMPTION_MAX_BYTES_PER_PARTITION, + consumerMaxBytes: this.config.KAFKA_CONSUMPTION_MAX_BYTES, + consumerMaxBytesPerPartition: this.config.KAFKA_CONSUMPTION_MAX_BYTES_PER_PARTITION, // our messages are very big, so we don't want to buffer too many - queuedMinMessages: this.recordingConsumerConfig.SESSION_RECORDING_KAFKA_QUEUE_SIZE, - consumerMaxWaitMs: this.recordingConsumerConfig.KAFKA_CONSUMPTION_MAX_WAIT_MS, - consumerErrorBackoffMs: this.recordingConsumerConfig.KAFKA_CONSUMPTION_ERROR_BACKOFF_MS, - fetchBatchSize: this.recordingConsumerConfig.SESSION_RECORDING_KAFKA_BATCH_SIZE, - batchingTimeoutMs: this.recordingConsumerConfig.KAFKA_CONSUMPTION_BATCHING_TIMEOUT_MS, - topicCreationTimeoutMs: this.recordingConsumerConfig.KAFKA_TOPIC_CREATION_TIMEOUT_MS, + queuedMinMessages: this.config.SESSION_RECORDING_KAFKA_QUEUE_SIZE, + consumerMaxWaitMs: this.config.KAFKA_CONSUMPTION_MAX_WAIT_MS, + consumerErrorBackoffMs: this.config.KAFKA_CONSUMPTION_ERROR_BACKOFF_MS, + fetchBatchSize: this.config.SESSION_RECORDING_KAFKA_BATCH_SIZE, + batchingTimeoutMs: this.config.KAFKA_CONSUMPTION_BATCHING_TIMEOUT_MS, + topicCreationTimeoutMs: this.config.KAFKA_TOPIC_CREATION_TIMEOUT_MS, autoCommit: false, eachBatch: async (messages) => { return await this.handleEachBatch(messages) @@ -478,7 +496,7 @@ export class SessionRecordingIngesterV2 { } if (err.code === CODES.ERRORS.ERR__REVOKE_PARTITIONS) { - return this.onRevokePartitions(topicPartitions) + return this.scheduleWork(this.onRevokePartitions(topicPartitions)) } // We had a "real" error @@ -499,34 +517,30 @@ export class SessionRecordingIngesterV2 { }) } - public async stop(): Promise { + public async stop(): Promise[]> { status.info('🔁', 'blob_ingester_consumer - stopping') if (this.partitionLockInterval) { clearInterval(this.partitionLockInterval) } - // Mark as stopping so that we don't actually process any more incoming messages, but still keep the process alive await this.batchConsumer?.stop() // Simulate a revoke command to try and flush all sessions - // The rebalance event should have done this but we do it again as an extra precaution and to await the flushes - await this.onRevokePartitions( - Object.keys(this.partitionAssignments).map((partition) => ({ - partition: parseInt(partition), - topic: this.topic, - })) as TopicPartition[] - ) + // There is a race between the revoke callback and this function - Either way one of them gets there and covers the revocations + void this.scheduleWork(this.onRevokePartitions(this.assignedTopicPartitions)) + void this.scheduleWork(this.realtimeManager.unsubscribe()) + void this.scheduleWork(this.replayEventsIngester.stop()) - await this.realtimeManager.unsubscribe() - await this.replayEventsIngester.stop() + const promiseResults = await Promise.allSettled(this.promises) - // This is inefficient but currently necessary due to new instances restarting from the committed offset point - await this.destroySessions(Object.entries(this.sessions)) + // Finally we clear up redis once we are sure everything else has been handled + await this.redisPool.drain() 
+ await this.redisPool.clear() - this.sessions = {} + status.info('👍', 'blob_ingester_consumer - stopped!') - gaugeRealtimeSessions.reset() + return promiseResults } public isHealthy() { @@ -539,7 +553,9 @@ export class SessionRecordingIngesterV2 { this.partitionAssignments[topicPartition.partition] = {} }) - await this.partitionLocker.claim(topicPartitions) + if (this.config.SESSION_RECORDING_PARTITION_REVOKE_OPTIMIZATION) { + await this.partitionLocker.claim(topicPartitions) + } await this.offsetsRefresher.refresh() } @@ -554,34 +570,17 @@ export class SessionRecordingIngesterV2 { return } - const sessionsToDrop = Object.entries(this.sessions).filter(([_, sessionManager]) => - revokedPartitions.includes(sessionManager.partition) - ) + const sessionsToDrop: SessionManager[] = [] - gaugeSessionsRevoked.set(sessionsToDrop.length) - gaugeSessionsHandled.remove() - - // Attempt to flush all sessions - // TODO: Improve this to - // - work from oldest to newest - // - have some sort of timeout so we don't get stuck here forever - if (this.serverConfig.SESSION_RECORDING_PARTITION_REVOKE_OPTIMIZATION) { - status.info('🔁', `blob_ingester_consumer - flushing ${sessionsToDrop.length} sessions on revoke...`) - - await runInstrumentedFunction({ - statsKey: `recordingingester.onRevokePartitions.flushSessions`, - logExecutionTime: true, - func: async () => { - await Promise.allSettled( - sessionsToDrop - .map(([_, x]) => x) - .sort((x) => x.buffer.oldestKafkaTimestamp ?? Infinity) - .map((x) => x.flush('partition_shutdown')) - ) - }, - }) - } + // First we pull out all sessions that are being dropped. This way if we get reassigned and start consuming, we don't accidentally destroy them + Object.entries(this.sessions).forEach(([key, sessionManager]) => { + if (revokedPartitions.includes(sessionManager.partition)) { + sessionsToDrop.push(sessionManager) + delete this.sessions[key] + } + }) + // Reset all metrics for the revoked partitions topicPartitions.forEach((topicPartition: TopicPartition) => { const partition = topicPartition.partition @@ -593,11 +592,42 @@ export class SessionRecordingIngesterV2 { this.offsetHighWaterMarker.revoke(topicPartition) }) - if (this.serverConfig.SESSION_RECORDING_PARTITION_REVOKE_OPTIMIZATION) { - await this.partitionLocker.release(topicPartitions) - } - await this.destroySessions(sessionsToDrop) - await this.offsetsRefresher.refresh() + gaugeSessionsRevoked.set(sessionsToDrop.length) + gaugeSessionsHandled.remove() + + await runInstrumentedFunction({ + statsKey: `recordingingester.onRevokePartitions.revokeSessions`, + logExecutionTime: true, + timeout: 30000, // same as the partition lock + func: async () => { + if (this.config.SESSION_RECORDING_PARTITION_REVOKE_OPTIMIZATION) { + // Extend our claim on these partitions to give us time to flush + await this.partitionLocker.claim(topicPartitions) + status.info( + '🔁', + `blob_ingester_consumer - flushing ${sessionsToDrop.length} sessions on revoke...` + ) + + // Flush all the sessions we are supposed to drop + await runInstrumentedFunction({ + statsKey: `recordingingester.onRevokePartitions.flushSessions`, + logExecutionTime: true, + func: async () => { + await Promise.allSettled( + sessionsToDrop + .sort((x) => x.buffer.oldestKafkaTimestamp ?? 
Infinity) + .map((x) => x.flush('partition_shutdown')) + ) + }, + }) + + await this.partitionLocker.release(topicPartitions) + } + + await Promise.allSettled(sessionsToDrop.map((x) => x.destroy())) + await this.offsetsRefresher.refresh() + }, + }) } async flushAllReadySessions(): Promise { @@ -682,11 +712,6 @@ export class SessionRecordingIngesterV2 { this.partitionAssignments[partition].lastKnownCommit = highestOffsetToCommit } - status.info('💾', `blob_ingester_consumer.commitOffsets - attempting to commit offset`, { - partition, - offsetToCommit: highestOffsetToCommit, - }) - this.batchConsumer?.consumer.commit({ ...topicPartition, // see https://kafka.apache.org/10/javadoc/org/apache/kafka/clients/consumer/KafkaConsumer.html for example diff --git a/plugin-server/src/main/pluginsServer.ts b/plugin-server/src/main/pluginsServer.ts index eef7fdaa8b6de..30ef80768f985 100644 --- a/plugin-server/src/main/pluginsServer.ts +++ b/plugin-server/src/main/pluginsServer.ts @@ -17,7 +17,7 @@ import { captureEventLoopMetrics } from '../utils/metrics' import { cancelAllScheduledJobs } from '../utils/node-schedule' import { PubSub } from '../utils/pubsub' import { status } from '../utils/status' -import { createRedisPool, delay } from '../utils/utils' +import { delay } from '../utils/utils' import { OrganizationManager } from '../worker/ingestion/organization-manager' import { TeamManager } from '../worker/ingestion/team-manager' import Piscina, { makePiscina as defaultMakePiscina } from '../worker/piscina' @@ -420,27 +420,18 @@ export async function startPluginsServer( const statsd = hub?.statsd ?? createStatsdClient(serverConfig, null) const postgres = hub?.postgres ?? new PostgresRouter(serverConfig, statsd) const s3 = hub?.objectStorage ?? getObjectStorage(recordingConsumerConfig) - const redisPool = hub?.db.redisPool ?? createRedisPool(recordingConsumerConfig) if (!s3) { throw new Error("Can't start session recording blob ingestion without object storage") } // NOTE: We intentionally pass in the original serverConfig as the ingester uses both kafkas - const ingester = new SessionRecordingIngesterV2(serverConfig, postgres, s3, redisPool) + const ingester = new SessionRecordingIngesterV2(serverConfig, postgres, s3) await ingester.start() const batchConsumer = ingester.batchConsumer if (batchConsumer) { - stopSessionRecordingBlobConsumer = async () => { - // Tricky - in some cases the hub is responsible, in which case it will drain and clear. Otherwise we are responsible. - if (!hub?.db.redisPool) { - await redisPool.drain() - await redisPool.clear() - } - - await ingester.stop() - } + stopSessionRecordingBlobConsumer = () => ingester.stop() joinSessionRecordingBlobConsumer = () => batchConsumer.join() healthChecks['session-recordings-blob'] = () => ingester.isHealthy() ?? 
false } diff --git a/plugin-server/src/types.ts b/plugin-server/src/types.ts index 12f76ce214378..b9bfe64dce03d 100644 --- a/plugin-server/src/types.ts +++ b/plugin-server/src/types.ts @@ -145,6 +145,7 @@ export interface PluginsServerConfig { KAFKA_MAX_MESSAGE_BATCH_SIZE: number KAFKA_FLUSH_FREQUENCY_MS: number APP_METRICS_FLUSH_FREQUENCY_MS: number + APP_METRICS_FLUSH_MAX_QUEUE_SIZE: number BASE_DIR: string // base path for resolving local plugins PLUGINS_RELOAD_PUBSUB_CHANNEL: string // Redis channel for reload events' LOG_LEVEL: LogLevel @@ -221,6 +222,7 @@ export interface PluginsServerConfig { SESSION_RECORDING_REMOTE_FOLDER: string SESSION_RECORDING_REDIS_PREFIX: string SESSION_RECORDING_PARTITION_REVOKE_OPTIMIZATION: boolean + SESSION_RECORDING_PARALLEL_CONSUMPTION: boolean // Dedicated infra values SESSION_RECORDING_KAFKA_HOSTS: string | undefined diff --git a/plugin-server/src/utils/db/hub.ts b/plugin-server/src/utils/db/hub.ts index aeb5c26c95cfa..2ae134ae1fb6a 100644 --- a/plugin-server/src/utils/db/hub.ts +++ b/plugin-server/src/utils/db/hub.ts @@ -28,6 +28,7 @@ import { AppMetrics } from '../../worker/ingestion/app-metrics' import { OrganizationManager } from '../../worker/ingestion/organization-manager' import { EventsProcessor } from '../../worker/ingestion/process-event' import { TeamManager } from '../../worker/ingestion/team-manager' +import { isTestEnv } from '../env-utils' import { status } from '../status' import { createRedisPool, UUIDT } from '../utils' import { PluginsApiKeyManager } from './../../worker/vm/extensions/helpers/api-key-manager' @@ -192,9 +193,16 @@ export async function createHub( // :TODO: This is only used on worker threads, not main hub.eventsProcessor = new EventsProcessor(hub as Hub) - hub.appMetrics = new AppMetrics(hub as Hub) + hub.appMetrics = new AppMetrics( + kafkaProducer, + serverConfig.APP_METRICS_FLUSH_FREQUENCY_MS, + serverConfig.APP_METRICS_FLUSH_MAX_QUEUE_SIZE + ) const closeHub = async () => { + if (!isTestEnv()) { + await hub.appMetrics?.flush() + } await Promise.allSettled([kafkaProducer.disconnect(), redisPool.drain(), hub.postgres?.end()]) await redisPool.clear() diff --git a/plugin-server/src/worker/ingestion/app-metrics.ts b/plugin-server/src/worker/ingestion/app-metrics.ts index a52345df75a31..333104e967d4a 100644 --- a/plugin-server/src/worker/ingestion/app-metrics.ts +++ b/plugin-server/src/worker/ingestion/app-metrics.ts @@ -2,9 +2,10 @@ import * as Sentry from '@sentry/node' import { Message } from 'kafkajs' import { DateTime } from 'luxon' import { configure } from 'safe-stable-stringify' +import { KafkaProducerWrapper } from 'utils/db/kafka-producer-wrapper' import { KAFKA_APP_METRICS } from '../../config/kafka-topics' -import { Hub, TeamId, TimestampFormat } from '../../types' +import { TeamId, TimestampFormat } from '../../types' import { cleanErrorStackTrace } from '../../utils/db/error' import { status } from '../../utils/status' import { castTimestampOrNow, UUIDT } from '../../utils/utils' @@ -61,52 +62,43 @@ const safeJSONStringify = configure({ }) export class AppMetrics { - hub: Hub + kafkaProducer: KafkaProducerWrapper queuedData: Record flushFrequencyMs: number + maxQueueSize: number - timer: NodeJS.Timeout | null + lastFlushTime: number + // For quick access to queueSize instead of using Object.keys(queuedData).length every time + queueSize: number - constructor(hub: Hub) { - this.hub = hub + constructor(kafkaProducer: KafkaProducerWrapper, flushFrequencyMs: number, maxQueueSize: number) { this.queuedData = 
{} - this.flushFrequencyMs = hub.APP_METRICS_FLUSH_FREQUENCY_MS - this.timer = null + this.kafkaProducer = kafkaProducer + this.flushFrequencyMs = flushFrequencyMs + this.maxQueueSize = maxQueueSize + this.lastFlushTime = Date.now() + this.queueSize = 0 } - async isAvailable(metric: AppMetric, errorWithContext?: ErrorWithContext): Promise { - if (this.hub.APP_METRICS_GATHERED_FOR_ALL) { - return true - } - - // :TRICKY: If postgres connection is down, we ignore this metric - try { - return await this.hub.organizationManager.hasAvailableFeature(metric.teamId, 'app_metrics') - } catch (err) { - status.warn( - '⚠️', - 'Error querying whether app_metrics is available. Ignoring this metric', - metric, - errorWithContext, - err - ) - return false + async queueMetric(metric: AppMetric, timestamp?: number): Promise { + // We don't want to immediately flush all the metrics every time as we can internally + // aggregate them quite a bit and reduce the message count by a lot. + // However, we also don't want to wait too long, nor have the queue grow too big resulting in + // the flush taking a long time. + const now = Date.now() + if (now - this.lastFlushTime > this.flushFrequencyMs || this.queueSize > this.maxQueueSize) { + await this.flush() } - } - async queueMetric(metric: AppMetric, timestamp?: number): Promise { - timestamp = timestamp || Date.now() + timestamp = timestamp || now const key = this._key(metric) - if (!(await this.isAvailable(metric))) { - return - } - const { successes, successesOnRetry, failures, errorUuid, errorType, errorDetails, ...metricInfo } = metric if (!this.queuedData[key]) { + this.queueSize += 1 this.queuedData[key] = { successes: 0, successesOnRetry: 0, @@ -131,33 +123,29 @@ export class AppMetrics { this.queuedData[key].failures += failures } this.queuedData[key].lastTimestamp = timestamp - - if (this.timer === null) { - this.timer = setTimeout(() => { - this.hub.promiseManager.trackPromise(this.flush(), 'app metrics') - this.timer = null - }, this.flushFrequencyMs) - } } async queueError(metric: AppMetric, errorWithContext: ErrorWithContext, timestamp?: number) { - if (await this.isAvailable(metric, errorWithContext)) { - await this.queueMetric( - { - ...metric, - ...this._metricErrorParameters(errorWithContext), - }, - timestamp - ) - } + await this.queueMetric( + { + ...metric, + ...this._metricErrorParameters(errorWithContext), + }, + timestamp + ) } async flush(): Promise { + console.log(`Flushing app metrics`) + const startTime = Date.now() + this.lastFlushTime = startTime if (Object.keys(this.queuedData).length === 0) { return } + // TODO: We might be dropping some metrics here if someone wrote between queue assigment and queuedData={} assignment const queue = this.queuedData + this.queueSize = 0 this.queuedData = {} const kafkaMessages: Message[] = Object.values(queue).map((value) => ({ @@ -178,10 +166,11 @@ export class AppMetrics { }), })) - await this.hub.kafkaProducer.queueMessage({ + await this.kafkaProducer.queueMessage({ topic: KAFKA_APP_METRICS, messages: kafkaMessages, }) + console.log(`Finisehd flushing app metrics, took ${Date.now() - startTime}ms`) } _metricErrorParameters(errorWithContext: ErrorWithContext): Partial { diff --git a/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer-v2.test.ts b/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer-v2.test.ts index c106c6365b2e7..53cc8f019d861 100644 --- 
a/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer-v2.test.ts +++ b/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer-v2.test.ts @@ -72,6 +72,9 @@ describe('ingester', () => { const team = await getFirstTeam(hub) teamToken = team.api_token await deleteKeysWithPrefix(hub) + + ingester = new SessionRecordingIngesterV2(config, hub.postgres, hub.objectStorage) + await ingester.start() }) afterEach(async () => { @@ -86,12 +89,6 @@ describe('ingester', () => { jest.useRealTimers() }) - // these tests assume that a flush won't run while they run - beforeEach(async () => { - ingester = new SessionRecordingIngesterV2(config, hub.postgres, hub.objectStorage, hub.redisPool) - await ingester.start() - }) - it('creates a new session manager if needed', async () => { const event = createIncomingRecordingMessage() await ingester.consume(event) @@ -339,7 +336,7 @@ describe('ingester', () => { jest.setTimeout(5000) // Increased to cover lock delay beforeEach(async () => { - otherIngester = new SessionRecordingIngesterV2(config, hub.postgres, hub.objectStorage, hub.redisPool) + otherIngester = new SessionRecordingIngesterV2(config, hub.postgres, hub.objectStorage) await otherIngester.start() }) @@ -409,6 +406,11 @@ describe('ingester', () => { otherIngester.onAssignPartitions([createTP(2), createTP(3)]), ] + // Should immediately be removed from the tracked sessions + expect( + Object.values(ingester.sessions).map((x) => `${x.partition}:${x.sessionId}:${x.buffer.count}`) + ).toEqual(['1:session_id_1:1', '1:session_id_2:1']) + // Call the second ingester to receive the messages. The revocation should still be in progress meaning they are "paused" for a bit // Once the revocation is complete the second ingester should receive the messages but drop most of them as they got flushes by the revoke await otherIngester.handleEachBatch([ @@ -438,4 +440,62 @@ describe('ingester', () => { ).toEqual(['2:session_id_4:1']) }) }) + + describe('stop()', () => { + const setup = async (): Promise => { + const partitionMsgs1 = [ + createKafkaMessage( + teamToken, + { + partition: 1, + offset: 1, + }, + { + $session_id: 'session_id_1', + } + ), + + createKafkaMessage( + teamToken, + { + partition: 1, + offset: 2, + }, + { + $session_id: 'session_id_2', + } + ), + ] + + await ingester.onAssignPartitions([createTP(1)]) + await ingester.handleEachBatch(partitionMsgs1) + } + + // NOTE: This test is a sanity check for the follow up test. 
It demonstrates what happens if we shutdown in the wrong order + // It doesn't reliably work though as the onRevoke is called via the kafka lib ending up with dangling promises so rather it is here as a reminder + // demonstation for when we need it + it.skip('shuts down with error if redis forcefully shutdown', async () => { + await setup() + + await ingester.redisPool.drain() + await ingester.redisPool.clear() + + // revoke, realtime unsub, replay stop + await expect(ingester.stop()).resolves.toMatchObject([ + { status: 'rejected' }, + { status: 'fulfilled' }, + { status: 'fulfilled' }, + ]) + }) + it('shuts down without error', async () => { + await setup() + + // revoke, realtime unsub, replay stop + await expect(ingester.stop()).resolves.toMatchObject([ + { status: 'fulfilled' }, + { status: 'fulfilled' }, + { status: 'fulfilled' }, + ]) + }) + }) }) diff --git a/plugin-server/tests/worker/ingestion/app-metrics.test.ts b/plugin-server/tests/worker/ingestion/app-metrics.test.ts index c46f07998f460..43a2b07364208 100644 --- a/plugin-server/tests/worker/ingestion/app-metrics.test.ts +++ b/plugin-server/tests/worker/ingestion/app-metrics.test.ts @@ -23,18 +23,18 @@ describe('AppMetrics()', () => { let closeHub: () => Promise beforeEach(async () => { - ;[hub, closeHub] = await createHub({ APP_METRICS_FLUSH_FREQUENCY_MS: 100 }) - appMetrics = new AppMetrics(hub) - - jest.spyOn(hub.organizationManager, 'hasAvailableFeature').mockResolvedValue(true) + ;[hub, closeHub] = await createHub({ APP_METRICS_FLUSH_FREQUENCY_MS: 100, APP_METRICS_FLUSH_MAX_QUEUE_SIZE: 5 }) + appMetrics = new AppMetrics( + hub.kafkaProducer, + hub.APP_METRICS_FLUSH_FREQUENCY_MS, + hub.APP_METRICS_FLUSH_MAX_QUEUE_SIZE + ) + // doesn't flush again on the next call, i.e. flust metrics were reset jest.spyOn(hub.kafkaProducer, 'queueMessage').mockReturnValue(Promise.resolve()) }) afterEach(async () => { jest.useRealTimers() - if (appMetrics.timer) { - clearTimeout(appMetrics.timer) - } await closeHub() }) @@ -164,44 +164,34 @@ describe('AppMetrics()', () => { ]) }) - it('creates timer to flush if no timer before', async () => { - jest.spyOn(appMetrics, 'flush') - jest.useFakeTimers() - - await appMetrics.queueMetric({ ...metric, successes: 1 }, timestamp) - - const timer = appMetrics.timer - expect(timer).not.toBeNull() - - jest.advanceTimersByTime(120) + it('flushes when time is up', async () => { + Date.now = jest.fn(() => 1600000000) + await appMetrics.flush() - expect(appMetrics.timer).toBeNull() - expect(appMetrics.flush).toHaveBeenCalled() - }) + jest.spyOn(appMetrics, 'flush') + Date.now = jest.fn(() => 1600000120) - it('does not create a timer on subsequent requests', async () => { - await appMetrics.queueMetric({ ...metric, successes: 1 }, timestamp) - const originalTimer = appMetrics.timer await appMetrics.queueMetric({ ...metric, successes: 1 }, timestamp) - expect(originalTimer).not.toBeNull() - expect(appMetrics.timer).toEqual(originalTimer) - }) - - it('does nothing if feature is not available', async () => { - jest.mocked(hub.organizationManager.hasAvailableFeature).mockResolvedValue(false) - + expect(appMetrics.flush).toHaveBeenCalledTimes(1) + // doesn't flush again on the next call, i.e. 
flust metrics were reset + Date.now = jest.fn(() => 1600000130) await appMetrics.queueMetric({ ...metric, successes: 1 }, timestamp) - expect(appMetrics.queuedData).toEqual({}) + expect(appMetrics.flush).toHaveBeenCalledTimes(1) }) - it('does not query `hasAvailableFeature` if not needed', async () => { - hub.APP_METRICS_GATHERED_FOR_ALL = true - - await appMetrics.queueMetric({ ...metric, successes: 1 }, timestamp) - - expect(appMetrics.queuedData).not.toEqual({}) - expect(hub.organizationManager.hasAvailableFeature).not.toHaveBeenCalled() + it('flushes when max queue size is hit', async () => { + jest.spyOn(appMetrics, 'flush') + // parallel could trigger multiple flushes and make the test flaky + for (let i = 0; i < 7; i++) { + await appMetrics.queueMetric({ ...metric, successes: 1, teamId: i }, timestamp) + } + expect(appMetrics.flush).toHaveBeenCalledTimes(1) + // we only count different keys, so this should not trigger a flush + for (let i = 0; i < 7; i++) { + await appMetrics.queueMetric({ ...metric, successes: 1 }, timestamp) + } + expect(appMetrics.flush).toHaveBeenCalledTimes(1) }) }) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 67d43ea804243..74ec71a92715e 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -198,8 +198,8 @@ dependencies: specifier: ^5.4.1 version: 5.4.1 posthog-js: - specifier: 1.79.1 - version: 1.79.1 + specifier: 1.81.1 + version: 1.81.1 posthog-js-lite: specifier: 2.0.0-alpha5 version: 2.0.0-alpha5 @@ -617,7 +617,7 @@ devDependencies: version: 7.3.1 storybook-addon-pseudo-states: specifier: 2.1.0 - version: 2.1.0(@storybook/components@7.3.1)(@storybook/core-events@7.3.1)(@storybook/manager-api@7.4.3)(@storybook/preview-api@7.4.3)(@storybook/theming@7.3.1)(react-dom@16.14.0)(react@16.14.0) + version: 2.1.0(@storybook/components@7.3.1)(@storybook/core-events@7.3.1)(@storybook/manager-api@7.4.5)(@storybook/preview-api@7.4.5)(@storybook/theming@7.3.1)(react-dom@16.14.0)(react@16.14.0) style-loader: specifier: ^2.0.0 version: 2.0.0(webpack@5.88.2) @@ -982,12 +982,12 @@ packages: dependencies: '@babel/types': 7.22.10 - /@babel/parser@7.22.16: - resolution: {integrity: sha512-+gPfKv8UWeKKeJTUxe59+OobVcrYHETCsORl61EmSkmgymguYk/X5bp7GuUIXaFsc6y++v8ZxPsLSSuujqDphA==} + /@babel/parser@7.23.0: + resolution: {integrity: sha512-vvPKKdMemU85V9WE/l5wZEmImpCtLqbnTvqDS2U1fJ96KrxoW7KrXhNsNCblQlg8Ck4b85yxdTyelsMUgFUXiw==} engines: {node: '>=6.0.0'} hasBin: true dependencies: - '@babel/types': 7.22.19 + '@babel/types': 7.23.0 dev: true /@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@7.22.5(@babel/core@7.22.10): @@ -2092,8 +2092,8 @@ packages: '@babel/helper-validator-identifier': 7.22.5 to-fast-properties: 2.0.0 - /@babel/types@7.22.19: - resolution: {integrity: sha512-P7LAw/LbojPzkgp5oznjE6tQEIWbp4PkkfrZDINTro9zgBRtI324/EYsiSI7lhPbpIQ+DCeR2NNmMWANGGfZsg==} + /@babel/types@7.23.0: + resolution: {integrity: sha512-0oIyUfKoI3mSqMvsxBdclDwxXKXAUA8v/apZbc+iSyARYou1o8ZGDxbUYyLFoW2arqS2jDGqJuZvv1d/io1axg==} engines: {node: '>=6.9.0'} dependencies: '@babel/helper-string-parser': 7.22.5 @@ -4532,11 +4532,11 @@ packages: tiny-invariant: 1.3.1 dev: true - /@storybook/channels@7.4.3: - resolution: {integrity: sha512-lIoRX3EV0wKPX8ojIrJUtsOv4+Gv8r9pfJpam/NdyYd+rs0AjDK13ieINRfBMnJkfjsWa3vmZtGMBEVvDKwTMw==} + /@storybook/channels@7.4.5: + resolution: {integrity: sha512-zWPZn4CxPFXsrrSRQ9JD8GmTeWeFYgr3sTBpe23hnhYookCXVNJ6AcaXogrT9b2ALfbB6MiFDbZIHHTgIgbWpg==} dependencies: - '@storybook/client-logger': 7.4.3 - '@storybook/core-events': 7.4.3 + 
'@storybook/client-logger': 7.4.5 + '@storybook/core-events': 7.4.5 '@storybook/global': 5.0.0 qs: 6.11.2 telejson: 7.2.0 @@ -4600,8 +4600,8 @@ packages: '@storybook/global': 5.0.0 dev: true - /@storybook/client-logger@7.4.3: - resolution: {integrity: sha512-Nhngo9X4HjN00aRhgIVGWbwkWPe0Fz8PySuxnd8nAxSsz7KpdLFyYo2TbZZ3TX51FG5Fxcb0G5OHuunItP7EWQ==} + /@storybook/client-logger@7.4.5: + resolution: {integrity: sha512-Bn6eTAjhPDUfLpvuxhKkpDpOtkadfkSmkBNBZRu3r0Dzk2J1nNyKV5K6D8dOU4PFVof4z/gXYj5bktT29jKsmw==} dependencies: '@storybook/global': 5.0.0 dev: true @@ -4692,8 +4692,8 @@ packages: resolution: {integrity: sha512-7Pkgwmj/9B7Z3NNSn2swnviBrg9L1VeYSFw6JJKxtQskt8QoY8LxAsPzVMlHjqRmO6sO7lHo9FgpzIFxdmFaAA==} dev: true - /@storybook/core-events@7.4.3: - resolution: {integrity: sha512-FRfipCijMnVbGxL1ZjOLM836lyd/TGQcUFeVjTQWW/+pIGHELqDHiYeq68hqoGTKl0G0np59CJPWYTUZA4Dl9Q==} + /@storybook/core-events@7.4.5: + resolution: {integrity: sha512-Jzy/adSC95saYCZlgXE5j7jmiMLAXYpnBFBxEtBdXwSWEBb0zt21n1nyWBEAv9s/k2gqDXlPHKHeL5Mn6y40zA==} dependencies: ts-dedent: 2.2.0 dev: true @@ -4858,20 +4858,20 @@ packages: ts-dedent: 2.2.0 dev: true - /@storybook/manager-api@7.4.3(react-dom@16.14.0)(react@16.14.0): - resolution: {integrity: sha512-o5oiL2cJKlY+HNBCdUo5QKT8yXTyYYvBKibSS3YfDKcjeR9RXP+RhdF5lLLh6TzPwfdtLrXQoVI4A/61v2kurQ==} + /@storybook/manager-api@7.4.5(react-dom@16.14.0)(react@16.14.0): + resolution: {integrity: sha512-8Hdh5Tutet8xRy2fAknczfvpshz09eVnLd8m34vcFceUOYvEnvDbWerufhlEzovsF4v7U32uqbDHKdKTamWEQQ==} peerDependencies: react: ^16.8.0 || ^17.0.0 || ^18.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 dependencies: - '@storybook/channels': 7.4.3 - '@storybook/client-logger': 7.4.3 - '@storybook/core-events': 7.4.3 + '@storybook/channels': 7.4.5 + '@storybook/client-logger': 7.4.5 + '@storybook/core-events': 7.4.5 '@storybook/csf': 0.1.1 '@storybook/global': 5.0.0 - '@storybook/router': 7.4.3(react-dom@16.14.0)(react@16.14.0) - '@storybook/theming': 7.4.3(react-dom@16.14.0)(react@16.14.0) - '@storybook/types': 7.4.3 + '@storybook/router': 7.4.5(react-dom@16.14.0)(react@16.14.0) + '@storybook/theming': 7.4.5(react-dom@16.14.0)(react@16.14.0) + '@storybook/types': 7.4.5 dequal: 2.0.3 lodash: 4.17.21 memoizerific: 1.11.3 @@ -4967,15 +4967,15 @@ packages: util-deprecate: 1.0.2 dev: true - /@storybook/preview-api@7.4.3: - resolution: {integrity: sha512-qKwfH2+qN1Zpz2UX6dQLiTU5x2JH3o/+jOY4GYF6c3atTm5WAu1OvCYAJVb6MdXfAhZNuPwDKnJR8VmzWplWBg==} + /@storybook/preview-api@7.4.5: + resolution: {integrity: sha512-6xXQZPyilkGVddfZBI7tMbMMgOyIoZTYgTnwSPTMsXxO0f0TvtNDmGdwhn0I1nREHKfiQGpcQe6gwddEMnGtSg==} dependencies: - '@storybook/channels': 7.4.3 - '@storybook/client-logger': 7.4.3 - '@storybook/core-events': 7.4.3 + '@storybook/channels': 7.4.5 + '@storybook/client-logger': 7.4.5 + '@storybook/core-events': 7.4.5 '@storybook/csf': 0.1.1 '@storybook/global': 5.0.0 - '@storybook/types': 7.4.3 + '@storybook/types': 7.4.5 '@types/qs': 6.9.8 dequal: 2.0.3 lodash: 4.17.21 @@ -5110,13 +5110,13 @@ packages: react-dom: 16.14.0(react@16.14.0) dev: true - /@storybook/router@7.4.3(react-dom@16.14.0)(react@16.14.0): - resolution: {integrity: sha512-1ab1VTYzzOsBGKeT8xm1kLriIsIsiB/l3t7DdARJxLmPbddKyyXE018w17gfrARCWQ8SM99Ko6+pLmlZ2sm8ug==} + /@storybook/router@7.4.5(react-dom@16.14.0)(react@16.14.0): + resolution: {integrity: sha512-IM4IhiPiXsx3FAUeUOAB47uiuUS8Yd37VQcNlXLBO28GgHoTSYOrjS+VTGLIV5cAGKr8+H5pFB+q35BnlFUpkQ==} peerDependencies: react: ^16.8.0 || ^17.0.0 || ^18.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 dependencies: - 
'@storybook/client-logger': 7.4.3 + '@storybook/client-logger': 7.4.5 memoizerific: 1.11.3 qs: 6.11.2 react: 16.14.0 @@ -5209,14 +5209,14 @@ packages: react-dom: 16.14.0(react@16.14.0) dev: true - /@storybook/theming@7.4.3(react-dom@16.14.0)(react@16.14.0): - resolution: {integrity: sha512-u5wLwWmhGcTmkcs6f2wDGv+w8wzwbNJat0WaIIbwdJfX7arH6nO5HkBhNxvl6FUFxX0tovp/e9ULzxVPc356jw==} + /@storybook/theming@7.4.5(react-dom@16.14.0)(react@16.14.0): + resolution: {integrity: sha512-QSIJDIMzOegzlhubIBaYIovf4mlf+AVL0SmQOskPS8GZ6s9t77yUUI6gZTEjO+S4eB3djXRsfTTijQ8+z4XmRA==} peerDependencies: react: ^16.8.0 || ^17.0.0 || ^18.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 dependencies: '@emotion/use-insertion-effect-with-fallbacks': 1.0.1(react@16.14.0) - '@storybook/client-logger': 7.4.3 + '@storybook/client-logger': 7.4.5 '@storybook/global': 5.0.0 memoizerific: 1.11.3 react: 16.14.0 @@ -5232,12 +5232,12 @@ packages: file-system-cache: 2.3.0 dev: true - /@storybook/types@7.4.3: - resolution: {integrity: sha512-DrHC1hIiw9TqDILLokDnvbUPNxGz5iJaYFEv30uvYE0s9MvgEUPblCChEUjaHOps7zQTznMPf8ULfoXlgqxk2A==} + /@storybook/types@7.4.5: + resolution: {integrity: sha512-DTWFNjfRTpncjufDoUs0QnNkgHG2qThGKWL1D6sO18cYI02zWPyHWD8/cbqlvtT7XIGe3s1iUEfCTdU5GcwWBA==} dependencies: - '@storybook/channels': 7.4.3 + '@storybook/channels': 7.4.5 '@types/babel__core': 7.20.2 - '@types/express': 4.17.17 + '@types/express': 4.17.18 file-system-cache: 2.3.0 dev: true @@ -5450,8 +5450,8 @@ packages: '@tiptap/core': 2.1.0-rc.12(@tiptap/pm@2.1.0-rc.12) dev: false - /@tiptap/extension-bubble-menu@2.1.0-rc.12(@tiptap/core@2.1.0-rc.12)(@tiptap/pm@2.1.0-rc.12): - resolution: {integrity: sha512-Q8DzlM61KAhrq742b0x4+Ey3WChp6X8mIvHRhNhdbChmgtNyKX1d8k72euUC6hKBCUwH4b+AQ5JVmmhoJTfsjQ==} + /@tiptap/extension-bubble-menu@2.1.10(@tiptap/core@2.1.0-rc.12)(@tiptap/pm@2.1.0-rc.12): + resolution: {integrity: sha512-XxgJajXkfAj/fChXkIwKBs7/3pd7OxV1uGc6Opx1qW/nSRYx/rr97654Sx/sg6auwIlbpRoqTmyqjbykGX1/yA==} peerDependencies: '@tiptap/core': ^2.0.0 '@tiptap/pm': ^2.0.0 @@ -5652,7 +5652,7 @@ packages: react-dom: ^17.0.0 || ^18.0.0 dependencies: '@tiptap/core': 2.1.0-rc.12(@tiptap/pm@2.1.0-rc.12) - '@tiptap/extension-bubble-menu': 2.1.0-rc.12(@tiptap/core@2.1.0-rc.12)(@tiptap/pm@2.1.0-rc.12) + '@tiptap/extension-bubble-menu': 2.1.10(@tiptap/core@2.1.0-rc.12)(@tiptap/pm@2.1.0-rc.12) '@tiptap/extension-floating-menu': 2.1.0-rc.12(@tiptap/core@2.1.0-rc.12)(@tiptap/pm@2.1.0-rc.12) '@tiptap/pm': 2.1.0-rc.12 react: 16.14.0 @@ -5732,8 +5732,8 @@ packages: /@types/babel__core@7.20.2: resolution: {integrity: sha512-pNpr1T1xLUc2l3xJKuPtsEky3ybxN3m4fJkknfIpTCTfIZCDW57oAg+EfCgIIp2rvCe0Wn++/FfodDS4YXxBwA==} dependencies: - '@babel/parser': 7.22.16 - '@babel/types': 7.22.19 + '@babel/parser': 7.23.0 + '@babel/types': 7.23.0 '@types/babel__generator': 7.6.5 '@types/babel__template': 7.4.2 '@types/babel__traverse': 7.20.2 @@ -5748,7 +5748,7 @@ packages: /@types/babel__generator@7.6.5: resolution: {integrity: sha512-h9yIuWbJKdOPLJTbmSpPzkF67e659PbQDba7ifWm5BJ8xTv+sDmS7rFmywkWOvXedGTivCdeGSIIX8WLcRTz8w==} dependencies: - '@babel/types': 7.22.19 + '@babel/types': 7.23.0 dev: true /@types/babel__template@7.4.1: @@ -5761,8 +5761,8 @@ packages: /@types/babel__template@7.4.2: resolution: {integrity: sha512-/AVzPICMhMOMYoSx9MoKpGDKdBRsIXMNByh1PXSZoa+v6ZoLa8xxtsT/uLQ/NJm0XVAWl/BvId4MlDeXJaeIZQ==} dependencies: - '@babel/parser': 7.22.16 - '@babel/types': 7.22.19 + '@babel/parser': 7.23.0 + '@babel/types': 7.23.0 dev: true /@types/babel__traverse@7.18.2: @@ -5774,7 +5774,7 @@ packages: 
/@types/babel__traverse@7.20.2: resolution: {integrity: sha512-ojlGK1Hsfce93J0+kn3H5R73elidKUaZonirN33GSmgTUMpzI/MIFfSpF3haANe3G1bEBS9/9/QEqwTzwqFsKw==} dependencies: - '@babel/types': 7.22.19 + '@babel/types': 7.23.0 dev: true /@types/body-parser@1.19.2: @@ -5784,6 +5784,13 @@ packages: '@types/node': 18.11.9 dev: true + /@types/body-parser@1.19.3: + resolution: {integrity: sha512-oyl4jvAfTGX9Bt6Or4H9ni1Z447/tQuxnZsytsCaExKlmJiU8sFgnIBRzJUpKwB5eWn9HuBYlUlVA74q/yN0eQ==} + dependencies: + '@types/connect': 3.4.36 + '@types/node': 18.11.9 + dev: true + /@types/chart.js@2.9.37: resolution: {integrity: sha512-9bosRfHhkXxKYfrw94EmyDQcdjMaQPkU1fH2tDxu8DWXxf1mjzWQAV4laJF51ZbC2ycYwNDvIm1rGez8Bug0vg==} dependencies: @@ -5806,6 +5813,12 @@ packages: '@types/node': 18.11.9 dev: true + /@types/connect@3.4.36: + resolution: {integrity: sha512-P63Zd/JUGq+PdrM1lv0Wv5SBYeA2+CORvbrXbngriYY0jzLUWfQMQQxOhjONEz/wlHOAxOdY7CY65rgQdTjq2w==} + dependencies: + '@types/node': 18.11.9 + dev: true + /@types/cookie@0.4.1: resolution: {integrity: sha512-XW/Aa8APYr6jSVVA1y/DEIZX0/GMKLEVekNG727R8cs56ahETkRAy/3DR7+fJyh7oUgGwNQaRfXCun0+KbWY7Q==} dev: true @@ -6072,6 +6085,15 @@ packages: '@types/send': 0.17.1 dev: true + /@types/express-serve-static-core@4.17.37: + resolution: {integrity: sha512-ZohaCYTgGFcOP7u6aJOhY9uIZQgZ2vxC2yWoArY+FeDXlqeH66ZVBjgvg+RLVAS/DWNq4Ap9ZXu1+SUQiiWYMg==} + dependencies: + '@types/node': 18.11.9 + '@types/qs': 6.9.8 + '@types/range-parser': 1.2.4 + '@types/send': 0.17.2 + dev: true + /@types/express@4.17.17: resolution: {integrity: sha512-Q4FmmuLGBG58btUnfS1c1r/NQdlp3DMfGDGig8WhfpA2YRUtEkxAjkZb0yvplJGYdF1fsQ81iMDcH24sSCNC/Q==} dependencies: @@ -6081,6 +6103,15 @@ packages: '@types/serve-static': 1.15.2 dev: true + /@types/express@4.17.18: + resolution: {integrity: sha512-Sxv8BSLLgsBYmcnGdGjjEjqET2U+AKAdCRODmMiq02FgjwuV75Ut85DRpvFjyw/Mk0vgUOliGRU0UUmuuZHByQ==} + dependencies: + '@types/body-parser': 1.19.3 + '@types/express-serve-static-core': 4.17.37 + '@types/qs': 6.9.8 + '@types/serve-static': 1.15.3 + dev: true + /@types/find-cache-dir@3.2.1: resolution: {integrity: sha512-frsJrz2t/CeGifcu/6uRo4b+SzAwT4NYCVPu1GN8IB9XTzrpPkGuV0tmh9mN+/L0PklAlsC3u5Fxt0ju00LXIw==} dev: true @@ -6108,6 +6139,10 @@ packages: resolution: {integrity: sha512-/K3ds8TRAfBvi5vfjuz8y6+GiAYBZ0x4tXv1Av6CWBWn0IlADc+ZX9pMq7oU0fNQPnBwIZl3rmeLp6SBApbxSQ==} dev: true + /@types/http-errors@2.0.2: + resolution: {integrity: sha512-lPG6KlZs88gef6aD85z3HNkztpj7w2R7HmR3gygjfXCQmsLloWNARFkMuzKiiY8FGdh1XDpgBdrSf4aKDiA7Kg==} + dev: true + /@types/image-blob-reduce@4.1.1: resolution: {integrity: sha512-Oe2EPjW+iZSsXccxZPebqHqXAUaOLir3eQVqPx0ryXeJZdCZx+gYvWBZtqYEcluP6f3bll1m06ahT26bX0+LOg==} dependencies: @@ -6377,6 +6412,13 @@ packages: '@types/node': 18.11.9 dev: true + /@types/send@0.17.2: + resolution: {integrity: sha512-aAG6yRf6r0wQ29bkS+x97BIs64ZLxeE/ARwyS6wrldMm3C1MdKwCcnnEwMC1slI8wuxJOpiUH9MioC0A0i+GJw==} + dependencies: + '@types/mime': 1.3.2 + '@types/node': 18.11.9 + dev: true + /@types/serve-static@1.15.2: resolution: {integrity: sha512-J2LqtvFYCzaj8pVYKw8klQXrLLk7TBZmQ4ShlcdkELFKGwGMfevMLneMMRkMgZxotOD9wg497LpC7O8PcvAmfw==} dependencies: @@ -6385,6 +6427,14 @@ packages: '@types/node': 18.11.9 dev: true + /@types/serve-static@1.15.3: + resolution: {integrity: sha512-yVRvFsEMrv7s0lGhzrggJjNOSmZCdgCjw9xWrPr/kNNLp6FaDfMC1KaYl3TSJ0c58bECwNBMoQrZJ8hA8E1eFg==} + dependencies: + '@types/http-errors': 2.0.2 + '@types/mime': 3.0.1 + '@types/node': 18.11.9 + dev: true + /@types/set-cookie-parser@2.4.2: resolution: {integrity: 
sha512-fBZgytwhYAUkj/jC/FAV4RQ5EerRup1YQsXQCh8rZfiHkc4UahC192oH0smGwsXol3cL3A5oETuAHeQHmhXM4w==} dependencies: @@ -6442,8 +6492,8 @@ packages: '@types/yargs-parser': 21.0.0 dev: true - /@types/yauzl@2.10.0: - resolution: {integrity: sha512-Cn6WYCm0tXv8p6k+A8PvbDG763EDpBoTzHdA+Q/MF6H3sapGjCm9NzoaJncJS9tUKSuCoDs9XHxYYsQDgxR6kw==} + /@types/yauzl@2.10.1: + resolution: {integrity: sha512-CHzgNU3qYBnp/O4S3yv2tXPlvMTq0YWSTVg2/JYLqWZGHwwgJGAwd00poay/11asPq8wLFwHzubyInqHIFmmiw==} requiresBuild: true dependencies: '@types/node': 18.11.9 @@ -10289,7 +10339,7 @@ packages: get-stream: 5.2.0 yauzl: 2.10.0 optionalDependencies: - '@types/yauzl': 2.10.0 + '@types/yauzl': 2.10.1 transitivePeerDependencies: - supports-color dev: true @@ -14966,8 +15016,8 @@ packages: resolution: {integrity: sha512-tlkBdypJuvK/s00n4EiQjwYVfuuZv6vt8BF3g1ooIQa2Gz9Vz80p8q3qsPLZ0V5ErGRy6i3Q4fWC9TDzR7GNRQ==} dev: false - /posthog-js@1.79.1: - resolution: {integrity: sha512-ftW9RHoB9gIYjqVcA/YJeu99MfJaX/vfx/ADgO2yi5QfFWsIWNnfPeWYQskMMxEUTq03svRAwdZHTyOkVkDpIA==} + /posthog-js@1.81.1: + resolution: {integrity: sha512-pQfG9ZGVn3R7Uh1cC/S02trZ6u4TOLs1NhZG3WiNrqMKDA8MJQjZ/PqdkLO0/BeozRBfIbON6pw3xfOIneIclg==} dependencies: fflate: 0.4.8 dev: false @@ -17192,7 +17242,7 @@ packages: resolution: {integrity: sha512-siT1RiqlfQnGqgT/YzXVUNsom9S0H1OX+dpdGN1xkyYATo4I6sep5NmsRD/40s3IIOvlCq6akxkqG82urIZW1w==} dev: true - /storybook-addon-pseudo-states@2.1.0(@storybook/components@7.3.1)(@storybook/core-events@7.3.1)(@storybook/manager-api@7.4.3)(@storybook/preview-api@7.4.3)(@storybook/theming@7.3.1)(react-dom@16.14.0)(react@16.14.0): + /storybook-addon-pseudo-states@2.1.0(@storybook/components@7.3.1)(@storybook/core-events@7.3.1)(@storybook/manager-api@7.4.5)(@storybook/preview-api@7.4.5)(@storybook/theming@7.3.1)(react-dom@16.14.0)(react@16.14.0): resolution: {integrity: sha512-AwbCL1OiZ16aIeXSP/IOovkMwXy7NTZqmjkz+UM2guSGjvogHNA95NhuVyWoqieE+QWUpGO48+MrBGMeeJcHOQ==} peerDependencies: '@storybook/components': ^7.0.0 @@ -17210,8 +17260,8 @@ packages: dependencies: '@storybook/components': 7.3.1(@types/react-dom@16.9.17)(@types/react@16.14.34)(react-dom@16.14.0)(react@16.14.0) '@storybook/core-events': 7.3.1 - '@storybook/manager-api': 7.4.3(react-dom@16.14.0)(react@16.14.0) - '@storybook/preview-api': 7.4.3 + '@storybook/manager-api': 7.4.5(react-dom@16.14.0)(react@16.14.0) + '@storybook/preview-api': 7.4.5 '@storybook/theming': 7.3.1(react-dom@16.14.0)(react@16.14.0) react: 16.14.0 react-dom: 16.14.0(react@16.14.0) diff --git a/posthog/api/decide.py b/posthog/api/decide.py index 880a62128abdc..66364ba617ae9 100644 --- a/posthog/api/decide.py +++ b/posthog/api/decide.py @@ -225,6 +225,8 @@ def get_decide(request: HttpRequest): "recorderVersion": "v2", } + response["surveys"] = True if team.surveys_opt_in else False + site_apps = [] # errors mean the database is unavailable, bail in this case if team.inject_web_apps and not errors: diff --git a/posthog/api/instance_status.py b/posthog/api/instance_status.py index 6d685dc783e32..c54b9b2bc071c 100644 --- a/posthog/api/instance_status.py +++ b/posthog/api/instance_status.py @@ -2,6 +2,8 @@ from django.conf import settings from django.db import connection +from django.utils.decorators import method_decorator +from django.views.decorators.cache import cache_page from rest_framework import viewsets from rest_framework.decorators import action from rest_framework.permissions import IsAuthenticated @@ -33,6 +35,7 @@ class InstanceStatusViewSet(viewsets.ViewSet): permission_classes = [IsAuthenticated, 
SingleTenancyOrAdmin] + @method_decorator(cache_page(60)) def list(self, request: Request) -> Response: redis_alive = is_redis_alive() postgres_alive = is_postgres_alive() diff --git a/posthog/api/query.py b/posthog/api/query.py index 4c714d6e6fde7..628a55da744ee 100644 --- a/posthog/api/query.py +++ b/posthog/api/query.py @@ -27,6 +27,7 @@ from posthog.hogql.query import execute_hogql_query from posthog.hogql_queries.lifecycle_query_runner import LifecycleQueryRunner +from posthog.hogql_queries.trends_query_runner import TrendsQueryRunner from posthog.models import Team from posthog.models.event.events_query import run_events_query from posthog.models.user import User @@ -227,6 +228,10 @@ def process_query( refresh_requested = refresh_requested_by_client(request) if request else False lifecycle_query_runner = LifecycleQueryRunner(query_json, team) return _unwrap_pydantic_dict(lifecycle_query_runner.run(refresh_requested=refresh_requested)) + elif query_kind == "TrendsQuery": + refresh_requested = refresh_requested_by_client(request) if request else False + trends_query_runner = TrendsQueryRunner(query_json, team) + return _unwrap_pydantic_dict(trends_query_runner.run(refresh_requested=refresh_requested)) elif query_kind == "DatabaseSchemaQuery": database = create_hogql_database(team.pk) return serialize_database(database) diff --git a/posthog/api/team.py b/posthog/api/team.py index 179bcb5303754..ebfd3fe2f72d3 100644 --- a/posthog/api/team.py +++ b/posthog/api/team.py @@ -90,6 +90,7 @@ class Meta: "session_recording_opt_in", "recording_domains", "inject_web_apps", + "surveys_opt_in", ] @@ -139,6 +140,7 @@ class Meta: "inject_web_apps", "extra_settings", "has_completed_onboarding_for", + "surveys_opt_in", ) read_only_fields = ( "id", diff --git a/posthog/api/test/__snapshots__/test_action.ambr b/posthog/api/test/__snapshots__/test_action.ambr index 7bdd2eaad7cfb..e158a0baaf2da 100644 --- a/posthog/api/test/__snapshots__/test_action.ambr +++ b/posthog/api/test/__snapshots__/test_action.ambr @@ -48,6 +48,7 @@ "posthog_team"."session_recording_opt_in", "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -197,6 +198,7 @@ "posthog_team"."session_recording_opt_in", "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -513,6 +515,7 @@ "posthog_team"."session_recording_opt_in", "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", diff --git a/posthog/api/test/__snapshots__/test_annotation.ambr b/posthog/api/test/__snapshots__/test_annotation.ambr index eceea7ffb0ead..23060eec170ac 100644 --- a/posthog/api/test/__snapshots__/test_annotation.ambr +++ b/posthog/api/test/__snapshots__/test_annotation.ambr @@ -48,6 +48,7 @@ "posthog_team"."session_recording_opt_in", "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -121,6 +122,7 @@ "posthog_team"."session_recording_opt_in", 
"posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -437,6 +439,7 @@ "posthog_team"."session_recording_opt_in", "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", diff --git a/posthog/api/test/__snapshots__/test_decide.ambr b/posthog/api/test/__snapshots__/test_decide.ambr index ae9901067c7d0..43b1197657722 100644 --- a/posthog/api/test/__snapshots__/test_decide.ambr +++ b/posthog/api/test/__snapshots__/test_decide.ambr @@ -48,6 +48,7 @@ "posthog_team"."session_recording_opt_in", "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -282,6 +283,7 @@ "posthog_team"."session_recording_opt_in", "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -431,6 +433,7 @@ "posthog_team"."session_recording_opt_in", "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -575,6 +578,7 @@ "posthog_team"."session_recording_opt_in", "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", diff --git a/posthog/api/test/__snapshots__/test_early_access_feature.ambr b/posthog/api/test/__snapshots__/test_early_access_feature.ambr index f5b0252c90b0e..f9cfe83157dc4 100644 --- a/posthog/api/test/__snapshots__/test_early_access_feature.ambr +++ b/posthog/api/test/__snapshots__/test_early_access_feature.ambr @@ -19,6 +19,7 @@ "posthog_team"."session_recording_opt_in", "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -145,6 +146,7 @@ "posthog_team"."session_recording_opt_in", "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", diff --git a/posthog/api/test/__snapshots__/test_element.ambr b/posthog/api/test/__snapshots__/test_element.ambr index 943571f635436..8b1ea3b9ebfc4 100644 --- a/posthog/api/test/__snapshots__/test_element.ambr +++ b/posthog/api/test/__snapshots__/test_element.ambr @@ -48,6 +48,7 @@ "posthog_team"."session_recording_opt_in", "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", diff --git a/posthog/api/test/__snapshots__/test_insight.ambr b/posthog/api/test/__snapshots__/test_insight.ambr index 0e921f83f3f37..f9015d5d9b82c 100644 --- 
a/posthog/api/test/__snapshots__/test_insight.ambr +++ b/posthog/api/test/__snapshots__/test_insight.ambr @@ -646,6 +646,7 @@ "posthog_team"."session_recording_opt_in", "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -690,6 +691,7 @@ "posthog_team"."session_recording_opt_in", "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -810,6 +812,7 @@ "posthog_team"."session_recording_opt_in", "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1032,6 +1035,7 @@ "posthog_team"."session_recording_opt_in", "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1166,6 +1170,7 @@ "posthog_team"."session_recording_opt_in", "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1287,6 +1292,7 @@ "posthog_team"."session_recording_opt_in", "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1388,6 +1394,7 @@ "posthog_team"."session_recording_opt_in", "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1524,6 +1531,7 @@ "posthog_team"."session_recording_opt_in", "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1603,6 +1611,7 @@ "posthog_team"."session_recording_opt_in", "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1681,6 +1690,7 @@ "posthog_team"."session_recording_opt_in", "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", @@ -1732,6 +1742,7 @@ "posthog_team"."session_recording_opt_in", "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", diff --git a/posthog/api/test/__snapshots__/test_preflight.ambr b/posthog/api/test/__snapshots__/test_preflight.ambr index 19a504d23eb28..6981b2f9afe16 100644 --- a/posthog/api/test/__snapshots__/test_preflight.ambr +++ 
b/posthog/api/test/__snapshots__/test_preflight.ambr @@ -59,6 +59,7 @@ "posthog_team"."session_recording_opt_in", "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", diff --git a/posthog/api/test/__snapshots__/test_survey.ambr b/posthog/api/test/__snapshots__/test_survey.ambr index f076ff00feeda..dacdb0ebc8550 100644 --- a/posthog/api/test/__snapshots__/test_survey.ambr +++ b/posthog/api/test/__snapshots__/test_survey.ambr @@ -94,6 +94,7 @@ "posthog_team"."session_recording_opt_in", "posthog_team"."capture_console_log_opt_in", "posthog_team"."capture_performance_opt_in", + "posthog_team"."surveys_opt_in", "posthog_team"."session_recording_version", "posthog_team"."signup_token", "posthog_team"."is_demo", diff --git a/posthog/hogql/database/schema/cohort_people.py b/posthog/hogql/database/schema/cohort_people.py index 023690f4f97d9..ee5202fe9ed2a 100644 --- a/posthog/hogql/database/schema/cohort_people.py +++ b/posthog/hogql/database/schema/cohort_people.py @@ -23,11 +23,9 @@ def select_from_cohort_people_table(requested_fields: Dict[str, List[str]]): table_name = "raw_cohort_people" + # must always include the person and cohort ids regardless of what other fields are requested requested_fields = {"person_id": ["person_id"], "cohort_id": ["cohort_id"], **requested_fields} - - fields: List[ast.Expr] = [ - ast.Alias(alias=name, expr=ast.Field(chain=[table_name] + chain)) for name, chain in requested_fields.items() - ] + fields: List[ast.Expr] = [ast.Field(chain=[table_name] + chain) for name, chain in requested_fields.items()] return ast.SelectQuery( select=fields, diff --git a/posthog/hogql/database/test/test_database.py b/posthog/hogql/database/test/test_database.py index c90778f6cf308..0b5f6b6cafd46 100644 --- a/posthog/hogql/database/test/test_database.py +++ b/posthog/hogql/database/test/test_database.py @@ -4,6 +4,7 @@ from unittest.mock import patch import pytest from django.test import override_settings +from parameterized import parameterized from posthog.hogql.database.database import create_hogql_database, serialize_database from posthog.test.base import BaseTest @@ -26,6 +27,21 @@ def test_serialize_database_with_person_on_events_enabled(self): serialized_database = serialize_database(create_hogql_database(team_id=self.team.pk)) assert json.dumps(serialized_database, indent=4) == self.snapshot + @parameterized.expand([False, True]) + def test_can_select_from_each_table_at_all(self, poe_enabled: bool) -> None: + with override_settings(PERSON_ON_EVENTS_OVERRIDE=poe_enabled): + serialized_database = serialize_database(create_hogql_database(team_id=self.team.pk)) + for table, possible_columns in serialized_database.items(): + if table == "numbers": + execute_hogql_query("SELECT number FROM numbers(10) LIMIT 100", self.team) + else: + columns = [ + x["key"] + for x in possible_columns + if "table" not in x and "chain" not in x and "fields" not in x + ] + execute_hogql_query(f"SELECT {','.join(columns)} FROM {table}", team=self.team) + @patch("posthog.hogql.query.sync_execute", return_value=(None, None)) @pytest.mark.usefixtures("unittest_snapshot") def test_database_with_warehouse_tables(self, patch_execute): diff --git a/posthog/hogql_queries/legacy_compatibility/filter_to_query.py b/posthog/hogql_queries/legacy_compatibility/filter_to_query.py new file mode 100644 index 0000000000000..91e1cdaa75b4b --- /dev/null 
+++ b/posthog/hogql_queries/legacy_compatibility/filter_to_query.py @@ -0,0 +1,261 @@ +from posthog.models.entity.entity import Entity as BackendEntity +from posthog.models.filters import AnyInsightFilter +from posthog.models.filters.filter import Filter as LegacyFilter +from posthog.models.filters.path_filter import PathFilter as LegacyPathFilter +from posthog.models.filters.retention_filter import RetentionFilter as LegacyRetentionFilter +from posthog.models.filters.stickiness_filter import StickinessFilter as LegacyStickinessFilter +from posthog.schema import ( + ActionsNode, + BreakdownFilter, + DateRange, + EventsNode, + FunnelExclusion, + FunnelsFilter, + FunnelsQuery, + LifecycleFilter, + LifecycleQuery, + PathsFilter, + PathsQuery, + PropertyGroupFilter, + RetentionFilter, + RetentionQuery, + StickinessFilter, + StickinessQuery, + TrendsFilter, + TrendsQuery, +) +from posthog.types import InsightQueryNode + + +def entity_to_node(entity: BackendEntity) -> EventsNode | ActionsNode: + shared = { + "name": entity.name, + "custom_name": entity.custom_name, + "properties": entity._data.get("properties", None), + "math": entity.math, + "math_property": entity.math_property, + "math_hogql": entity.math_hogql, + "math_group_type_index": entity.math_group_type_index, + } + + if entity.type == "actions": + return ActionsNode(id=entity.id, **shared) + else: + return EventsNode(event=entity.id, **shared) + + +def to_base_entity_dict(entity: BackendEntity): + return { + "type": entity.type, + "id": entity.id, + "name": entity.name, + "custom_name": entity.custom_name, + "order": entity.order, + } + + +insight_to_query_type = { + "TRENDS": TrendsQuery, + "FUNNELS": FunnelsQuery, + "RETENTION": RetentionQuery, + "PATHS": PathsQuery, + "LIFECYCLE": LifecycleQuery, + "STICKINESS": StickinessQuery, +} + + +def _date_range(filter: AnyInsightFilter): + return {"dateRange": DateRange(**filter.date_to_dict())} + + +def _interval(filter: AnyInsightFilter): + if filter.insight == "RETENTION" or filter.insight == "PATHS": + return {} + return {"interval": filter.interval} + + +def _series(filter: AnyInsightFilter): + if filter.insight == "RETENTION" or filter.insight == "PATHS": + return {} + return {"series": map(entity_to_node, filter.entities)} + + +def _sampling_factor(filter: AnyInsightFilter): + return {"samplingFactor": filter.sampling_factor} + + +def _filter_test_accounts(filter: AnyInsightFilter): + return {"filterTestAccounts": filter.filter_test_accounts} + + +def _properties(filter: AnyInsightFilter): + raw_properties = filter._data.get("properties", None) + if raw_properties is None or len(raw_properties) == 0: + return {} + elif isinstance(raw_properties, list): + raw_properties = {"type": "AND", "values": [{"type": "AND", "values": raw_properties}]} + return {"properties": PropertyGroupFilter(**raw_properties)} + else: + return {"properties": PropertyGroupFilter(**raw_properties)} + + +def _breakdown_filter(filter: AnyInsightFilter): + if filter.insight != "TRENDS" and filter.insight != "FUNNELS": + return {} + + breakdownFilter = { + "breakdown_type": filter.breakdown_type, + "breakdown": filter.breakdown, + "breakdown_normalize_url": filter.breakdown_normalize_url, + "breakdown_group_type_index": filter.breakdown_group_type_index, + "breakdown_histogram_bin_count": filter.breakdown_histogram_bin_count if filter.insight == "TRENDS" else None, + } + + if filter.breakdowns is not None: + if len(filter.breakdowns) == 1: + breakdownFilter["breakdown_type"] = filter.breakdowns[0].get("type", 
None) + breakdownFilter["breakdown"] = filter.breakdowns[0].get("property", None) + else: + raise Exception("Could not convert multi-breakdown property `breakdowns` - found more than one breakdown") + + if breakdownFilter["breakdown"] is not None and breakdownFilter["breakdown_type"] is None: + breakdownFilter["breakdown_type"] = "event" + + return {"breakdown": BreakdownFilter(**breakdownFilter)} + + +def _group_aggregation_filter(filter: AnyInsightFilter): + if isinstance(filter, LegacyStickinessFilter): + return {} + return {"aggregation_group_type_index": filter.aggregation_group_type_index} + + +def _insight_filter(filter: AnyInsightFilter): + if filter.insight == "TRENDS" and isinstance(filter, LegacyFilter): + return { + "trendsFilter": TrendsFilter( + smoothing_intervals=filter.smoothing_intervals, + # show_legend=filter.show_legend, + # hidden_legend_indexes=cleanHiddenLegendIndexes(filter.hidden_legend_keys), + compare=filter.compare, + aggregation_axis_format=filter.aggregation_axis_format, + aggregation_axis_prefix=filter.aggregation_axis_prefix, + aggregation_axis_postfix=filter.aggregation_axis_postfix, + formula=filter.formula, + shown_as=filter.shown_as, + display=filter.display, + # show_values_on_series=filter.show_values_on_series, + # show_percent_stack_view=filter.show_percent_stack_view, + ) + } + elif filter.insight == "FUNNELS" and isinstance(filter, LegacyFilter): + return { + "funnelsFilter": FunnelsFilter( + funnel_viz_type=filter.funnel_viz_type, + funnel_order_type=filter.funnel_order_type, + funnel_from_step=filter.funnel_from_step, + funnel_to_step=filter.funnel_to_step, + funnel_window_interval_unit=filter.funnel_window_interval_unit, + funnel_window_interval=filter.funnel_window_interval, + # funnel_step_reference=filter.funnel_step_reference, + breakdown_attribution_type=filter.breakdown_attribution_type, + breakdown_attribution_value=filter.breakdown_attribution_value, + bin_count=filter.bin_count, + exclusions=[ + FunnelExclusion( + **to_base_entity_dict(entity), + funnel_from_step=entity.funnel_from_step, + funnel_to_step=entity.funnel_to_step, + ) + for entity in filter.exclusions + ], + layout=filter.layout, + # hidden_legend_breakdowns: cleanHiddenLegendSeries(filters.hidden_legend_keys), + funnel_aggregate_by_hogql=filter.funnel_aggregate_by_hogql, + ), + } + elif filter.insight == "RETENTION" and isinstance(filter, LegacyRetentionFilter): + return { + "retentionFilter": RetentionFilter( + retention_type=filter.retention_type, + # retention_reference=filter.retention_reference, + total_intervals=filter.total_intervals, + returning_entity=to_base_entity_dict(filter.returning_entity), + target_entity=to_base_entity_dict(filter.target_entity), + period=filter.period, + ) + } + elif filter.insight == "PATHS" and isinstance(filter, LegacyPathFilter): + return { + "pathsFilter": PathsFilter( + # path_type=filter.path_type, # legacy + paths_hogql_expression=filter.paths_hogql_expression, + include_event_types=filter._data.get("include_event_types"), + start_point=filter.start_point, + end_point=filter.end_point, + path_groupings=filter.path_groupings, + exclude_events=filter.exclude_events, + step_limit=filter.step_limit, + path_replacements=filter.path_replacements, + local_path_cleaning_filters=filter.local_path_cleaning_filters, + edge_limit=filter.edge_limit, + min_edge_weight=filter.min_edge_weight, + max_edge_weight=filter.max_edge_weight, + funnel_paths=filter.funnel_paths, + funnel_filter=filter._data.get("funnel_filter"), + ) + } + elif 
filter.insight == "LIFECYCLE": + return { + "lifecycleFilter": LifecycleFilter( + shown_as=filter.shown_as, + # toggledLifecycles=filter.toggledLifecycles, + # show_values_on_series=filter.show_values_on_series, + ) + } + elif filter.insight == "STICKINESS" and isinstance(filter, LegacyStickinessFilter): + return { + "stickinessFilter": StickinessFilter( + compare=filter.compare, + shown_as=filter.shown_as, + # show_legend=filter.show_legend, + # hidden_legend_indexes: cleanHiddenLegendIndexes(filters.hidden_legend_keys), + # show_values_on_series=filter.show_values_on_series, + ) + } + else: + raise Exception(f"Invalid insight type {filter.insight}.") + + +def filter_to_query(filter: AnyInsightFilter) -> InsightQueryNode: + if (filter.insight == "TRENDS" or filter.insight == "FUNNELS" or filter.insight == "LIFECYCLE") and isinstance( + filter, LegacyFilter + ): + matching_filter_type = True + elif filter.insight == "RETENTION" and isinstance(filter, LegacyRetentionFilter): + matching_filter_type = True + elif filter.insight == "PATHS" and isinstance(filter, LegacyPathFilter): + matching_filter_type = True + elif filter.insight == "STICKINESS" and isinstance(filter, LegacyStickinessFilter): + matching_filter_type = True + else: + matching_filter_type = False + + if not matching_filter_type: + raise Exception(f"Filter type {type(filter)} does not match insight type {filter.insight}") + + Query = insight_to_query_type[filter.insight] + + data = { + **_date_range(filter), + **_interval(filter), + **_series(filter), + **_sampling_factor(filter), + **_filter_test_accounts(filter), + **_properties(filter), + **_breakdown_filter(filter), + **_group_aggregation_filter(filter), + **_insight_filter(filter), + } + + return Query(**data) diff --git a/posthog/hogql_queries/legacy_compatibility/test/test_filter_to_query.py b/posthog/hogql_queries/legacy_compatibility/test/test_filter_to_query.py new file mode 100644 index 0000000000000..6359b9e3e808d --- /dev/null +++ b/posthog/hogql_queries/legacy_compatibility/test/test_filter_to_query.py @@ -0,0 +1,1006 @@ +import pytest +from posthog.hogql_queries.legacy_compatibility.filter_to_query import filter_to_query +from posthog.models.filters.filter import Filter as LegacyFilter +from posthog.models.filters.path_filter import PathFilter as LegacyPathFilter +from posthog.models.filters.retention_filter import RetentionFilter as LegacyRetentionFilter +from posthog.models.filters.stickiness_filter import StickinessFilter as LegacyStickinessFilter +from posthog.schema import ( + ActionsNode, + AggregationAxisFormat, + BaseMathType, + BreakdownAttributionType, + BreakdownFilter, + BreakdownType, + ChartDisplayType, + CohortPropertyFilter, + CountPerActorMathType, + ElementPropertyFilter, + EntityType, + EventPropertyFilter, + EventsNode, + FunnelConversionWindowTimeUnit, + FunnelExclusion, + FunnelPathType, + FunnelVizType, + GroupPropertyFilter, + HogQLPropertyFilter, + Key, + PathCleaningFilter, + PathType, + PersonPropertyFilter, + PropertyMathType, + PropertyOperator, + RetentionPeriod, + RetentionType, + SessionPropertyFilter, + ShownAsValue, + StepOrderValue, + TrendsFilter, + FunnelsFilter, + RetentionFilter, + PathsFilter, + StickinessFilter, + LifecycleFilter, +) +from posthog.test.base import BaseTest + + +insight_0 = { + "events": [{"id": "signed_up", "type": "events", "order": 0}], + "actions": [], + "display": "ActionsLineGraph", + "insight": "TRENDS", + "interval": "week", + "date_from": "-8w", +} +insight_1 = { + "events": [{"id": "signed_up", 
"type": "events", "order": 0}], + "actions": [], + "display": "WorldMap", + "insight": "TRENDS", + "breakdown": "$geoip_country_code", + "date_from": "-1m", + "breakdown_type": "event", +} +insight_2 = { + "events": [ + {"id": "signed_up", "name": "signed_up", "type": "events", "order": 2, "custom_name": "Signed up"}, + {"id": "upgraded_plan", "name": "upgraded_plan", "type": "events", "order": 4, "custom_name": "Upgraded plan"}, + ], + "actions": [{"id": 1, "name": "Interacted with file", "type": "actions", "order": 3}], + "display": "FunnelViz", + "insight": "FUNNELS", + "interval": "day", + "date_from": "-1m", + "funnel_viz_type": "steps", + "filter_test_accounts": True, +} +insight_3 = { + "period": "Week", + "display": "ActionsTable", + "insight": "RETENTION", + "properties": { + "type": "AND", + "values": [ + {"type": "AND", "values": [{"key": "email", "type": "person", "value": "is_set", "operator": "is_set"}]} + ], + }, + "target_entity": {"id": "signed_up", "name": "signed_up", "type": "events", "order": 0}, + "retention_type": "retention_first_time", + "total_intervals": 9, + "returning_entity": {"id": 1, "name": "Interacted with file", "type": "actions", "order": 0}, +} +insight_4 = { + "events": [], + "actions": [{"id": 1, "math": "total", "name": "Interacted with file", "type": "actions", "order": 0}], + "compare": False, + "display": "ActionsLineGraph", + "insight": "LIFECYCLE", + "interval": "day", + "shown_as": "Lifecycle", + "date_from": "-8w", + "new_entity": [], + "properties": [], + "filter_test_accounts": True, +} +insight_5 = { + "events": [ + { + "id": "uploaded_file", + "math": "sum", + "name": "uploaded_file", + "type": "events", + "order": 0, + "custom_name": "Uploaded bytes", + "math_property": "file_size_b", + }, + { + "id": "deleted_file", + "math": "sum", + "name": "deleted_file", + "type": "events", + "order": 1, + "custom_name": "Deleted bytes", + "math_property": "file_size_b", + }, + ], + "actions": [], + "display": "ActionsLineGraph", + "insight": "TRENDS", + "interval": "week", + "date_from": "-8w", + "new_entity": [], + "properties": [], + "filter_test_accounts": True, +} +insight_6 = { + "events": [{"id": "paid_bill", "math": "sum", "type": "events", "order": 0, "math_property": "amount_usd"}], + "actions": [], + "display": "ActionsLineGraph", + "insight": "TRENDS", + "interval": "month", + "date_from": "-6m", +} +insight_7 = { + "events": [ + { + "id": "paid_bill", + "math": "unique_group", + "name": "paid_bill", + "type": "events", + "order": 0, + "math_group_type_index": 0, + } + ], + "actions": [], + "compare": True, + "date_to": None, + "display": "BoldNumber", + "insight": "TRENDS", + "interval": "day", + "date_from": "-30d", + "properties": [], + "filter_test_accounts": True, +} +insight_8 = { + "events": [{"id": "$pageview", "math": "total", "type": "events", "order": 0}], + "actions": [], + "display": "ActionsTable", + "insight": "TRENDS", + "interval": "day", + "breakdown": "$current_url", + "date_from": "-6m", + "new_entity": [], + "properties": { + "type": "AND", + "values": [ + { + "type": "AND", + "values": [{"key": "$current_url", "type": "event", "value": "/files/", "operator": "not_icontains"}], + } + ], + }, + "breakdown_type": "event", +} +insight_9 = { + "events": [ + { + "id": "$pageview", + "name": "$pageview", + "type": "events", + "order": 0, + "properties": [ + {"key": "$current_url", "type": "event", "value": "https://hedgebox.net/", "operator": "exact"} + ], + "custom_name": "Viewed homepage", + }, + { + "id": "$pageview", + 
"name": "$pageview", + "type": "events", + "order": 1, + "properties": [ + {"key": "$current_url", "type": "event", "value": "https://hedgebox.net/signup/", "operator": "regex"} + ], + "custom_name": "Viewed signup page", + }, + {"id": "signed_up", "name": "signed_up", "type": "events", "order": 2, "custom_name": "Signed up"}, + ], + "actions": [], + "display": "FunnelViz", + "insight": "FUNNELS", + "interval": "day", + "date_from": "-1m", + "funnel_viz_type": "steps", + "filter_test_accounts": True, +} +insight_10 = { + "date_to": None, + "insight": "PATHS", + "date_from": "-30d", + "edge_limit": 50, + "properties": {"type": "AND", "values": []}, + "step_limit": 5, + "start_point": "https://hedgebox.net/", + "funnel_filter": {}, + "exclude_events": [], + "path_groupings": ["/files/*"], + "include_event_types": ["$pageview"], + "local_path_cleaning_filters": [], +} +insight_11 = { + "events": [ + {"id": "uploaded_file", "type": "events", "order": 0}, + {"id": "deleted_file", "type": "events", "order": 2}, + {"id": "downloaded_file", "type": "events", "order": 1}, + ], + "actions": [], + "display": "ActionsLineGraph", + "insight": "TRENDS", + "interval": "day", + "date_from": "-30d", +} +insight_12 = { + "events": [{"id": "$pageview", "math": "dau", "type": "events"}], + "display": "ActionsLineGraph", + "insight": "TRENDS", + "interval": "day", + "date_from": "-30d", + "filter_test_accounts": True, +} +insight_13 = { + "events": [{"id": "$pageview", "math": "dau", "type": "events"}], + "display": "ActionsLineGraph", + "insight": "TRENDS", + "interval": "week", + "date_from": "-90d", + "filter_test_accounts": True, +} +insight_14 = { + "period": "Week", + "insight": "RETENTION", + "target_entity": {"id": "$pageview", "type": "events"}, + "retention_type": "retention_first_time", + "returning_entity": {"id": "$pageview", "type": "events"}, + "filter_test_accounts": True, +} +insight_15 = { + "events": [{"id": "$pageview", "type": "events"}], + "insight": "LIFECYCLE", + "interval": "week", + "shown_as": "Lifecycle", + "date_from": "-30d", + "entity_type": "events", + "filter_test_accounts": True, +} +insight_16 = { + "events": [{"id": "$pageview", "math": "dau", "type": "events"}], + "display": "ActionsBarValue", + "insight": "TRENDS", + "interval": "day", + "breakdown": "$referring_domain", + "date_from": "-14d", + "breakdown_type": "event", + "filter_test_accounts": True, +} +insight_17 = { + "events": [ + {"id": "$pageview", "type": "events", "order": 0, "custom_name": "First page view"}, + {"id": "$pageview", "type": "events", "order": 1, "custom_name": "Second page view"}, + {"id": "$pageview", "type": "events", "order": 2, "custom_name": "Third page view"}, + ], + "layout": "horizontal", + "display": "FunnelViz", + "insight": "FUNNELS", + "interval": "day", + "breakdown": "$browser", + "exclusions": [], + "breakdown_type": "event", + "funnel_viz_type": "steps", + "filter_test_accounts": True, +} + +test_insights = [ + insight_0, + insight_1, + insight_2, + insight_3, + insight_4, + insight_5, + insight_6, + insight_7, + insight_8, + insight_9, + insight_10, + insight_11, + insight_12, + insight_13, + insight_14, + insight_15, + insight_16, + insight_17, +] + + +@pytest.mark.parametrize("insight", test_insights) +def test_base_insights(insight): + """smoke test (i.e. 
filter_to_query should not throw) for real world insights""" + if insight.get("insight") == "RETENTION": + filter = LegacyRetentionFilter(data=insight) + elif insight.get("insight") == "PATHS": + filter = LegacyPathFilter(data=insight) + elif insight.get("insight") == "STICKINESS": + filter = LegacyStickinessFilter(data=insight) + else: + filter = LegacyFilter(data=insight) + filter_to_query(filter) + + +properties_0 = [] +properties_1 = [{"key": "account_id", "type": "event", "value": ["some_id"], "operator": "exact"}] +properties_2 = [ + {"key": "account_id", "type": "event", "value": ["some_id"], "operator": "exact"}, + {"key": "$current_url", "type": "event", "value": "/path", "operator": "not_icontains"}, +] +properties_3 = {} +properties_4 = {"type": "AND", "values": []} +properties_5 = {"type": "AND", "values": [{"type": "AND", "values": []}]} +properties_6 = { + "type": "AND", + "values": [ + { + "type": "AND", + "values": [ + {"key": "$current_url", "type": "event", "value": "?", "operator": "not_icontains"}, + {"key": "$referring_domain", "type": "event", "value": "google", "operator": "icontains"}, + ], + } + ], +} +properties_7 = { + "type": "AND", + "values": [ + {"type": "AND", "values": [{"type": "AND", "values": []}, {"type": "AND", "values": []}]}, + { + "type": "AND", + "values": [{"key": "dateDiff('minute', timestamp, now()) < 5", "type": "hogql", "value": None}], + }, + ], +} +properties_8 = { + "type": "AND", + "values": [ + { + "type": "AND", + "values": [{"key": "dateDiff('minute', timestamp, now()) < 5", "type": "hogql", "value": None}], + }, + { + "type": "AND", + "values": [{"key": "dateDiff('minute', timestamp, now()) < 5", "type": "hogql", "value": None}], + }, + ], +} +properties_9 = { + "type": "AND", + "values": [ + { + "type": "AND", + "values": [ + {"key": "$browser", "value": ["Chrome"], "operator": "exact", "type": "event"}, + {"key": "$browser", "value": ["Chrome"], "operator": "exact", "type": "person"}, + {"key": "$feature/hogql-insights", "value": ["true"], "operator": "exact", "type": "event"}, + { + "key": "site_url", + "value": ["http://localhost:8000"], + "operator": "exact", + "type": "group", + "group_type_index": 1, + }, + {"key": "id", "value": 2, "type": "cohort"}, + {"key": "tag_name", "value": ["elem"], "operator": "exact", "type": "element"}, + {"key": "$session_duration", "value": None, "operator": "gt", "type": "session"}, + {"type": "hogql", "key": "properties.name", "value": None}, + ], + }, + {"type": "OR", "values": [{}]}, + ], +} + +test_properties = [ + properties_0, + properties_1, + properties_2, + properties_3, + properties_4, + properties_5, + properties_6, + properties_7, + properties_8, + properties_9, +] + + +@pytest.mark.parametrize("properties", test_properties) +def test_base_properties(properties): + """smoke test (i.e. 
filter_to_query should not throw) for real world properties""" + filter = LegacyFilter(data={"properties": properties}) + filter_to_query(filter) + + +class TestFilterToQuery(BaseTest): + def test_base_trend(self): + filter = LegacyFilter(data={}) + + query = filter_to_query(filter) + + self.assertEqual(query.kind, "TrendsQuery") + + def test_full_trend(self): + filter = LegacyFilter(data={}) + + query = filter_to_query(filter) + + self.assertEqual( + query.model_dump(exclude_defaults=True), + { + "dateRange": {"date_from": "-7d"}, + "interval": "day", + "series": [], + "filterTestAccounts": False, + "breakdown": {"breakdown_normalize_url": False}, + "trendsFilter": { + "compare": False, + "display": ChartDisplayType.ActionsLineGraph, + "smoothing_intervals": 1, + }, + }, + ) + + def test_base_funnel(self): + filter = LegacyFilter(data={"insight": "FUNNELS"}) + + query = filter_to_query(filter) + + self.assertEqual(query.kind, "FunnelsQuery") + + def test_base_retention_query(self): + filter = LegacyFilter(data={"insight": "RETENTION"}) + + with pytest.raises(Exception) as exception: + filter_to_query(filter) + + self.assertEqual( + str(exception.value), + "Filter type does not match insight type RETENTION", + ) + + def test_base_retention_query_from_retention_filter(self): + filter = LegacyRetentionFilter(data={}) + + query = filter_to_query(filter) + + self.assertEqual(query.kind, "RetentionQuery") + + def test_base_paths_query(self): + filter = LegacyFilter(data={"insight": "PATHS"}) + + with pytest.raises(Exception) as exception: + filter_to_query(filter) + + self.assertEqual( + str(exception.value), + "Filter type does not match insight type PATHS", + ) + + def test_base_path_query_from_path_filter(self): + filter = LegacyPathFilter(data={}) + + query = filter_to_query(filter) + + self.assertEqual(query.kind, "PathsQuery") + + def test_base_lifecycle_query(self): + filter = LegacyFilter(data={"insight": "LIFECYCLE"}) + + query = filter_to_query(filter) + + self.assertEqual(query.kind, "LifecycleQuery") + + def test_base_stickiness_query(self): + filter = LegacyFilter(data={"insight": "STICKINESS"}) + + with pytest.raises(Exception) as exception: + filter_to_query(filter) + + self.assertEqual( + str(exception.value), + "Filter type does not match insight type STICKINESS", + ) + + def test_base_stickiness_query_from_stickiness_filter(self): + filter = LegacyStickinessFilter(data={}, team=self.team) + + query = filter_to_query(filter) + + self.assertEqual(query.kind, "StickinessQuery") + + def test_date_range_default(self): + filter = LegacyFilter(data={}) + + query = filter_to_query(filter) + + self.assertEqual(query.dateRange.date_from, "-7d") + self.assertEqual(query.dateRange.date_to, None) + + def test_date_range_custom(self): + filter = LegacyFilter(data={"date_from": "-14d", "date_to": "-7d"}) + + query = filter_to_query(filter) + + self.assertEqual(query.dateRange.date_from, "-14d") + self.assertEqual(query.dateRange.date_to, "-7d") + + def test_interval_default(self): + filter = LegacyFilter(data={}) + + query = filter_to_query(filter) + + self.assertEqual(query.interval, "day") + + def test_interval_custom(self): + filter = LegacyFilter(data={"interval": "hour"}) + + query = filter_to_query(filter) + + self.assertEqual(query.interval, "hour") + + def test_series_default(self): + filter = LegacyFilter(data={}) + + query = filter_to_query(filter) + + self.assertEqual(query.series, []) + + def test_series_custom(self): + filter = LegacyFilter( + data={ + "events": [{"id": 
"$pageview"}, {"id": "$pageview", "math": "dau"}], + "actions": [{"id": 1}, {"id": 1, "math": "dau"}], + } + ) + + query = filter_to_query(filter) + + self.assertEqual( + query.series, + [ + ActionsNode(id=1), + ActionsNode(id=1, math=BaseMathType.dau), + EventsNode(event="$pageview", name="$pageview"), + EventsNode(event="$pageview", name="$pageview", math=BaseMathType.dau), + ], + ) + + def test_series_order(self): + filter = LegacyFilter( + data={ + "events": [{"id": "$pageview", "order": 1}, {"id": "$pageview", "math": "dau", "order": 2}], + "actions": [{"id": 1, "order": 3}, {"id": 1, "math": "dau", "order": 0}], + } + ) + + query = filter_to_query(filter) + + self.assertEqual( + query.series, + [ + ActionsNode(id=1, math=BaseMathType.dau), + EventsNode(event="$pageview", name="$pageview"), + EventsNode(event="$pageview", name="$pageview", math=BaseMathType.dau), + ActionsNode(id=1), + ], + ) + + def test_series_math(self): + filter = LegacyFilter( + data={ + "events": [ + {"id": "$pageview", "math": "dau"}, # base math type + {"id": "$pageview", "math": "median", "math_property": "$math_prop"}, # property math type + {"id": "$pageview", "math": "avg_count_per_actor"}, # count per actor math type + {"id": "$pageview", "math": "unique_group", "math_group_type_index": 0}, # unique group + { + "id": "$pageview", + "math": "hogql", + "math_hogql": "avg(toInt(properties.$session_id)) + 1000", + }, # hogql + ] + } + ) + + query = filter_to_query(filter) + + self.assertEqual( + query.series, + [ + EventsNode(event="$pageview", name="$pageview", math=BaseMathType.dau), + EventsNode( + event="$pageview", name="$pageview", math=PropertyMathType.median, math_property="$math_prop" + ), + EventsNode(event="$pageview", name="$pageview", math=CountPerActorMathType.avg_count_per_actor), + EventsNode(event="$pageview", name="$pageview", math="unique_group", math_group_type_index=0), + EventsNode( + event="$pageview", + name="$pageview", + math="hogql", + math_hogql="avg(toInt(properties.$session_id)) + 1000", + ), + ], + ) + + def test_series_properties(self): + filter = LegacyFilter( + data={ + "events": [ + {"id": "$pageview", "properties": []}, # smoke test + { + "id": "$pageview", + "properties": [{"key": "success", "type": "event", "value": ["true"], "operator": "exact"}], + }, + { + "id": "$pageview", + "properties": [{"key": "email", "type": "person", "value": "is_set", "operator": "is_set"}], + }, + { + "id": "$pageview", + "properties": [{"key": "text", "value": ["some text"], "operator": "exact", "type": "element"}], + }, + { + "id": "$pageview", + "properties": [{"key": "$session_duration", "value": 1, "operator": "gt", "type": "session"}], + }, + {"id": "$pageview", "properties": [{"key": "id", "value": 2, "type": "cohort"}]}, + { + "id": "$pageview", + "properties": [ + { + "key": "name", + "value": ["Hedgebox Inc."], + "operator": "exact", + "type": "group", + "group_type_index": 2, + } + ], + }, + { + "id": "$pageview", + "properties": [ + {"key": "dateDiff('minute', timestamp, now()) < 30", "type": "hogql", "value": None} + ], + }, + { + "id": "$pageview", + "properties": [ + {"key": "$referring_domain", "type": "event", "value": "google", "operator": "icontains"}, + {"key": "utm_source", "type": "event", "value": "is_not_set", "operator": "is_not_set"}, + ], + }, + ] + } + ) + + query = filter_to_query(filter) + + self.assertEqual( + query.series, + [ + EventsNode(event="$pageview", name="$pageview", properties=[]), + EventsNode( + event="$pageview", + name="$pageview", + 
properties=[EventPropertyFilter(key="success", value=["true"], operator=PropertyOperator.exact)], + ), + EventsNode( + event="$pageview", + name="$pageview", + properties=[PersonPropertyFilter(key="email", value="is_set", operator=PropertyOperator.is_set)], + ), + EventsNode( + event="$pageview", + name="$pageview", + properties=[ + ElementPropertyFilter(key=Key.text, value=["some text"], operator=PropertyOperator.exact) + ], + ), + EventsNode( + event="$pageview", + name="$pageview", + properties=[SessionPropertyFilter(value=1, operator=PropertyOperator.gt)], + ), + EventsNode(event="$pageview", name="$pageview", properties=[CohortPropertyFilter(value=2)]), + EventsNode( + event="$pageview", + name="$pageview", + properties=[ + GroupPropertyFilter( + key="name", value=["Hedgebox Inc."], operator=PropertyOperator.exact, group_type_index=2 + ) + ], + ), + EventsNode( + event="$pageview", + name="$pageview", + properties=[HogQLPropertyFilter(key="dateDiff('minute', timestamp, now()) < 30")], + ), + EventsNode( + event="$pageview", + name="$pageview", + properties=[ + EventPropertyFilter( + key="$referring_domain", value="google", operator=PropertyOperator.icontains + ), + EventPropertyFilter(key="utm_source", value="is_not_set", operator=PropertyOperator.is_not_set), + ], + ), + ], + ) + + def test_breakdown(self): + filter = LegacyFilter(data={"breakdown_type": "event", "breakdown": "$browser"}) + + query = filter_to_query(filter) + + self.assertEqual( + query.breakdown, + BreakdownFilter(breakdown_type=BreakdownType.event, breakdown="$browser", breakdown_normalize_url=False), + ) + + def test_breakdown_converts_multi(self): + filter = LegacyFilter(data={"breakdowns": [{"type": "event", "property": "$browser"}]}) + + query = filter_to_query(filter) + + self.assertEqual( + query.breakdown, + BreakdownFilter(breakdown_type=BreakdownType.event, breakdown="$browser", breakdown_normalize_url=False), + ) + + def test_breakdown_type_default(self): + filter = LegacyFilter(data={"breakdown": "some_prop"}) + + query = filter_to_query(filter) + + self.assertEqual( + query.breakdown, + BreakdownFilter(breakdown_type=BreakdownType.event, breakdown="some_prop", breakdown_normalize_url=False), + ) + + def test_trends_filter(self): + filter = LegacyFilter( + data={ + "smoothing_intervals": 2, + "compare": True, + "aggregation_axis_format": "duration_ms", + "aggregation_axis_prefix": "pre", + "aggregation_axis_postfix": "post", + "formula": "A + B", + "shown_as": "Volume", + "display": "ActionsAreaGraph", + } + ) + + query = filter_to_query(filter) + + self.assertEqual( + query.trendsFilter, + TrendsFilter( + smoothing_intervals=2, + compare=True, + aggregation_axis_format=AggregationAxisFormat.duration_ms, + aggregation_axis_prefix="pre", + aggregation_axis_postfix="post", + formula="A + B", + shown_as=ShownAsValue.Volume, + display=ChartDisplayType.ActionsAreaGraph, + ), + ) + + def test_funnels_filter(self): + filter = LegacyFilter( + data={ + "insight": "FUNNELS", + "funnel_viz_type": "steps", + "funnel_window_interval_unit": "hour", + "funnel_window_interval": 13, + "breakdown_attribution_type": "step", + "breakdown_attribution_value": 2, + "funnel_order_type": "strict", + "funnel_aggregate_by_hogql": "person_id", + "exclusions": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + "funnel_from_step": 1, + "funnel_to_step": 2, + } + ], + "bin_count": 15, # used in time to convert: number of bins to show in histogram + "funnel_from_step": 1, # used in time to convert: 
initial step index to compute time to convert + "funnel_to_step": 2, # used in time to convert: ending step index to compute time to convert + # + # frontend only params + # "layout": layout, + # "funnel_step_reference": "previous", # whether conversion shown in graph should be across all steps or just from the previous step + # hidden_legend_keys # used to toggle visibilities in table and legend + # + # persons endpoint only params + # "funnel_step_breakdown": funnel_step_breakdown, # used in steps breakdown: persons modal + # "funnel_correlation_person_entity":funnel_correlation_person_entity, + # "funnel_correlation_person_converted":funnel_correlation_person_converted, # success or failure counts + # "entrance_period_start": entrance_period_start, # this and drop_off is used for funnels time conversion date for the persons modal + # "drop_off": drop_off, + # "funnel_step": funnel_step, + # "funnel_custom_steps": funnel_custom_steps, + } + ) + + query = filter_to_query(filter) + + self.assertEqual( + query.funnelsFilter, + FunnelsFilter( + funnel_viz_type=FunnelVizType.steps, + funnel_from_step=1, + funnel_to_step=2, + funnel_window_interval_unit=FunnelConversionWindowTimeUnit.hour, + funnel_window_interval=13, + breakdown_attribution_type=BreakdownAttributionType.step, + breakdown_attribution_value=2, + funnel_order_type=StepOrderValue.strict, + exclusions=[ + FunnelExclusion( + id="$pageview", + type=EntityType.events, + order=0, + name="$pageview", + funnel_from_step=1, + funnel_to_step=2, + ) + ], + bin_count=15, + funnel_aggregate_by_hogql="person_id", + # funnel_step_reference=FunnelStepReference.previous, + ), + ) + + def test_retention_filter(self): + filter = LegacyRetentionFilter( + data={ + "retention_type": "retention_first_time", + # retention_reference="previous", + "total_intervals": 12, + "returning_entity": {"id": "$pageview", "name": "$pageview", "type": "events"}, + "target_entity": {"id": "$pageview", "name": "$pageview", "type": "events"}, + "period": "Week", + } + ) + + query = filter_to_query(filter) + + self.assertEqual( + query.retentionFilter, + RetentionFilter( + retention_type=RetentionType.retention_first_time, + total_intervals=12, + period=RetentionPeriod.Week, + returning_entity={ + "id": "$pageview", + "name": "$pageview", + "type": "events", + "custom_name": None, + "order": None, + }, + target_entity={ + "id": "$pageview", + "name": "$pageview", + "type": "events", + "custom_name": None, + "order": None, + }, + ), + ) + + def test_paths_filter(self): + filter = LegacyPathFilter( + data={ + "include_event_types": ["$pageview", "hogql"], + "start_point": "http://localhost:8000/events", + "end_point": "http://localhost:8000/home", + "paths_hogql_expression": "event", + "edge_limit": 50, + "min_edge_weight": 10, + "max_edge_weight": 20, + "local_path_cleaning_filters": [{"alias": "merchant", "regex": "\\/merchant\\/\\d+\\/dashboard$"}], + "path_replacements": True, + "exclude_events": ["http://localhost:8000/events"], + "step_limit": 5, + "path_groupings": ["/merchant/*/payment"], + "funnel_paths": "funnel_path_between_steps", + "funnel_filter": { + "insight": "FUNNELS", + "events": [ + {"type": "events", "id": "$pageview", "order": 0, "name": "$pageview", "math": "total"}, + {"type": "events", "id": None, "order": 1, "math": "total"}, + ], + "funnel_viz_type": "steps", + "exclusions": [], + "filter_test_accounts": True, + "funnel_step": 2, + }, + } + ) + + query = filter_to_query(filter) + + self.assertEqual( + query.pathsFilter, + PathsFilter( + 
include_event_types=[PathType.field_pageview, PathType.hogql], + paths_hogql_expression="event", + start_point="http://localhost:8000/events", + end_point="http://localhost:8000/home", + edge_limit=50, + min_edge_weight=10, + max_edge_weight=20, + local_path_cleaning_filters=[ + PathCleaningFilter(alias="merchant", regex="\\/merchant\\/\\d+\\/dashboard$") + ], + path_replacements=True, + exclude_events=["http://localhost:8000/events"], + step_limit=5, + path_groupings=["/merchant/*/payment"], + funnel_paths=FunnelPathType.funnel_path_between_steps, + funnel_filter={ + "insight": "FUNNELS", + "events": [ + {"type": "events", "id": "$pageview", "order": 0, "name": "$pageview", "math": "total"}, + {"type": "events", "id": None, "order": 1, "math": "total"}, + ], + "funnel_viz_type": "steps", + "exclusions": [], + "filter_test_accounts": True, + "funnel_step": 2, + }, + ), + ) + + def test_stickiness_filter(self): + filter = LegacyStickinessFilter( + data={"insight": "STICKINESS", "compare": True, "shown_as": "Stickiness"}, team=self.team + ) + + query = filter_to_query(filter) + + self.assertEqual( + query.stickinessFilter, + StickinessFilter(compare=True, shown_as=ShownAsValue.Stickiness), + ) + + def test_lifecycle_filter(self): + filter = LegacyFilter( + data={ + "insight": "LIFECYCLE", + "shown_as": "Lifecycle", + } + ) + + query = filter_to_query(filter) + + self.assertEqual( + query.lifecycleFilter, + LifecycleFilter( + shown_as=ShownAsValue.Lifecycle, + ), + ) diff --git a/posthog/hogql_queries/query_runner.py b/posthog/hogql_queries/query_runner.py index 30a6627d9a310..5dbd4850e599d 100644 --- a/posthog/hogql_queries/query_runner.py +++ b/posthog/hogql_queries/query_runner.py @@ -1,5 +1,5 @@ from abc import ABC, abstractmethod -from datetime import datetime, timedelta +from datetime import datetime from typing import Any, Generic, List, Optional, Type, Dict, TypeVar from prometheus_client import Counter @@ -119,9 +119,9 @@ def _cache_key(self) -> str: return generate_cache_key(f"query_{self.toJSON()}_{self.team.pk}_{self.team.timezone}") @abstractmethod - def _is_stale(self, cached_result_package) -> bool: + def _is_stale(self, cached_result_package): raise NotImplementedError() @abstractmethod - def _refresh_frequency(self) -> timedelta: + def _refresh_frequency(self): raise NotImplementedError() diff --git a/posthog/hogql_queries/trends_query_runner.py b/posthog/hogql_queries/trends_query_runner.py new file mode 100644 index 0000000000000..373b55b32790b --- /dev/null +++ b/posthog/hogql_queries/trends_query_runner.py @@ -0,0 +1,311 @@ +from datetime import timedelta +from itertools import groupby +from math import ceil +from operator import itemgetter +from typing import List, Optional, Any, Dict + +from django.utils.timezone import datetime +from posthog.caching.insights_api import BASE_MINIMUM_INSIGHT_REFRESH_INTERVAL, REDUCED_MINIMUM_INSIGHT_REFRESH_INTERVAL +from posthog.caching.utils import is_stale + +from posthog.hogql import ast +from posthog.hogql.parser import parse_expr, parse_select +from posthog.hogql.property import property_to_expr +from posthog.hogql.query import execute_hogql_query +from posthog.hogql.timings import HogQLTimings +from posthog.hogql_queries.query_runner import QueryRunner +from posthog.hogql_queries.utils.formula_ast import FormulaAST +from posthog.hogql_queries.utils.query_date_range import QueryDateRange +from posthog.hogql_queries.utils.query_previous_period_date_range import QueryPreviousPeriodDateRange +from posthog.models import Team +from 
posthog.models.filters.mixins.utils import cached_property +from posthog.schema import ActionsNode, EventsNode, HogQLQueryResponse, TrendsQuery, TrendsQueryResponse + + +class SeriesWithExtras: + series: EventsNode | ActionsNode + is_previous_period_series: Optional[bool] + + def __init__(self, series: EventsNode | ActionsNode, is_previous_period_series: Optional[bool]): + self.series = series + self.is_previous_period_series = is_previous_period_series + + +class TrendsQueryRunner(QueryRunner): + query: TrendsQuery + query_type = TrendsQuery + series: List[SeriesWithExtras] + + def __init__(self, query: TrendsQuery | Dict[str, Any], team: Team, timings: Optional[HogQLTimings] = None): + super().__init__(query, team, timings) + self.series = self.setup_series() + + def to_query(self) -> List[ast.SelectQuery]: + queries = [] + with self.timings.measure("trends_query"): + for series in self.series: + if not series.is_previous_period_series: + date_placeholders = self.query_date_range.to_placeholders() + else: + date_placeholders = self.query_previous_date_range.to_placeholders() + + queries.append( + parse_select( + """ + SELECT + groupArray(day_start) AS date, + groupArray(count) AS total + FROM + ( + SELECT + sum(total) AS count, + day_start + FROM + ( + SELECT + 0 AS total, + dateTrunc({interval}, {date_to}) - {number_interval_period} AS day_start + FROM + numbers( + coalesce(dateDiff({interval}, {date_from}, {date_to}), 0) + ) + UNION ALL + SELECT + 0 AS total, + {date_from} + UNION ALL + SELECT + {aggregation_operation} AS total, + dateTrunc({interval}, toTimeZone(toDateTime(timestamp), 'UTC')) AS date + FROM events AS e + %s + WHERE {events_filter} + GROUP BY date + ) + GROUP BY day_start + ORDER BY day_start ASC + ) + """ + % (self.sample_value()), + placeholders={ + **date_placeholders, + "events_filter": self.events_filter(series), + "aggregation_operation": self.aggregation_operation(series.series), + }, + timings=self.timings, + ) + ) + return queries + + def _is_stale(self, cached_result_package): + date_to = self.query_date_range.date_to() + interval = self.query_date_range.interval_name + return is_stale(self.team, date_to, interval, cached_result_package) + + def _refresh_frequency(self): + date_to = self.query_date_range.date_to() + date_from = self.query_date_range.date_from() + interval = self.query_date_range.interval_name + + delta_days: Optional[int] = None + if date_from and date_to: + delta = date_to - date_from + delta_days = ceil(delta.total_seconds() / timedelta(days=1).total_seconds()) + + refresh_frequency = BASE_MINIMUM_INSIGHT_REFRESH_INTERVAL + if interval == "hour" or (delta_days is not None and delta_days <= 7): + # The interval is shorter for short-term insights + refresh_frequency = REDUCED_MINIMUM_INSIGHT_REFRESH_INTERVAL + + return refresh_frequency + + def to_persons_query(self) -> str: + # TODO: add support for selecting and filtering by breakdowns + raise NotImplementedError() + + def calculate(self): + queries = self.to_query() + + res = [] + timings = [] + + for index, query in enumerate(queries): + series_with_extra = self.series[index] + + response = execute_hogql_query( + query_type="TrendsQuery", + query=query, + team=self.team, + timings=self.timings, + ) + + timings.extend(response.timings) + + res.extend(self.build_series_response(response, series_with_extra)) + + if self.query.trendsFilter is not None and self.query.trendsFilter.formula is not None: + res = self.apply_formula(self.query.trendsFilter.formula, res) + + return 
TrendsQueryResponse(result=res, timings=timings) + + def build_series_response(self, response: HogQLQueryResponse, series: SeriesWithExtras): + if response.results is None: + return [] + + res = [] + for val in response.results: + series_object = { + "data": val[1], + "labels": [item.strftime("%-d-%b-%Y") for item in val[0]], # Add back in hour formatting + "days": [item.strftime("%Y-%m-%d") for item in val[0]], # Add back in hour formatting + "count": float(sum(val[1])), + "label": "All events" if self.series_event(series.series) is None else self.series_event(series.series), + } + + # Modifications for when comparing to previous period + if self.query.trendsFilter is not None and self.query.trendsFilter.compare: + labels = [ + "{} {}".format(self.query.interval if self.query.interval is not None else "day", i) + for i in range(len(series_object["labels"])) + ] + + series_object["compare"] = True + series_object["compare_label"] = "previous" if series.is_previous_period_series else "current" + series_object["labels"] = labels + + res.append(series_object) + return res + + @cached_property + def query_date_range(self): + return QueryDateRange( + date_range=self.query.dateRange, team=self.team, interval=self.query.interval, now=datetime.now() + ) + + @cached_property + def query_previous_date_range(self): + return QueryPreviousPeriodDateRange( + date_range=self.query.dateRange, team=self.team, interval=self.query.interval, now=datetime.now() + ) + + def aggregation_operation(self, series: EventsNode | ActionsNode) -> ast.Expr: + if series.math == "hogql": + return parse_expr(series.math_hogql) + + return parse_expr("count(*)") + + def events_filter(self, series_with_extra: SeriesWithExtras) -> ast.Expr: + series = series_with_extra.series + filters: List[ast.Expr] = [] + + # Team ID + filters.append(parse_expr("team_id = {team_id}", placeholders={"team_id": ast.Constant(value=self.team.pk)})) + + if not series_with_extra.is_previous_period_series: + # Dates (current period) + filters.extend( + [ + parse_expr( + "(toTimeZone(timestamp, 'UTC') >= {date_from})", + placeholders=self.query_date_range.to_placeholders(), + ), + parse_expr( + "(toTimeZone(timestamp, 'UTC') <= {date_to})", + placeholders=self.query_date_range.to_placeholders(), + ), + ] + ) + else: + # Date (previous period) + filters.extend( + [ + parse_expr( + "(toTimeZone(timestamp, 'UTC') >= {date_from})", + placeholders=self.query_previous_date_range.to_placeholders(), + ), + parse_expr( + "(toTimeZone(timestamp, 'UTC') <= {date_to})", + placeholders=self.query_previous_date_range.to_placeholders(), + ), + ] + ) + + # Series + if self.series_event(series) is not None: + filters.append( + parse_expr("event = {event}", placeholders={"event": ast.Constant(value=self.series_event(series))}) + ) + + # Filter Test Accounts + if ( + self.query.filterTestAccounts + and isinstance(self.team.test_account_filters, list) + and len(self.team.test_account_filters) > 0 + ): + for property in self.team.test_account_filters: + filters.append(property_to_expr(property, self.team)) + + # Properties + if self.query.properties is not None and self.query.properties != []: + filters.append(property_to_expr(self.query.properties, self.team)) + + # Series Filters + if series.properties is not None and series.properties != []: + filters.append(property_to_expr(series.properties, self.team)) + + if len(filters) == 0: + return ast.Constant(value=True) + elif len(filters) == 1: + return filters[0] + else: + return ast.And(exprs=filters) + + # Using string 
interpolation for SAMPLE due to HogQL limitations with `UNION ALL` and `SAMPLE` AST nodes + def sample_value(self) -> str: + if self.query.samplingFactor is None: + return "" + + return f"SAMPLE {self.query.samplingFactor}" + + def series_event(self, series: EventsNode | ActionsNode) -> str | None: + if isinstance(series, EventsNode): + return series.event + return None + + def setup_series(self) -> List[SeriesWithExtras]: + if self.query.trendsFilter is not None and self.query.trendsFilter.compare: + updated_series = [] + for series in self.query.series: + updated_series.append(SeriesWithExtras(series, is_previous_period_series=False)) + updated_series.append(SeriesWithExtras(series, is_previous_period_series=True)) + return updated_series + + return [SeriesWithExtras(series, is_previous_period_series=False) for series in self.query.series] + + def apply_formula(self, formula: str, results: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + if self.query.trendsFilter is not None and self.query.trendsFilter.compare: + sorted_results = sorted(results, key=itemgetter("compare_label")) + res = [] + for _, group in groupby(sorted_results, key=itemgetter("compare_label")): + group_list = list(group) + + series_data = map(lambda s: s["data"], group_list) + new_series_data = FormulaAST(series_data).call(formula) + + new_result = group_list[0] + new_result["data"] = new_series_data + new_result["count"] = float(sum(new_series_data)) + new_result["label"] = f"Formula ({formula})" + + res.append(new_result) + return res + + series_data = map(lambda s: s["data"], results) + new_series_data = FormulaAST(series_data).call(formula) + new_result = results[0] + + new_result["data"] = new_series_data + new_result["count"] = float(sum(new_series_data)) + new_result["label"] = f"Formula ({formula})" + + return [new_result] diff --git a/posthog/hogql_queries/utils/formula_ast.py b/posthog/hogql_queries/utils/formula_ast.py new file mode 100644 index 0000000000000..95fa476d6fcee --- /dev/null +++ b/posthog/hogql_queries/utils/formula_ast.py @@ -0,0 +1,67 @@ +import ast +import operator +from typing import Any, Dict, List + + +class FormulaAST: + op_map = { + ast.Add: operator.add, + ast.Sub: operator.sub, + ast.Mult: operator.mul, + ast.Div: operator.truediv, + ast.Mod: operator.mod, + ast.Pow: operator.pow, + } + zipped_data: List[tuple[float]] + + def __init__(self, data: List[List[float]]): + self.zipped_data = list(zip(*data)) + + def call(self, node: str): + res = [] + for consts in self.zipped_data: + map = {} + for index, value in enumerate(consts): + map[chr(ord("`") + index + 1)] = value + result = self._evaluate(node.lower(), map) + res.append(result) + return res + + def _evaluate(self, node, const_map: Dict[str, Any]): + if isinstance(node, (list, tuple)): + return [self._evaluate(sub_node, const_map) for sub_node in node] + + elif isinstance(node, str): + return self._evaluate(ast.parse(node), const_map) + + elif isinstance(node, ast.Module): + values = [] + for body in node.body: + values.append(self._evaluate(body, const_map)) + if len(values) == 1: + values = values[0] + return values + + elif isinstance(node, ast.Expr): + return self._evaluate(node.value, const_map) + + elif isinstance(node, ast.BinOp): + left = self._evaluate(node.left, const_map) + op = node.op + right = self._evaluate(node.right, const_map) + + try: + return self.op_map[type(op)](left, right) + except KeyError: + raise ValueError(f"Operator {op.__class__.__name__} not supported") + + elif isinstance(node, ast.Num): + return 
node.n + + elif isinstance(node, ast.Name): + try: + return const_map[node.id] + except KeyError: + raise ValueError(f"Constant {node.id} not supported") + + raise TypeError(f"Unsupported operation: {node.__class__.__name__}") diff --git a/posthog/hogql_queries/utils/query_previous_period_date_range.py b/posthog/hogql_queries/utils/query_previous_period_date_range.py new file mode 100644 index 0000000000000..ac16f0b9eec10 --- /dev/null +++ b/posthog/hogql_queries/utils/query_previous_period_date_range.py @@ -0,0 +1,61 @@ +from datetime import datetime +from typing import Optional, Dict, Tuple + +from posthog.hogql_queries.utils.query_date_range import QueryDateRange +from posthog.models.team import Team +from posthog.schema import DateRange, IntervalType +from posthog.utils import get_compare_period_dates, relative_date_parse_with_delta_mapping + + +# Originally similar to posthog/queries/query_date_range.py but rewritten to be used in HogQL queries +class QueryPreviousPeriodDateRange(QueryDateRange): + """Translation of the raw `date_from` and `date_to` filter values to datetimes.""" + + _team: Team + _date_range: Optional[DateRange] + _interval: Optional[IntervalType] + _now_without_timezone: datetime + + def __init__( + self, date_range: Optional[DateRange], team: Team, interval: Optional[IntervalType], now: datetime + ) -> None: + super().__init__(date_range, team, interval, now) + + def date_from_delta_mappings(self) -> Dict[str, int] | None: + if self._date_range and isinstance(self._date_range.date_from, str) and self._date_range.date_from != "all": + delta_mapping = relative_date_parse_with_delta_mapping( + self._date_range.date_from, self._team.timezone_info, now=self.now_with_timezone + )[1] + return delta_mapping + + return None + + def date_to_delta_mappings(self) -> Dict[str, int] | None: + if self._date_range and self._date_range.date_to: + delta_mapping = relative_date_parse_with_delta_mapping( + self._date_range.date_to, self._team.timezone_info, always_truncate=True, now=self.now_with_timezone + )[1] + return delta_mapping + return None + + def dates(self) -> Tuple[datetime, datetime]: + current_period_date_from = super().date_from() + current_period_date_to = super().date_to() + + previous_period_date_from, previous_period_date_to = get_compare_period_dates( + current_period_date_from, + current_period_date_to, + self.date_from_delta_mappings(), + self.date_to_delta_mappings(), + self.interval_name, + ) + + return previous_period_date_from, previous_period_date_to + + def date_to(self) -> datetime: + previous_period_date_to = self.dates()[1] + return previous_period_date_to + + def date_from(self) -> datetime: + previous_period_date_from = self.dates()[0] + return previous_period_date_from diff --git a/posthog/migrations/0351_team_surveys_opt_in.py b/posthog/migrations/0351_team_surveys_opt_in.py new file mode 100644 index 0000000000000..c1722b7a11000 --- /dev/null +++ b/posthog/migrations/0351_team_surveys_opt_in.py @@ -0,0 +1,18 @@ +# Generated by Django 3.2.19 on 2023-09-20 14:40 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("posthog", "0350_add_notebook_text_content"), + ] + + operations = [ + migrations.AddField( + model_name="team", + name="surveys_opt_in", + field=models.BooleanField(blank=True, null=True), + ), + ] diff --git a/posthog/models/filters/__init__.py b/posthog/models/filters/__init__.py index db19d8addc105..fa75a96fc7308 100644 --- a/posthog/models/filters/__init__.py +++ 
b/posthog/models/filters/__init__.py @@ -19,3 +19,5 @@ AnyFilter: TypeAlias = ( Filter | PathFilter | RetentionFilter | StickinessFilter | SessionRecordingsFilter | PropertiesTimelineFilter ) + +AnyInsightFilter: TypeAlias = Filter | PathFilter | RetentionFilter | StickinessFilter diff --git a/posthog/models/filters/filter.py b/posthog/models/filters/filter.py index e0549650981e6..816e1a846d7fe 100644 --- a/posthog/models/filters/filter.py +++ b/posthog/models/filters/filter.py @@ -1,5 +1,6 @@ from .base_filter import BaseFilter from .mixins.common import ( + AggregationAxisMixin, BreakdownMixin, BreakdownValueMixin, ClientQueryIdMixin, @@ -88,6 +89,7 @@ class Filter( UpdatedAfterMixin, ClientQueryIdMixin, SampleMixin, + AggregationAxisMixin, BaseFilter, ): """ diff --git a/posthog/models/filters/mixins/common.py b/posthog/models/filters/mixins/common.py index bbb727407c6be..b7303ea3e3ebf 100644 --- a/posthog/models/filters/mixins/common.py +++ b/posthog/models/filters/mixins/common.py @@ -592,3 +592,21 @@ def sampling_factor(self) -> Optional[float]: @include_dict def sampling_factor_to_dict(self): return {SAMPLING_FACTOR: self.sampling_factor or ""} + + +class AggregationAxisMixin(BaseParamMixin): + """ + Aggregation Axis. Only used frontend side. + """ + + @cached_property + def aggregation_axis_format(self) -> Optional[str]: + return self._data.get("aggregation_axis_format", None) + + @cached_property + def aggregation_axis_prefix(self) -> Optional[str]: + return self._data.get("aggregation_axis_prefix", None) + + @cached_property + def aggregation_axis_postfix(self) -> Optional[str]: + return self._data.get("aggregation_axis_postfix", None) diff --git a/posthog/models/filters/retention_filter.py b/posthog/models/filters/retention_filter.py index 0d9e1568c5d3d..cd767606a6dd1 100644 --- a/posthog/models/filters/retention_filter.py +++ b/posthog/models/filters/retention_filter.py @@ -45,7 +45,10 @@ class RetentionFilter( BaseFilter, ): def __init__(self, data: Dict[str, Any] = {}, request: Optional[Request] = None, **kwargs) -> None: - data["insight"] = INSIGHT_RETENTION + if data: + data["insight"] = INSIGHT_RETENTION + else: + data = {"insight": INSIGHT_RETENTION} super().__init__(data, request, **kwargs) @cached_property diff --git a/posthog/models/filters/stickiness_filter.py b/posthog/models/filters/stickiness_filter.py index 5327406c90b95..dbabdd5e6897a 100644 --- a/posthog/models/filters/stickiness_filter.py +++ b/posthog/models/filters/stickiness_filter.py @@ -4,6 +4,8 @@ from rest_framework.exceptions import ValidationError from rest_framework.request import Request +from posthog.constants import INSIGHT_STICKINESS + from .base_filter import BaseFilter from .mixins.common import ( ClientQueryIdMixin, @@ -54,6 +56,10 @@ class StickinessFilter( team: "Team" def __init__(self, data: Optional[Dict[str, Any]] = None, request: Optional[Request] = None, **kwargs) -> None: + if data: + data["insight"] = INSIGHT_STICKINESS + else: + data = {"insight": INSIGHT_STICKINESS} super().__init__(data, request, **kwargs) team: Optional["Team"] = kwargs.get("team", None) if not team: diff --git a/posthog/models/filters/test/test_stickiness_filter.py b/posthog/models/filters/test/test_stickiness_filter.py index b2b5d70bda46d..85ef23a2e83be 100644 --- a/posthog/models/filters/test/test_stickiness_filter.py +++ b/posthog/models/filters/test/test_stickiness_filter.py @@ -37,7 +37,7 @@ def test_filter_properties(self): "properties": {}, } ], - "insight": "TRENDS", + "insight": "STICKINESS", "interval": 
"month", "sampling_factor": 0.1, }, diff --git a/posthog/models/team/team.py b/posthog/models/team/team.py index 5b3ceb039519f..73f1231d33bb0 100644 --- a/posthog/models/team/team.py +++ b/posthog/models/team/team.py @@ -150,6 +150,7 @@ class Team(UUIDClassicModel): session_recording_opt_in: models.BooleanField = models.BooleanField(default=False) capture_console_log_opt_in: models.BooleanField = models.BooleanField(null=True, blank=True) capture_performance_opt_in: models.BooleanField = models.BooleanField(null=True, blank=True) + surveys_opt_in: models.BooleanField = models.BooleanField(null=True, blank=True) session_recording_version: models.CharField = models.CharField(null=True, blank=True, max_length=24) signup_token: models.CharField = models.CharField(max_length=200, null=True, blank=True) is_demo: models.BooleanField = models.BooleanField(default=False) diff --git a/posthog/schema.py b/posthog/schema.py index 214b802e27558..207fb07d6e62c 100644 --- a/posthog/schema.py +++ b/posthog/schema.py @@ -124,13 +124,10 @@ class ElementType(BaseModel): class EmptyPropertyFilter(BaseModel): + pass model_config = ConfigDict( extra="forbid", ) - key: Optional[Any] = None - operator: Optional[Any] = None - type: Optional[Any] = None - value: Optional[Any] = None class EntityType(str, Enum): @@ -185,18 +182,7 @@ class FunnelConversionWindowTimeUnit(str, Enum): month = "month" -class FunnelLayout(str, Enum): - horizontal = "horizontal" - vertical = "vertical" - - -class FunnelPathType(str, Enum): - funnel_path_before_step = "funnel_path_before_step" - funnel_path_between_steps = "funnel_path_between_steps" - funnel_path_after_step = "funnel_path_after_step" - - -class FunnelStepRangeEntityFilter(BaseModel): +class FunnelExclusion(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -210,6 +196,17 @@ class FunnelStepRangeEntityFilter(BaseModel): type: Optional[EntityType] = None +class FunnelLayout(str, Enum): + horizontal = "horizontal" + vertical = "vertical" + + +class FunnelPathType(str, Enum): + funnel_path_before_step = "funnel_path_before_step" + funnel_path_between_steps = "funnel_path_between_steps" + funnel_path_after_step = "funnel_path_after_step" + + class FunnelStepReference(str, Enum): total = "total" previous = "previous" @@ -221,11 +218,6 @@ class FunnelVizType(str, Enum): trends = "trends" -class FunnelCorrelationPersonConverted(str, Enum): - true = "true" - false = "false" - - class HogQLNotice(BaseModel): model_config = ConfigDict( extra="forbid", @@ -278,11 +270,8 @@ class PathsFilter(BaseModel): local_path_cleaning_filters: Optional[List[PathCleaningFilter]] = None max_edge_weight: Optional[float] = None min_edge_weight: Optional[float] = None - path_dropoff_key: Optional[str] = None - path_end_key: Optional[str] = None path_groupings: Optional[List[str]] = None path_replacements: Optional[bool] = None - path_start_key: Optional[str] = None path_type: Optional[PathType] = None paths_hogql_expression: Optional[str] = None start_point: Optional[str] = None @@ -440,7 +429,6 @@ class StickinessFilter(BaseModel): show_legend: Optional[bool] = None show_values_on_series: Optional[bool] = None shown_as: Optional[ShownAsValue] = None - stickiness_days: Optional[float] = None class TimeToSeeDataSessionsQueryResponse(BaseModel): @@ -469,6 +457,17 @@ class TrendsFilter(BaseModel): smoothing_intervals: Optional[float] = None +class TrendsQueryResponse(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + is_cached: Optional[bool] = None + last_refresh: Optional[str] = 
None + next_allowed_client_refresh: Optional[str] = None + result: List[Dict[str, Any]] + timings: Optional[List[QueryTiming]] = None + + class Breakdown(BaseModel): model_config = ConfigDict( extra="forbid", @@ -541,18 +540,10 @@ class FunnelsFilter(BaseModel): bin_count: Optional[Union[float, str]] = None breakdown_attribution_type: Optional[BreakdownAttributionType] = None breakdown_attribution_value: Optional[float] = None - drop_off: Optional[bool] = None - entrance_period_start: Optional[str] = None - exclusions: Optional[List[FunnelStepRangeEntityFilter]] = None - funnel_advanced: Optional[bool] = None + exclusions: Optional[List[FunnelExclusion]] = None funnel_aggregate_by_hogql: Optional[str] = None - funnel_correlation_person_converted: Optional[FunnelCorrelationPersonConverted] = None - funnel_correlation_person_entity: Optional[Dict[str, Any]] = None - funnel_custom_steps: Optional[List[float]] = None funnel_from_step: Optional[float] = None funnel_order_type: Optional[StepOrderValue] = None - funnel_step: Optional[float] = None - funnel_step_breakdown: Optional[Union[str, List[float], float]] = None funnel_step_reference: Optional[FunnelStepReference] = None funnel_to_step: Optional[float] = None funnel_viz_type: Optional[FunnelVizType] = None @@ -1111,6 +1102,7 @@ class TrendsQuery(BaseModel): PropertyGroupFilter, ] ] = Field(default=None, description="Property filters for all series") + response: Optional[TrendsQueryResponse] = None samplingFactor: Optional[float] = Field(default=None, description="Sampling rate") series: List[Union[EventsNode, ActionsNode]] = Field(..., description="Events and actions to include") trendsFilter: Optional[TrendsFilter] = Field(default=None, description="Properties specific to the trends insight") diff --git a/posthog/settings/web.py b/posthog/settings/web.py index ca0c035765a7e..b062ce632a71a 100644 --- a/posthog/settings/web.py +++ b/posthog/settings/web.py @@ -320,6 +320,7 @@ def add_recorder_js_headers(headers, path, url): "^/api/organizations/@current/plugins/?$", "^api/projects/@current/feature_flags/my_flags/?$", "^/?api/projects/\\d+/query/?$", + "^/?api/instance_status/?$", ] ), ) diff --git a/posthog/temporal/tests/batch_exports/fixtures.py b/posthog/temporal/tests/batch_exports/fixtures.py index 65de3fd4910c3..d54db02304cc5 100644 --- a/posthog/temporal/tests/batch_exports/fixtures.py +++ b/posthog/temporal/tests/batch_exports/fixtures.py @@ -1,7 +1,13 @@ from uuid import UUID + from asgiref.sync import sync_to_async +from temporalio.client import Client -from posthog.batch_exports.models import BatchExport, BatchExportDestination, BatchExportRun +from posthog.batch_exports.models import ( + BatchExport, + BatchExportDestination, + BatchExportRun, +) from posthog.batch_exports.service import sync_batch_export @@ -32,3 +38,11 @@ def fetch_batch_export_runs(batch_export_id: UUID, limit: int = 100) -> list[Bat async def afetch_batch_export_runs(batch_export_id: UUID, limit: int = 100) -> list[BatchExportRun]: """Fetch the BatchExportRuns for a given BatchExport.""" return await sync_to_async(fetch_batch_export_runs)(batch_export_id, limit) # type: ignore + + +async def adelete_batch_export(batch_export: BatchExport, temporal: Client) -> None: + """Delete a BatchExport and its underlying Schedule.""" + handle = temporal.get_schedule_handle(str(batch_export.id)) + await handle.delete() + + await sync_to_async(batch_export.delete)() # type: ignore diff --git a/posthog/temporal/tests/batch_exports/test_batch_exports.py 
b/posthog/temporal/tests/batch_exports/test_batch_exports.py index 50ee763b5d4d9..2bd5dd4084db1 100644 --- a/posthog/temporal/tests/batch_exports/test_batch_exports.py +++ b/posthog/temporal/tests/batch_exports/test_batch_exports.py @@ -270,7 +270,7 @@ async def test_get_results_iterator(client): "elements_chain": "this that and the other", "elements": json.dumps("this that and the other"), "ip": "127.0.0.1", - "site_url": "http://localhost.com", + "site_url": "", "set": None, "set_once": None, } @@ -327,7 +327,7 @@ async def test_get_results_iterator_handles_duplicates(client): "elements_chain": "this that and the other", "elements": json.dumps("this that and the other"), "ip": "127.0.0.1", - "site_url": "http://localhost.com", + "site_url": "", "set": None, "set_once": None, } @@ -387,7 +387,7 @@ async def test_get_results_iterator_can_exclude_events(client): "elements_chain": "this that and the other", "elements": json.dumps("this that and the other"), "ip": "127.0.0.1", - "site_url": "http://localhost.com", + "site_url": "", "set": None, "set_once": None, } diff --git a/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py index b0d45d55f4b45..8a843831217b8 100644 --- a/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py @@ -1,19 +1,25 @@ +import asyncio import datetime as dt import json +import os from random import randint from uuid import uuid4 -import os import pytest +import pytest_asyncio +from asgiref.sync import sync_to_async from django.conf import settings from freezegun.api import freeze_time from google.cloud import bigquery +from temporalio import activity +from temporalio.client import WorkflowFailureError from temporalio.common import RetryPolicy from temporalio.testing import WorkflowEnvironment from temporalio.worker import UnsandboxedWorkflowRunner, Worker from posthog.api.test.test_organization import acreate_organization from posthog.api.test.test_team import acreate_team +from posthog.temporal.client import connect from posthog.temporal.tests.batch_exports.base import ( EventValues, amaterialize, @@ -21,16 +27,17 @@ ) from posthog.temporal.tests.batch_exports.fixtures import ( acreate_batch_export, + adelete_batch_export, afetch_batch_export_runs, ) from posthog.temporal.workflows.base import create_export_run, update_export_run_status -from posthog.temporal.workflows.clickhouse import ClickHouseClient from posthog.temporal.workflows.bigquery_batch_export import ( BigQueryBatchExportInputs, BigQueryBatchExportWorkflow, BigQueryInsertInputs, insert_into_bigquery_activity, ) +from posthog.temporal.workflows.clickhouse import ClickHouseClient TEST_TIME = dt.datetime.utcnow() @@ -60,7 +67,7 @@ def assert_events_in_bigquery(client, table_id, dataset_id, events, bq_ingested_ "properties": event.get("properties"), "set": properties.get("$set", None) if properties else None, "set_once": properties.get("$set_once", None) if properties else None, - "site_url": properties.get("$current_url", None) if properties else None, + "site_url": "", # For compatibility with CH which doesn't parse timezone component, so we add it here assuming UTC. 
"timestamp": dt.datetime.fromisoformat(event.get("timestamp") + "+00:00"), "team_id": event.get("team_id"), @@ -406,3 +413,145 @@ async def test_bigquery_export_workflow( events=events, bq_ingested_timestamp=ingested_timestamp, ) + + +@pytest_asyncio.fixture +async def organization(): + organization = await acreate_organization("test") + yield organization + await sync_to_async(organization.delete)() # type: ignore + + +@pytest_asyncio.fixture +async def team(organization): + team = await acreate_team(organization=organization) + yield team + await sync_to_async(team.delete)() # type: ignore + + +@pytest_asyncio.fixture +async def batch_export(team): + destination_data = { + "type": "BigQuery", + "config": { + "table_id": f"test_workflow_table_{team.pk}", + "project_id": "project_id", + "private_key": "private_key", + "private_key_id": "private_key_id", + "token_uri": "token_uri", + "client_email": "client_email", + "dataset_id": "BatchExports", + }, + } + batch_export_data = { + "name": "my-production-bigquery-export", + "destination": destination_data, + "interval": "hour", + } + + batch_export = await acreate_batch_export( + team_id=team.pk, + name=batch_export_data["name"], + destination_data=batch_export_data["destination"], + interval=batch_export_data["interval"], + ) + + yield batch_export + + client = await connect( + settings.TEMPORAL_HOST, + settings.TEMPORAL_PORT, + settings.TEMPORAL_NAMESPACE, + settings.TEMPORAL_CLIENT_ROOT_CA, + settings.TEMPORAL_CLIENT_CERT, + settings.TEMPORAL_CLIENT_KEY, + ) + await adelete_batch_export(batch_export, client) + + +@pytest.mark.django_db +@pytest.mark.asyncio +async def test_bigquery_export_workflow_handles_insert_activity_errors(team, batch_export): + """Test that BigQuery Export Workflow can gracefully handle errors when inserting BigQuery data.""" + workflow_id = str(uuid4()) + inputs = BigQueryBatchExportInputs( + team_id=team.pk, + batch_export_id=str(batch_export.id), + data_interval_end="2023-04-25 14:30:00.000000", + **batch_export.destination.config, + ) + + @activity.defn(name="insert_into_bigquery_activity") + async def insert_into_bigquery_activity_mocked(_: BigQueryInsertInputs) -> str: + raise ValueError("A useful error message") + + async with await WorkflowEnvironment.start_time_skipping() as activity_environment: + async with Worker( + activity_environment.client, + task_queue=settings.TEMPORAL_TASK_QUEUE, + workflows=[BigQueryBatchExportWorkflow], + activities=[create_export_run, insert_into_bigquery_activity_mocked, update_export_run_status], + workflow_runner=UnsandboxedWorkflowRunner(), + ): + with pytest.raises(WorkflowFailureError): + await activity_environment.client.execute_workflow( + BigQueryBatchExportWorkflow.run, + inputs, + id=workflow_id, + task_queue=settings.TEMPORAL_TASK_QUEUE, + retry_policy=RetryPolicy(maximum_attempts=1), + ) + + runs = await afetch_batch_export_runs(batch_export_id=batch_export.id) + assert len(runs) == 1 + + run = runs[0] + assert run.status == "Failed" + assert run.latest_error == "ValueError: A useful error message" + + +@pytest.mark.django_db +@pytest.mark.asyncio +async def test_bigquery_export_workflow_handles_cancellation(team, batch_export): + """Test that BigQuery Export Workflow can gracefully handle cancellations when inserting BigQuery data.""" + workflow_id = str(uuid4()) + inputs = BigQueryBatchExportInputs( + team_id=team.pk, + batch_export_id=str(batch_export.id), + data_interval_end="2023-04-25 14:30:00.000000", + **batch_export.destination.config, + ) + + 
@activity.defn(name="insert_into_s3_activity") + async def never_finish_activity(_: BigQueryInsertInputs) -> str: + while True: + activity.heartbeat() + await asyncio.sleep(1) + + async with await WorkflowEnvironment.start_time_skipping() as activity_environment: + async with Worker( + activity_environment.client, + task_queue=settings.TEMPORAL_TASK_QUEUE, + workflows=[BigQueryBatchExportWorkflow], + activities=[create_export_run, never_finish_activity, update_export_run_status], + workflow_runner=UnsandboxedWorkflowRunner(), + ): + handle = await activity_environment.client.start_workflow( + BigQueryBatchExportWorkflow.run, + inputs, + id=workflow_id, + task_queue=settings.TEMPORAL_TASK_QUEUE, + retry_policy=RetryPolicy(maximum_attempts=1), + ) + await asyncio.sleep(5) + await handle.cancel() + + with pytest.raises(WorkflowFailureError): + await handle.result() + + runs = await afetch_batch_export_runs(batch_export_id=batch_export.id) + assert len(runs) == 1 + + run = runs[0] + assert run.status == "Cancelled" + assert run.latest_error == "Cancelled" diff --git a/posthog/temporal/tests/batch_exports/test_postgres_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_postgres_batch_export_workflow.py index 499acbd29502d..ef98f1725fab1 100644 --- a/posthog/temporal/tests/batch_exports/test_postgres_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_postgres_batch_export_workflow.py @@ -1,3 +1,4 @@ +import asyncio import datetime as dt import json from random import randint @@ -5,15 +6,20 @@ import psycopg2 import pytest +import pytest_asyncio +from asgiref.sync import sync_to_async from django.conf import settings from django.test import override_settings from psycopg2 import sql +from temporalio import activity +from temporalio.client import WorkflowFailureError from temporalio.common import RetryPolicy from temporalio.testing import WorkflowEnvironment from temporalio.worker import UnsandboxedWorkflowRunner, Worker from posthog.api.test.test_organization import acreate_organization from posthog.api.test.test_team import acreate_team +from posthog.temporal.client import connect from posthog.temporal.tests.batch_exports.base import ( EventValues, amaterialize, @@ -21,6 +27,7 @@ ) from posthog.temporal.tests.batch_exports.fixtures import ( acreate_batch_export, + adelete_batch_export, afetch_batch_export_runs, ) from posthog.temporal.workflows.base import create_export_run, update_export_run_status @@ -59,7 +66,8 @@ def assert_events_in_postgres(connection, schema, table_name, events): "properties": event.get("properties"), "set": properties.get("$set", None) if properties else None, "set_once": properties.get("$set_once", None) if properties else None, - "site_url": properties.get("$current_url", None) if properties else None, + # Kept for backwards compatibility, but not exported anymore. + "site_url": None, # For compatibility with CH which doesn't parse timezone component, so we add it here assuming UTC. 
"timestamp": dt.datetime.fromisoformat(event.get("timestamp") + "+00:00"), "team_id": event.get("team_id"), @@ -439,3 +447,135 @@ async def test_postgres_export_workflow( assert run.status == "Completed" assert_events_in_postgres(postgres_connection, postgres_config["schema"], table_name, events) + + +@pytest_asyncio.fixture +async def organization(): + organization = await acreate_organization("test") + yield organization + await sync_to_async(organization.delete)() # type: ignore + + +@pytest_asyncio.fixture +async def team(organization): + team = await acreate_team(organization=organization) + yield team + await sync_to_async(team.delete)() # type: ignore + + +@pytest_asyncio.fixture +async def batch_export(team, postgres_config): + table_name = "test_workflow_table" + destination_data = {"type": "Postgres", "config": {**postgres_config, "table_name": table_name}} + batch_export_data = { + "name": "my-production-postgres-export", + "destination": destination_data, + "interval": "hour", + } + + batch_export = await acreate_batch_export( + team_id=team.pk, + name=batch_export_data["name"], + destination_data=batch_export_data["destination"], + interval=batch_export_data["interval"], + ) + + yield batch_export + + client = await connect( + settings.TEMPORAL_HOST, + settings.TEMPORAL_PORT, + settings.TEMPORAL_NAMESPACE, + settings.TEMPORAL_CLIENT_ROOT_CA, + settings.TEMPORAL_CLIENT_CERT, + settings.TEMPORAL_CLIENT_KEY, + ) + await adelete_batch_export(batch_export, client) + + +@pytest.mark.django_db +@pytest.mark.asyncio +async def test_postgres_export_workflow_handles_insert_activity_errors(team, batch_export): + """Test that Postgres Export Workflow can gracefully handle errors when inserting Postgres data.""" + workflow_id = str(uuid4()) + inputs = PostgresBatchExportInputs( + team_id=team.pk, + batch_export_id=str(batch_export.id), + data_interval_end="2023-04-25 14:30:00.000000", + **batch_export.destination.config, + ) + + @activity.defn(name="insert_into_postgres_activity") + async def insert_into_postgres_activity_mocked(_: PostgresInsertInputs) -> str: + raise ValueError("A useful error message") + + async with await WorkflowEnvironment.start_time_skipping() as activity_environment: + async with Worker( + activity_environment.client, + task_queue=settings.TEMPORAL_TASK_QUEUE, + workflows=[PostgresBatchExportWorkflow], + activities=[create_export_run, insert_into_postgres_activity_mocked, update_export_run_status], + workflow_runner=UnsandboxedWorkflowRunner(), + ): + with pytest.raises(WorkflowFailureError): + await activity_environment.client.execute_workflow( + PostgresBatchExportWorkflow.run, + inputs, + id=workflow_id, + task_queue=settings.TEMPORAL_TASK_QUEUE, + retry_policy=RetryPolicy(maximum_attempts=1), + ) + + runs = await afetch_batch_export_runs(batch_export_id=batch_export.id) + assert len(runs) == 1 + + run = runs[0] + assert run.status == "Failed" + assert run.latest_error == "ValueError: A useful error message" + + +@pytest.mark.django_db +@pytest.mark.asyncio +async def test_postgres_export_workflow_handles_cancellation(team, batch_export): + """Test that Postgres Export Workflow can gracefully handle cancellations when inserting Postgres data.""" + workflow_id = str(uuid4()) + inputs = PostgresBatchExportInputs( + team_id=team.pk, + batch_export_id=str(batch_export.id), + data_interval_end="2023-04-25 14:30:00.000000", + **batch_export.destination.config, + ) + + @activity.defn(name="insert_into_postgres_activity") + async def never_finish_activity(_: 
PostgresInsertInputs) -> str: + while True: + activity.heartbeat() + await asyncio.sleep(1) + + async with await WorkflowEnvironment.start_time_skipping() as activity_environment: + async with Worker( + activity_environment.client, + task_queue=settings.TEMPORAL_TASK_QUEUE, + workflows=[PostgresBatchExportWorkflow], + activities=[create_export_run, never_finish_activity, update_export_run_status], + workflow_runner=UnsandboxedWorkflowRunner(), + ): + handle = await activity_environment.client.start_workflow( + PostgresBatchExportWorkflow.run, + inputs, + id=workflow_id, + task_queue=settings.TEMPORAL_TASK_QUEUE, + retry_policy=RetryPolicy(maximum_attempts=1), + ) + await asyncio.sleep(5) + await handle.cancel() + + with pytest.raises(WorkflowFailureError): + await handle.result() + + runs = await afetch_batch_export_runs(batch_export_id=batch_export.id) + assert len(runs) == 1 + + run = runs[0] + assert run.status == "Cancelled" + assert run.latest_error == "Cancelled" diff --git a/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py index 08f0d285a944c..2511580358e72 100644 --- a/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py @@ -1,3 +1,4 @@ +import asyncio import datetime as dt import functools import gzip @@ -12,15 +13,20 @@ import botocore.exceptions import brotli import pytest +import pytest_asyncio +from asgiref.sync import sync_to_async from django.conf import settings from django.test import Client as HttpClient from django.test import override_settings +from temporalio import activity +from temporalio.client import WorkflowFailureError from temporalio.common import RetryPolicy from temporalio.testing import WorkflowEnvironment from temporalio.worker import UnsandboxedWorkflowRunner, Worker from posthog.api.test.test_organization import acreate_organization from posthog.api.test.test_team import acreate_team +from posthog.temporal.client import connect from posthog.temporal.tests.batch_exports.base import ( EventValues, amaterialize, @@ -29,6 +35,7 @@ ) from posthog.temporal.tests.batch_exports.fixtures import ( acreate_batch_export, + adelete_batch_export, afetch_batch_export_runs, ) from posthog.temporal.workflows.base import create_export_run, update_export_run_status @@ -1072,6 +1079,149 @@ async def test_s3_export_workflow_with_minio_bucket_produces_no_duplicates( assert_events_in_s3(s3_client, bucket_name, prefix, events, compression) +@pytest_asyncio.fixture +async def organization(): + organization = await acreate_organization("test") + yield organization + await sync_to_async(organization.delete)() # type: ignore + + +@pytest_asyncio.fixture +async def team(organization): + team = await acreate_team(organization=organization) + yield team + await sync_to_async(team.delete)() # type: ignore + + +@pytest_asyncio.fixture +async def batch_export(team): + prefix = f"posthog-events-{str(uuid4())}" + destination_data = { + "type": "S3", + "config": { + "bucket_name": "test-bucket", + "region": "us-east-1", + "prefix": prefix, + "aws_access_key_id": "object_storage_root_user", + "aws_secret_access_key": "object_storage_root_password", + "compression": "gzip", + }, + } + + batch_export_data = { + "name": "my-production-s3-bucket-destination", + "destination": destination_data, + "interval": "hour", + } + + batch_export = await acreate_batch_export( + team_id=team.pk, + 
name=batch_export_data["name"], + destination_data=batch_export_data["destination"], + interval=batch_export_data["interval"], + ) + + yield batch_export + + client = await connect( + settings.TEMPORAL_HOST, + settings.TEMPORAL_PORT, + settings.TEMPORAL_NAMESPACE, + settings.TEMPORAL_CLIENT_ROOT_CA, + settings.TEMPORAL_CLIENT_CERT, + settings.TEMPORAL_CLIENT_KEY, + ) + await adelete_batch_export(batch_export, client) + + +@pytest.mark.django_db +@pytest.mark.asyncio +async def test_s3_export_workflow_handles_insert_activity_errors(team, batch_export): + """Test that S3 Export Workflow can gracefully handle errors when inserting S3 data.""" + workflow_id = str(uuid4()) + inputs = S3BatchExportInputs( + team_id=team.pk, + batch_export_id=str(batch_export.id), + data_interval_end="2023-04-25 14:30:00.000000", + **batch_export.destination.config, + ) + + @activity.defn(name="insert_into_s3_activity") + async def insert_into_s3_activity_mocked(_: S3InsertInputs) -> str: + raise ValueError("A useful error message") + + async with await WorkflowEnvironment.start_time_skipping() as activity_environment: + async with Worker( + activity_environment.client, + task_queue=settings.TEMPORAL_TASK_QUEUE, + workflows=[S3BatchExportWorkflow], + activities=[create_export_run, insert_into_s3_activity_mocked, update_export_run_status], + workflow_runner=UnsandboxedWorkflowRunner(), + ): + with pytest.raises(WorkflowFailureError): + await activity_environment.client.execute_workflow( + S3BatchExportWorkflow.run, + inputs, + id=workflow_id, + task_queue=settings.TEMPORAL_TASK_QUEUE, + retry_policy=RetryPolicy(maximum_attempts=1), + ) + + runs = await afetch_batch_export_runs(batch_export_id=batch_export.id) + assert len(runs) == 1 + + run = runs[0] + assert run.status == "Failed" + assert run.latest_error == "ValueError: A useful error message" + + +@pytest.mark.django_db +@pytest.mark.asyncio +async def test_s3_export_workflow_handles_cancellation(team, batch_export): + """Test that S3 Export Workflow can gracefully handle cancellations when inserting S3 data.""" + workflow_id = str(uuid4()) + inputs = S3BatchExportInputs( + team_id=team.pk, + batch_export_id=str(batch_export.id), + data_interval_end="2023-04-25 14:30:00.000000", + **batch_export.destination.config, + ) + + @activity.defn(name="insert_into_s3_activity") + async def never_finish_activity(_: S3InsertInputs) -> str: + while True: + activity.heartbeat() + await asyncio.sleep(1) + + async with await WorkflowEnvironment.start_time_skipping() as activity_environment: + async with Worker( + activity_environment.client, + task_queue=settings.TEMPORAL_TASK_QUEUE, + workflows=[S3BatchExportWorkflow], + activities=[create_export_run, never_finish_activity, update_export_run_status], + workflow_runner=UnsandboxedWorkflowRunner(), + ): + handle = await activity_environment.client.start_workflow( + S3BatchExportWorkflow.run, + inputs, + id=workflow_id, + task_queue=settings.TEMPORAL_TASK_QUEUE, + retry_policy=RetryPolicy(maximum_attempts=1), + ) + await asyncio.sleep(5) + await handle.cancel() + + with pytest.raises(WorkflowFailureError): + await handle.result() + + runs = await afetch_batch_export_runs(batch_export_id=batch_export.id) + assert len(runs) == 1 + + run = runs[0] + assert run.status == "Cancelled" + assert run.latest_error == "Cancelled" + + # We don't care about these for the next test, just need something to be defined. 
base_inputs = { "bucket_name": "test", diff --git a/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py index 979929d1ce205..af82608baa8a9 100644 --- a/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py @@ -1,3 +1,4 @@ +import asyncio import datetime as dt import gzip import json @@ -6,10 +7,13 @@ from uuid import uuid4 import pytest +import pytest_asyncio import responses +from asgiref.sync import sync_to_async from django.conf import settings from django.test import override_settings from requests.models import PreparedRequest +from temporalio import activity from temporalio.client import WorkflowFailureError from temporalio.common import RetryPolicy from temporalio.exceptions import ActivityError, ApplicationError @@ -18,6 +22,7 @@ from posthog.api.test.test_organization import acreate_organization from posthog.api.test.test_team import acreate_team +from posthog.temporal.client import connect from posthog.temporal.tests.batch_exports.base import ( EventValues, insert_events, @@ -25,6 +30,7 @@ ) from posthog.temporal.tests.batch_exports.fixtures import ( acreate_batch_export, + adelete_batch_export, afetch_batch_export_runs, ) from posthog.temporal.workflows.base import create_export_run, update_export_run_status @@ -32,6 +38,7 @@ from posthog.temporal.workflows.snowflake_batch_export import ( SnowflakeBatchExportInputs, SnowflakeBatchExportWorkflow, + SnowflakeInsertInputs, insert_into_snowflake_activity, ) @@ -645,3 +652,145 @@ async def test_snowflake_export_workflow_raises_error_on_copy_fail(): assert isinstance(err.__cause__, ActivityError) assert isinstance(err.__cause__.__cause__, ApplicationError) assert err.__cause__.__cause__.type == "SnowflakeFileNotLoadedError" + + +@pytest_asyncio.fixture +async def organization(): + organization = await acreate_organization("test") + yield organization + await sync_to_async(organization.delete)() # type: ignore + + +@pytest_asyncio.fixture +async def team(organization): + team = await acreate_team(organization=organization) + yield team + await sync_to_async(team.delete)() # type: ignore + + +@pytest_asyncio.fixture +async def batch_export(team): + destination_data = { + "type": "Snowflake", + "config": { + "user": "hazzadous", + "password": "password", + "account": "account", + "database": "PostHog", + "schema": "test", + "warehouse": "COMPUTE_WH", + "table_name": "events", + }, + } + batch_export_data = { + "name": "my-production-snowflake-export", + "destination": destination_data, + "interval": "hour", + } + + batch_export = await acreate_batch_export( + team_id=team.pk, + name=batch_export_data["name"], + destination_data=batch_export_data["destination"], + interval=batch_export_data["interval"], + ) + + yield batch_export + + client = await connect( + settings.TEMPORAL_HOST, + settings.TEMPORAL_PORT, + settings.TEMPORAL_NAMESPACE, + settings.TEMPORAL_CLIENT_ROOT_CA, + settings.TEMPORAL_CLIENT_CERT, + settings.TEMPORAL_CLIENT_KEY, + ) + await adelete_batch_export(batch_export, client) + + +@pytest.mark.django_db +@pytest.mark.asyncio +async def test_snowflake_export_workflow_handles_insert_activity_errors(team, batch_export): + """Test that Snowflake Export Workflow can gracefully handle errors when inserting Snowflake data.""" + workflow_id = str(uuid4()) + inputs = SnowflakeBatchExportInputs( + team_id=team.pk, + 
batch_export_id=str(batch_export.id), + data_interval_end="2023-04-25 14:30:00.000000", + **batch_export.destination.config, + ) + + @activity.defn(name="insert_into_snowflake_activity") + async def insert_into_snowflake_activity_mocked(_: SnowflakeInsertInputs) -> str: + raise ValueError("A useful error message") + + async with await WorkflowEnvironment.start_time_skipping() as activity_environment: + async with Worker( + activity_environment.client, + task_queue=settings.TEMPORAL_TASK_QUEUE, + workflows=[SnowflakeBatchExportWorkflow], + activities=[create_export_run, insert_into_snowflake_activity_mocked, update_export_run_status], + workflow_runner=UnsandboxedWorkflowRunner(), + ): + with pytest.raises(WorkflowFailureError): + await activity_environment.client.execute_workflow( + SnowflakeBatchExportWorkflow.run, + inputs, + id=workflow_id, + task_queue=settings.TEMPORAL_TASK_QUEUE, + retry_policy=RetryPolicy(maximum_attempts=1), + ) + + runs = await afetch_batch_export_runs(batch_export_id=batch_export.id) + assert len(runs) == 1 + + run = runs[0] + assert run.status == "Failed" + assert run.latest_error == "ValueError: A useful error message" + + +@pytest.mark.django_db +@pytest.mark.asyncio +async def test_snowflake_export_workflow_handles_cancellation(team, batch_export): + """Test that Snowflake Export Workflow can gracefully handle cancellations when inserting Snowflake data.""" + workflow_id = str(uuid4()) + inputs = SnowflakeBatchExportInputs( + team_id=team.pk, + batch_export_id=str(batch_export.id), + data_interval_end="2023-04-25 14:30:00.000000", + **batch_export.destination.config, + ) + + @activity.defn(name="insert_into_snowflake_activity") + async def never_finish_activity(_: SnowflakeInsertInputs) -> str: + while True: + activity.heartbeat() + await asyncio.sleep(1) + + async with await WorkflowEnvironment.start_time_skipping() as activity_environment: + async with Worker( + activity_environment.client, + task_queue=settings.TEMPORAL_TASK_QUEUE, + workflows=[SnowflakeBatchExportWorkflow], + activities=[create_export_run, never_finish_activity, update_export_run_status], + workflow_runner=UnsandboxedWorkflowRunner(), + ): + handle = await activity_environment.client.start_workflow( + SnowflakeBatchExportWorkflow.run, + inputs, + id=workflow_id, + task_queue=settings.TEMPORAL_TASK_QUEUE, + retry_policy=RetryPolicy(maximum_attempts=1), + ) + await asyncio.sleep(5) + await handle.cancel() + + with pytest.raises(WorkflowFailureError): + await handle.result() + + runs = await afetch_batch_export_runs(batch_export_id=batch_export.id) + assert len(runs) == 1 + + run = runs[0] + assert run.status == "Cancelled" + assert run.latest_error == "Cancelled" diff --git a/posthog/temporal/workflows/batch_exports.py b/posthog/temporal/workflows/batch_exports.py index c79262a0fe86a..4506069dd2bf2 100644 --- a/posthog/temporal/workflows/batch_exports.py +++ b/posthog/temporal/workflows/batch_exports.py @@ -155,7 +155,8 @@ def iter_batch_records(batch) -> typing.Generator[dict[str, typing.Any], None, N "set": properties.get("$set", None) if properties else None, "set_once": properties.get("$set_once", None) if properties else None, "properties": properties, - "site_url": properties.get("$current_url", None) if properties else None, + # Kept for backwards compatibility, but not exported anymore. 
+ "site_url": "", "team_id": record.get("team_id"), "timestamp": record.get("timestamp").isoformat(), "uuid": record.get("uuid").decode(), diff --git a/posthog/temporal/workflows/bigquery_batch_export.py b/posthog/temporal/workflows/bigquery_batch_export.py index f1f247a0672fc..4be09632ff12f 100644 --- a/posthog/temporal/workflows/bigquery_batch_export.py +++ b/posthog/temporal/workflows/bigquery_batch_export.py @@ -6,7 +6,7 @@ from django.conf import settings from google.cloud import bigquery from google.oauth2 import service_account -from temporalio import activity, workflow +from temporalio import activity, exceptions, workflow from temporalio.common import RetryPolicy from posthog.batch_exports.service import BigQueryBatchExportInputs @@ -253,12 +253,21 @@ async def run(self, inputs: BigQueryBatchExportInputs): ), ) + except exceptions.ActivityError as e: + if isinstance(e.cause, exceptions.CancelledError): + workflow.logger.exception("BigQuery BatchExport was cancelled.") + update_inputs.status = "Cancelled" + else: + workflow.logger.exception("BigQuery BatchExport failed.", exc_info=e) + update_inputs.status = "Failed" + + update_inputs.latest_error = str(e.cause) + raise + except Exception as e: - workflow.logger.exception("Bigquery BatchExport failed.", exc_info=e) + workflow.logger.exception("BigQuery BatchExport failed with an unexpected exception.", exc_info=e) update_inputs.status = "Failed" - # Note: This shallows the exception type, but the message should be enough. - # If not, swap to repr(e) - update_inputs.latest_error = str(e) + update_inputs.latest_error = "An unexpected error has ocurred" raise finally: diff --git a/posthog/temporal/workflows/postgres_batch_export.py b/posthog/temporal/workflows/postgres_batch_export.py index b81c7496b3adb..b077ac892d698 100644 --- a/posthog/temporal/workflows/postgres_batch_export.py +++ b/posthog/temporal/workflows/postgres_batch_export.py @@ -6,7 +6,7 @@ import psycopg2 from django.conf import settings from psycopg2 import sql -from temporalio import activity, workflow +from temporalio import activity, exceptions, workflow from temporalio.common import RetryPolicy from posthog.batch_exports.service import PostgresBatchExportInputs @@ -254,12 +254,21 @@ async def run(self, inputs: PostgresBatchExportInputs): ), ) + except exceptions.ActivityError as e: + if isinstance(e.cause, exceptions.CancelledError): + workflow.logger.exception("Postgres BatchExport was cancelled.") + update_inputs.status = "Cancelled" + else: + workflow.logger.exception("Postgres BatchExport failed.", exc_info=e) + update_inputs.status = "Failed" + + update_inputs.latest_error = str(e.cause) + raise + except Exception as e: - workflow.logger.exception("Postgres BatchExport failed.", exc_info=e) + workflow.logger.exception("Postgres BatchExport failed with an unexpected exception.", exc_info=e) update_inputs.status = "Failed" - # Note: This shallows the exception type, but the message should be enough. 
- # If not, swap to repr(e)
- update_inputs.latest_error = str(e)
+ update_inputs.latest_error = "An unexpected error has occurred"
raise
finally:
diff --git a/posthog/temporal/workflows/s3_batch_export.py b/posthog/temporal/workflows/s3_batch_export.py
index 13bbf183e5d06..a4987b6024338 100644
--- a/posthog/temporal/workflows/s3_batch_export.py
+++ b/posthog/temporal/workflows/s3_batch_export.py
@@ -7,7 +7,7 @@
import boto3
from django.conf import settings
-from temporalio import activity, workflow
+from temporalio import activity, exceptions, workflow
from temporalio.common import RetryPolicy
from posthog.batch_exports.service import S3BatchExportInputs
@@ -481,10 +481,21 @@ async def run(self, inputs: S3BatchExportInputs):
),
)
+ except exceptions.ActivityError as e:
+ if isinstance(e.cause, exceptions.CancelledError):
+ workflow.logger.exception("S3 BatchExport was cancelled.")
+ update_inputs.status = "Cancelled"
+ else:
+ workflow.logger.exception("S3 BatchExport failed.", exc_info=e)
+ update_inputs.status = "Failed"
+
+ update_inputs.latest_error = str(e.cause)
+ raise
+
except Exception as e:
- workflow.logger.exception("S3 BatchExport failed.", exc_info=e)
+ workflow.logger.exception("S3 BatchExport failed with an unexpected exception.", exc_info=e)
update_inputs.status = "Failed"
- update_inputs.latest_error = str(e)
+ update_inputs.latest_error = "An unexpected error has occurred"
raise
finally:
diff --git a/posthog/temporal/workflows/snowflake_batch_export.py b/posthog/temporal/workflows/snowflake_batch_export.py
index 558e4bda7df75..a38f15d7aab73 100644
--- a/posthog/temporal/workflows/snowflake_batch_export.py
+++ b/posthog/temporal/workflows/snowflake_batch_export.py
@@ -6,7 +6,7 @@
import snowflake.connector
from django.conf import settings
from snowflake.connector.cursor import SnowflakeCursor
-from temporalio import activity, workflow
+from temporalio import activity, exceptions, workflow
from temporalio.common import RetryPolicy
from posthog.batch_exports.service import SnowflakeBatchExportInputs
@@ -342,12 +342,21 @@ async def run(self, inputs: SnowflakeBatchExportInputs):
),
)
+ except exceptions.ActivityError as e:
+ if isinstance(e.cause, exceptions.CancelledError):
+ workflow.logger.exception("Snowflake BatchExport was cancelled.")
+ update_inputs.status = "Cancelled"
+ else:
+ workflow.logger.exception("Snowflake BatchExport failed.", exc_info=e)
+ update_inputs.status = "Failed"
+
+ update_inputs.latest_error = str(e.cause)
+ raise
+
except Exception as e:
- workflow.logger.exception("Snowflake BatchExport failed.", exc_info=e)
+ workflow.logger.exception("Snowflake BatchExport failed with an unexpected exception.", exc_info=e)
update_inputs.status = "Failed"
- # Note: This shallows the exception type, but the message should be enough.
- # If not, swap to repr(e)
- update_inputs.latest_error = str(e)
+ update_inputs.latest_error = "An unexpected error has occurred"
raise
finally:
diff --git a/posthog/test/__snapshots__/test_feature_flag.ambr b/posthog/test/__snapshots__/test_feature_flag.ambr
index d9ed15e4f2cdc..84422798972fb 100644
--- a/posthog/test/__snapshots__/test_feature_flag.ambr
+++ b/posthog/test/__snapshots__/test_feature_flag.ambr
@@ -19,6 +19,7 @@
"posthog_team"."session_recording_opt_in",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
+ "posthog_team"."surveys_opt_in",
"posthog_team"."session_recording_version",
"posthog_team"."signup_token",
"posthog_team"."is_demo",
diff --git a/posthog/test/base.py b/posthog/test/base.py
index 8b66387037a7c..5457bbe4056bc 100644
--- a/posthog/test/base.py
+++ b/posthog/test/base.py
@@ -12,6 +12,7 @@
import pytest
import sqlparse
from django.apps import apps
+from django.core.cache import cache
from django.db import connection, connections
from django.db.migrations.executor import MigrationExecutor
from django.test import TestCase, TransactionTestCase, override_settings
@@ -232,6 +233,7 @@ class APIBaseTest(TestMixin, ErrorResponsesMixin, DRFTestCase):
def setUp(self):
super().setUp()
+ cache.clear()
TEST_clear_cloud_cache(self.initial_cloud_mode)
TEST_clear_instance_license_cache()
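
Note on the shared pattern: the BigQuery, Postgres, S3, and Snowflake workflow hunks above all map activity failures to run statuses in the same way. Below is a minimal sketch of that mapping, not part of the diff itself; the helper name classify_batch_export_error is invented for illustration, while ActivityError, CancelledError, and e.cause are used exactly as in the hunks (from temporalio.exceptions).

# Minimal sketch (illustration only, not part of this PR) of the status/error mapping
# shared by the batch export workflows above.
from temporalio import exceptions


def classify_batch_export_error(e: Exception) -> tuple[str, str]:
    """Return (status, latest_error) for an exception raised while awaiting the insert activity."""
    if isinstance(e, exceptions.ActivityError):
        if isinstance(e.cause, exceptions.CancelledError):
            # A cancelled activity surfaces as an ActivityError whose cause is a CancelledError,
            # which the workflows record as a "Cancelled" run.
            return "Cancelled", str(e.cause)
        # Any other activity failure keeps its cause as the error message,
        # e.g. "ValueError: A useful error message".
        return "Failed", str(e.cause)
    # Anything else is unexpected; the run is failed with a generic message so internals do not leak.
    return "Failed", "An unexpected error has occurred"

The new tests above assert exactly these outcomes: the mocked activity raising ValueError leaves the run "Failed" with latest_error "ValueError: A useful error message", and the cancelled workflow leaves it "Cancelled" with latest_error "Cancelled".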