diff --git a/ee/clickhouse/views/test/test_clickhouse_experiments.py b/ee/clickhouse/views/test/test_clickhouse_experiments.py index 9afecbfc846bd..23cde87e344e1 100644 --- a/ee/clickhouse/views/test/test_clickhouse_experiments.py +++ b/ee/clickhouse/views/test/test_clickhouse_experiments.py @@ -1505,7 +1505,7 @@ def test_experiment_flow_with_event_results_for_three_test_variants(self): self.assertAlmostEqual(response_data["expected_loss"], 1, places=2) -# @flaky(max_runs=10, min_passes=1) +@flaky(max_runs=10, min_passes=1) class ClickhouseTestTrendExperimentResults(ClickhouseTestMixin, APILicensedTest): @snapshot_clickhouse_queries def test_experiment_flow_with_event_results(self): diff --git a/ee/tasks/subscriptions/subscription_utils.py b/ee/tasks/subscriptions/subscription_utils.py index b75e26ca37856..ccd517b356cd9 100644 --- a/ee/tasks/subscriptions/subscription_utils.py +++ b/ee/tasks/subscriptions/subscription_utils.py @@ -2,7 +2,7 @@ from typing import List, Tuple, Union from django.conf import settings import structlog -from celery import group +from celery import chain from prometheus_client import Histogram from posthog.models.dashboard_tile import get_tiles_ordered_by_position @@ -45,8 +45,9 @@ def generate_assets( ExportedAsset.objects.bulk_create(assets) # Wait for all assets to be exported - tasks = [exporter.export_asset.s(asset.id) for asset in assets] - parallel_job = group(tasks).apply_async() + tasks = [exporter.export_asset.si(asset.id) for asset in assets] + # run them one after the other so we don't exhaust celery workers + parallel_job = chain(*tasks).apply_async() wait_for_parallel_celery_group( parallel_job, max_timeout=timedelta(minutes=settings.ASSET_GENERATION_MAX_TIMEOUT_MINUTES) diff --git a/ee/tasks/test/subscriptions/test_subscriptions_utils.py b/ee/tasks/test/subscriptions/test_subscriptions_utils.py index 440dcc97904f4..decdc8269e1e7 100644 --- a/ee/tasks/test/subscriptions/test_subscriptions_utils.py +++ b/ee/tasks/test/subscriptions/test_subscriptions_utils.py @@ -12,7 +12,7 @@ from posthog.test.base import APIBaseTest -@patch("ee.tasks.subscriptions.subscription_utils.group") +@patch("ee.tasks.subscriptions.subscription_utils.chain") @patch("ee.tasks.subscriptions.subscription_utils.exporter.export_asset") class TestSubscriptionsTasksUtils(APIBaseTest): dashboard: Dashboard @@ -36,7 +36,7 @@ def test_generate_assets_for_insight(self, mock_export_task: MagicMock, _mock_gr assert insights == [self.insight] assert len(assets) == 1 - assert mock_export_task.s.call_count == 1 + assert mock_export_task.si.call_count == 1 def test_generate_assets_for_dashboard(self, mock_export_task: MagicMock, _mock_group: MagicMock) -> None: subscription = create_subscription(team=self.team, dashboard=self.dashboard, created_by=self.user) @@ -46,7 +46,7 @@ def test_generate_assets_for_dashboard(self, mock_export_task: MagicMock, _mock_ assert len(insights) == len(self.tiles) assert len(assets) == DEFAULT_MAX_ASSET_COUNT - assert mock_export_task.s.call_count == DEFAULT_MAX_ASSET_COUNT + assert mock_export_task.si.call_count == DEFAULT_MAX_ASSET_COUNT def test_raises_if_missing_resource(self, _mock_export_task: MagicMock, _mock_group: MagicMock) -> None: subscription = create_subscription(team=self.team, created_by=self.user) @@ -70,7 +70,7 @@ def test_excludes_deleted_insights_for_dashboard(self, mock_export_task: MagicMo assert len(insights) == 1 assert len(assets) == 1 - assert mock_export_task.s.call_count == 1 + assert mock_export_task.si.call_count == 1 def 
test_cancels_children_if_timed_out(self, _mock_export_task: MagicMock, mock_group: MagicMock) -> None: # mock the group so that its children are never ready, diff --git a/frontend/__snapshots__/scenes-app-surveys--new-survey.png b/frontend/__snapshots__/scenes-app-surveys--new-survey.png index 7919d71d4d68e..5671de85e8c52 100644 Binary files a/frontend/__snapshots__/scenes-app-surveys--new-survey.png and b/frontend/__snapshots__/scenes-app-surveys--new-survey.png differ diff --git a/frontend/src/lib/components/CommandPalette/commandPaletteLogic.tsx b/frontend/src/lib/components/CommandPalette/commandPaletteLogic.tsx index b6be6e7961e8b..285d1baa5c51d 100644 --- a/frontend/src/lib/components/CommandPalette/commandPaletteLogic.tsx +++ b/frontend/src/lib/components/CommandPalette/commandPaletteLogic.tsx @@ -246,7 +246,7 @@ export const commandPaletteLogic = kea({ display: `View person ${input}`, executor: () => { const { push } = router.actions - push(urls.person(person.distinct_ids[0])) + push(urls.personByDistinctId(person.distinct_ids[0])) }, }, ], diff --git a/frontend/src/lib/components/UniversalSearch/UniversalSearchPopover.tsx b/frontend/src/lib/components/UniversalSearch/UniversalSearchPopover.tsx index f30336b7bb5a2..cbacb2ef845b3 100644 --- a/frontend/src/lib/components/UniversalSearch/UniversalSearchPopover.tsx +++ b/frontend/src/lib/components/UniversalSearch/UniversalSearchPopover.tsx @@ -91,7 +91,7 @@ function redirectOnSelectItems( } else if (groupType === TaxonomicFilterGroupType.Cohorts) { router.actions.push(urls.cohort(value)) } else if (groupType === TaxonomicFilterGroupType.Persons) { - router.actions.push(urls.person(String(value))) + router.actions.push(urls.personByDistinctId(String(value))) } else if (groupType.startsWith(TaxonomicFilterGroupType.GroupNamesPrefix)) { router.actions.push(urls.group((item as Group).group_type_index, String(value))) } else if (groupType === TaxonomicFilterGroupType.Insights) { diff --git a/frontend/src/lib/constants.tsx b/frontend/src/lib/constants.tsx index 660530a291cf9..758ddcf6e38e0 100644 --- a/frontend/src/lib/constants.tsx +++ b/frontend/src/lib/constants.tsx @@ -157,7 +157,6 @@ export const FEATURE_FLAGS = { FF_DASHBOARD_TEMPLATES: 'ff-dashboard-templates', // owner: @EDsCODE SHOW_PRODUCT_INTRO_EXISTING_PRODUCTS: 'show-product-intro-existing-products', // owner: @raquelmsmith ARTIFICIAL_HOG: 'artificial-hog', // owner: @Twixes - SURVEYS_MULTIPLE_CHOICE: 'surveys-multiple-choice', // owner: @liyiy CS_DASHBOARDS: 'cs-dashboards', // owner: @pauldambra PRODUCT_SPECIFIC_ONBOARDING: 'product-specific-onboarding', // owner: @raquelmsmith REDIRECT_SIGNUPS_TO_INSTANCE: 'redirect-signups-to-instance', // owner: @raquelmsmith @@ -169,6 +168,7 @@ export const FEATURE_FLAGS = { HOGQL_INSIGHTS: 'hogql-insights', // owner: @mariusandra WEBHOOKS_DENYLIST: 'webhooks-denylist', // owner: #team-pipeline SURVEYS_SITE_APP_DEPRECATION: 'surveys-site-app-deprecation', // owner: @neilkakkar + SURVEYS_MULTIPLE_QUESTIONS: 'surveys-multiple-questions', // owner: @liyiy } as const export type FeatureFlagKey = (typeof FEATURE_FLAGS)[keyof typeof FEATURE_FLAGS] diff --git a/frontend/src/queries/nodes/DataTable/queryFeatures.ts b/frontend/src/queries/nodes/DataTable/queryFeatures.ts index eacd6ed5e31d1..7416b323d0418 100644 --- a/frontend/src/queries/nodes/DataTable/queryFeatures.ts +++ b/frontend/src/queries/nodes/DataTable/queryFeatures.ts @@ -2,6 +2,7 @@ import { isEventsQuery, isHogQLQuery, isPersonsNode, + isWebOverviewStatsQuery, isWebTopClicksQuery, 
isWebTopPagesQuery, isWebTopSourcesQuery, @@ -47,7 +48,12 @@ export function getQueryFeatures(query: Node): Set { features.add(QueryFeature.personsSearch) } - if (isWebTopSourcesQuery(query) || isWebTopPagesQuery(query) || isWebTopClicksQuery(query)) { + if ( + isWebOverviewStatsQuery(query) || + isWebTopSourcesQuery(query) || + isWebTopPagesQuery(query) || + isWebTopClicksQuery(query) + ) { features.add(QueryFeature.columnsInResponse) features.add(QueryFeature.resultIsArrayOfArrays) } diff --git a/frontend/src/queries/nodes/DataTable/renderColumn.tsx b/frontend/src/queries/nodes/DataTable/renderColumn.tsx index 62530d88d8ee7..88b749007301e 100644 --- a/frontend/src/queries/nodes/DataTable/renderColumn.tsx +++ b/frontend/src/queries/nodes/DataTable/renderColumn.tsx @@ -205,7 +205,7 @@ export function renderColumn( } else if (key === 'person' && isPersonsNode(query.source)) { const personRecord = record as PersonType return ( - + ) diff --git a/frontend/src/queries/schema.json b/frontend/src/queries/schema.json index d77742371abd6..404038c58b04d 100644 --- a/frontend/src/queries/schema.json +++ b/frontend/src/queries/schema.json @@ -96,6 +96,9 @@ { "$ref": "#/definitions/TimeToSeeDataSessionsQuery" }, + { + "$ref": "#/definitions/WebOverviewStatsQuery" + }, { "$ref": "#/definitions/WebTopSourcesQuery" }, @@ -399,6 +402,9 @@ { "$ref": "#/definitions/TimeToSeeDataSessionsQuery" }, + { + "$ref": "#/definitions/WebOverviewStatsQuery" + }, { "$ref": "#/definitions/WebTopSourcesQuery" }, @@ -1207,6 +1213,10 @@ "response": { "$ref": "#/definitions/HogQLQueryResponse", "description": "Cached query response" + }, + "values": { + "description": "Constant values that can be referenced with the {placeholder} syntax in the query", + "type": "object" } }, "required": ["kind", "query"], @@ -2295,6 +2305,60 @@ "type": "object" }, "WebAnalyticsFilters": {}, + "WebOverviewStatsQuery": { + "additionalProperties": false, + "properties": { + "dateRange": { + "$ref": "#/definitions/DateRange" + }, + "filters": { + "$ref": "#/definitions/WebAnalyticsFilters" + }, + "kind": { + "const": "WebOverviewStatsQuery", + "type": "string" + }, + "response": { + "$ref": "#/definitions/WebOverviewStatsQueryResponse" + } + }, + "required": ["kind", "filters"], + "type": "object" + }, + "WebOverviewStatsQueryResponse": { + "additionalProperties": false, + "properties": { + "columns": { + "items": {}, + "type": "array" + }, + "is_cached": { + "type": "boolean" + }, + "last_refresh": { + "type": "string" + }, + "next_allowed_client_refresh": { + "type": "string" + }, + "result": { + "items": {}, + "type": "array" + }, + "timings": { + "items": { + "$ref": "#/definitions/QueryTiming" + }, + "type": "array" + }, + "types": { + "items": {}, + "type": "array" + } + }, + "required": ["result"], + "type": "object" + }, "WebTopClicksQuery": { "additionalProperties": false, "properties": { diff --git a/frontend/src/queries/schema.ts b/frontend/src/queries/schema.ts index b2e879887b371..52373a5e3cdd5 100644 --- a/frontend/src/queries/schema.ts +++ b/frontend/src/queries/schema.ts @@ -58,6 +58,7 @@ export enum NodeKind { LifecycleQuery = 'LifecycleQuery', // Web analytics queries + WebOverviewStatsQuery = 'WebOverviewStatsQuery', WebTopSourcesQuery = 'WebTopSourcesQuery', WebTopPagesQuery = 'WebTopPagesQuery', WebTopClicksQuery = 'WebTopClicksQuery', @@ -80,6 +81,7 @@ export type AnyDataNode = | HogQLQuery | HogQLMetadata | TimeToSeeDataSessionsQuery + | WebOverviewStatsQuery | WebTopSourcesQuery | WebTopClicksQuery | WebTopPagesQuery @@ 
-144,6 +146,8 @@ export interface HogQLQuery extends DataNode { kind: NodeKind.HogQLQuery query: string filters?: HogQLFilters + /** Constant values that can be referenced with the {placeholder} syntax in the query */ + values?: Record response?: HogQLQueryResponse } @@ -295,6 +299,7 @@ export interface DataTableNode extends Node, DataTableNodeViewProps { | PersonsNode | HogQLQuery | TimeToSeeDataSessionsQuery + | WebOverviewStatsQuery | WebTopSourcesQuery | WebTopClicksQuery | WebTopPagesQuery @@ -509,6 +514,17 @@ export interface WebAnalyticsQueryBase { dateRange?: DateRange } +export interface WebOverviewStatsQuery extends WebAnalyticsQueryBase { + kind: NodeKind.WebOverviewStatsQuery + filters: WebAnalyticsFilters + response?: WebOverviewStatsQueryResponse +} + +export interface WebOverviewStatsQueryResponse extends QueryResponse { + result: unknown[] + types?: unknown[] + columns?: unknown[] +} export interface WebTopSourcesQuery extends WebAnalyticsQueryBase { kind: NodeKind.WebTopSourcesQuery filters: WebAnalyticsFilters diff --git a/frontend/src/queries/utils.ts b/frontend/src/queries/utils.ts index 86c315f64414b..80148f5d0f08b 100644 --- a/frontend/src/queries/utils.ts +++ b/frontend/src/queries/utils.ts @@ -29,6 +29,7 @@ import { WebTopSourcesQuery, WebTopClicksQuery, WebTopPagesQuery, + WebOverviewStatsQuery, HogQLMetadata, } from '~/queries/schema' import { TaxonomicFilterGroupType, TaxonomicFilterValue } from 'lib/components/TaxonomicFilter/types' @@ -106,6 +107,10 @@ export function isHogQLMetadata(node?: Node | null): node is HogQLMetadata { return node?.kind === NodeKind.HogQLMetadata } +export function isWebOverviewStatsQuery(node?: Node | null): node is WebOverviewStatsQuery { + return node?.kind === NodeKind.WebOverviewStatsQuery +} + export function isWebTopSourcesQuery(node?: Node | null): node is WebTopSourcesQuery { return node?.kind === NodeKind.WebTopSourcesQuery } diff --git a/frontend/src/scenes/appScenes.ts b/frontend/src/scenes/appScenes.ts index 6a9a72da9994a..08e3d86ff793a 100644 --- a/frontend/src/scenes/appScenes.ts +++ b/frontend/src/scenes/appScenes.ts @@ -26,7 +26,7 @@ export const appScenes: Record any> = { [Scene.Replay]: () => import('./session-recordings/SessionRecordings'), [Scene.ReplaySingle]: () => import('./session-recordings/detail/SessionRecordingDetail'), [Scene.ReplayPlaylist]: () => import('./session-recordings/playlist/SessionRecordingsPlaylistScene'), - [Scene.Person]: () => import('./persons/Person'), + [Scene.Person]: () => import('./persons/PersonScene'), [Scene.Persons]: () => import('./persons/PersonsScene'), [Scene.Groups]: () => import('./groups/Groups'), [Scene.Group]: () => import('./groups/Group'), diff --git a/frontend/src/scenes/data-management/ingestion-warnings/IngestionWarningsView.tsx b/frontend/src/scenes/data-management/ingestion-warnings/IngestionWarningsView.tsx index ff79be8e258f8..b7c4abb20d8c2 100644 --- a/frontend/src/scenes/data-management/ingestion-warnings/IngestionWarningsView.tsx +++ b/frontend/src/scenes/data-management/ingestion-warnings/IngestionWarningsView.tsx @@ -36,9 +36,14 @@ const WARNING_TYPE_RENDERER = { return ( <> Refused to merge already identified person{' '} - {details.sourcePersonDistinctId} into{' '} - {details.targetPersonDistinctId} via an - $identify or $create_alias call (event uuid: {details.eventUuid}). + + {details.sourcePersonDistinctId} + {' '} + into{' '} + + {details.targetPersonDistinctId} + {' '} + via an $identify or $create_alias call (event uuid: {details.eventUuid}). 
) }, @@ -51,9 +56,9 @@ const WARNING_TYPE_RENDERER = { return ( <> Refused to merge an illegal distinct_id{' '} - {details.illegalDistinctId} with{' '} - {details.otherDistinctId} via an $identify or - $create_alias call (event uuid: {details.eventUuid}). + {details.illegalDistinctId} with{' '} + {details.otherDistinctId} via an + $identify or $create_alias call (event uuid: {details.eventUuid}). ) }, @@ -116,8 +121,8 @@ const WARNING_TYPE_RENDERER = { return ( <> Event ingestion has overflowed capacity for distinct_id{' '} - {details.overflowDistinctId}. Events will - still be processed, but are likely to be delayed longer than usual. + {details.overflowDistinctId}. + Events will still be processed, but are likely to be delayed longer than usual. ) }, diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeBacklink.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeBacklink.tsx index 154600a7e1d3f..fa8a0b818484c 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodeBacklink.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeBacklink.tsx @@ -68,7 +68,7 @@ function backlinkHref(id: string, type: TaxonomicFilterGroupType): string { } else if (type === TaxonomicFilterGroupType.Cohorts) { return urls.cohort(id) } else if (type === TaxonomicFilterGroupType.Persons) { - return urls.person(id) + return urls.personByDistinctId(id) } else if (type === TaxonomicFilterGroupType.Insights) { return urls.insightView(id as InsightModel['short_id']) } else if (type === TaxonomicFilterGroupType.FeatureFlags) { diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodePerson.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodePerson.tsx index a1f22aa53a15a..ba9270a42d3f6 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodePerson.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodePerson.tsx @@ -94,13 +94,13 @@ export const NotebookNodePerson = createPostHogWidgetNode urls.person(attrs.id), + href: (attrs) => urls.personByDistinctId(attrs.id), resizeable: true, attributes: { id: {}, }, pasteOptions: { - find: urls.person('(.+)', false), + find: urls.personByDistinctId('(.+)', false), getAttributes: async (match) => { return { id: match[1] } }, diff --git a/frontend/src/scenes/persons/PersonPreview.tsx b/frontend/src/scenes/persons/PersonPreview.tsx index db68be06e921f..7b9e61fe377aa 100644 --- a/frontend/src/scenes/persons/PersonPreview.tsx +++ b/frontend/src/scenes/persons/PersonPreview.tsx @@ -32,7 +32,7 @@ export function PersonPreview(props: PersonPreviewProps): JSX.Element | null { } const display = asDisplay(person) - const url = urls.person(person?.distinct_ids[0]) + const url = urls.personByDistinctId(person?.distinct_ids[0]) return (
@@ -51,7 +51,11 @@ export function PersonPreview(props: PersonPreviewProps): JSX.Element | null { onNotebookOpened={() => props.onClose?.()} size="small" /> - } to={urls.person(person?.distinct_ids[0])} /> + } + to={urls.personByDistinctId(person?.distinct_ids[0])} + />
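[Editor's note on the `ee/tasks/subscriptions/subscription_utils.py` hunk near the top of this diff: it swaps celery's `group()` for `chain()` with immutable signatures (`.si`) so subscription assets are exported one after another instead of fanning out across workers. A minimal sketch of that difference follows; `render_asset` and `asset_ids` are illustrative stand-ins, not PostHog's real `exporter.export_asset` or `ExportedAsset` ids.]

```python
# Sketch only: shows group() vs chain() dispatch semantics, assuming a local
# in-memory broker. Names here are hypothetical stand-ins for the hunk above.
from celery import Celery, chain, group

app = Celery("sketch", broker="memory://")


@app.task
def render_asset(asset_id: int) -> int:
    # stand-in for exporter.export_asset
    return asset_id


asset_ids = [1, 2, 3]

# group(): every task is queued immediately, so a large subscription can
# occupy many worker slots at once.
fan_out = group(render_asset.s(asset_id) for asset_id in asset_ids)

# chain() with .si(): tasks run strictly one after the other, and the
# immutable signature stops each task's return value being injected into
# the next task's arguments.
one_at_a_time = chain(*[render_asset.si(asset_id) for asset_id in asset_ids])

result = one_at_a_time.apply_async()  # same .apply_async() call site as before
```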
diff --git a/frontend/src/scenes/persons/Person.tsx b/frontend/src/scenes/persons/PersonScene.tsx similarity index 98% rename from frontend/src/scenes/persons/Person.tsx rename to frontend/src/scenes/persons/PersonScene.tsx index 73c14145d77a0..e6c7a1036bc05 100644 --- a/frontend/src/scenes/persons/Person.tsx +++ b/frontend/src/scenes/persons/PersonScene.tsx @@ -36,7 +36,7 @@ import { PersonDashboard } from './PersonDashboard' import { NotebookSelectButton } from 'scenes/notebooks/NotebookSelectButton/NotebookSelectButton' export const scene: SceneExport = { - component: Person, + component: PersonScene, logic: personsLogic, paramsToProps: ({ params: { _: rawUrlId } }): (typeof personsLogic)['props'] => ({ syncWithUrl: true, @@ -106,7 +106,7 @@ function PersonCaption({ person }: { person: PersonType }): JSX.Element { ) } -export function Person(): JSX.Element | null { +export function PersonScene(): JSX.Element | null { const { showCustomerSuccessDashboards, person, @@ -127,7 +127,7 @@ export function Person(): JSX.Element | null { return personLoading ? : } - const url = urls.person(urlId || person.distinct_ids[0] || String(person.id)) + const url = urls.personByDistinctId(urlId || person.distinct_ids[0] || String(person.id)) return ( <> diff --git a/frontend/src/scenes/persons/activityDescriptions.tsx b/frontend/src/scenes/persons/activityDescriptions.tsx index f6e13f0d0d921..f11568827c9dd 100644 --- a/frontend/src/scenes/persons/activityDescriptions.tsx +++ b/frontend/src/scenes/persons/activityDescriptions.tsx @@ -68,7 +68,7 @@ export function personActivityDescriber(logItem: ActivityLogItem): HumanizedChan } listParts={distinctIds.map((di) => ( - {di} + {di} ))} /> diff --git a/frontend/src/scenes/persons/person-utils.test.ts b/frontend/src/scenes/persons/person-utils.test.ts index c4cb7777b9572..2baca39945f4b 100644 --- a/frontend/src/scenes/persons/person-utils.test.ts +++ b/frontend/src/scenes/persons/person-utils.test.ts @@ -6,10 +6,10 @@ import { asLink, asDisplay } from './person-utils' describe('the person header', () => { describe('linking to a person', () => { const personLinksTestCases = [ - { distinctIds: ['a uuid'], expectedLink: urls.person('a uuid'), name: 'with one id' }, + { distinctIds: ['a uuid'], expectedLink: urls.personByDistinctId('a uuid'), name: 'with one id' }, { distinctIds: ['the first uuid', 'a uuid'], - expectedLink: urls.person('the first uuid'), + expectedLink: urls.personByDistinctId('the first uuid'), name: 'with more than one id', }, { @@ -19,7 +19,7 @@ describe('the person header', () => { }, { distinctIds: ['a+dicey/@!'], - expectedLink: urls.person('a+dicey/@!'), + expectedLink: urls.personByDistinctId('a+dicey/@!'), name: 'with no ids', }, ] diff --git a/frontend/src/scenes/persons/person-utils.ts b/frontend/src/scenes/persons/person-utils.ts index c1f9dc63b11d9..35928473b08be 100644 --- a/frontend/src/scenes/persons/person-utils.ts +++ b/frontend/src/scenes/persons/person-utils.ts @@ -60,7 +60,7 @@ export function asDisplay(person: PersonPropType | null | undefined, maxLength?: export const asLink = (person?: PersonPropType | null): string | undefined => person?.distinct_id - ? urls.person(person.distinct_id) + ? urls.personByDistinctId(person.distinct_id) : person?.distinct_ids?.length - ? urls.person(person.distinct_ids[0]) + ? 
urls.personByDistinctId(person.distinct_ids[0]) : undefined diff --git a/frontend/src/scenes/project-homepage/NewlySeenPersons.tsx b/frontend/src/scenes/project-homepage/NewlySeenPersons.tsx index 7b68fc572dc75..0aea88331c5e3 100644 --- a/frontend/src/scenes/project-homepage/NewlySeenPersons.tsx +++ b/frontend/src/scenes/project-homepage/NewlySeenPersons.tsx @@ -17,7 +17,7 @@ function PersonRow({ person }: { person: PersonType }): JSX.Element { return ( } diff --git a/frontend/src/scenes/retention/RetentionModal.tsx b/frontend/src/scenes/retention/RetentionModal.tsx index c53514f1444e3..b47409b554953 100644 --- a/frontend/src/scenes/retention/RetentionModal.tsx +++ b/frontend/src/scenes/retention/RetentionModal.tsx @@ -110,7 +110,9 @@ export function RetentionModal(): JSX.Element | null { ) : ( {asDisplay(personAppearances.person)} diff --git a/frontend/src/scenes/saved-insights/SavedInsights.tsx b/frontend/src/scenes/saved-insights/SavedInsights.tsx index cf2265f308ae9..b38b2df5c0790 100644 --- a/frontend/src/scenes/saved-insights/SavedInsights.tsx +++ b/frontend/src/scenes/saved-insights/SavedInsights.tsx @@ -242,6 +242,12 @@ export const QUERY_TYPES_METADATA: Record = { icon: InsightSQLIcon, inMenu: true, }, + [NodeKind.WebOverviewStatsQuery]: { + name: 'Overview Stats', + description: 'View overview stats for a website', + icon: InsightsTrendsIcon, + inMenu: true, + }, [NodeKind.WebTopSourcesQuery]: { name: 'Top Sources', description: 'View top sources for a website', diff --git a/frontend/src/scenes/scenes.ts b/frontend/src/scenes/scenes.ts index fc68124512e44..de3cf70b00df7 100644 --- a/frontend/src/scenes/scenes.ts +++ b/frontend/src/scenes/scenes.ts @@ -415,7 +415,7 @@ export const routes: Record = { }, {} as Record), [urls.replaySingle(':id')]: Scene.ReplaySingle, [urls.replayPlaylist(':id')]: Scene.ReplayPlaylist, - [urls.person('*', false)]: Scene.Person, + [urls.personByDistinctId('*', false)]: Scene.Person, [urls.persons()]: Scene.Persons, [urls.groups(':groupTypeIndex')]: Scene.Groups, [urls.group(':groupTypeIndex', ':groupKey', false)]: Scene.Group, diff --git a/frontend/src/scenes/surveys/Survey.tsx b/frontend/src/scenes/surveys/Survey.tsx index 65f9af689976d..0e33003094545 100644 --- a/frontend/src/scenes/surveys/Survey.tsx +++ b/frontend/src/scenes/surveys/Survey.tsx @@ -7,6 +7,7 @@ import { LemonSkeleton } from 'lib/lemon-ui/LemonSkeleton' import { LemonButton, LemonCheckbox, + LemonCollapse, LemonDivider, LemonInput, LemonSelect, @@ -25,13 +26,13 @@ import { RatingSurveyQuestion, } from '~/types' import { FlagSelector } from 'scenes/early-access-features/EarlyAccessFeature' -import { IconCancel, IconDelete, IconPlusMini } from 'lib/lemon-ui/icons' +import { IconCancel, IconDelete, IconPlus, IconPlusMini } from 'lib/lemon-ui/icons' import { SurveyView } from './SurveyView' import { SurveyAppearance } from './SurveyAppearance' import { SurveyAPIEditor } from './SurveyAPIEditor' import { featureFlagLogic as enabledFeaturesLogic } from 'lib/logic/featureFlagLogic' import { featureFlagLogic } from 'scenes/feature-flags/featureFlagLogic' -import { defaultSurveyFieldValues, defaultSurveyAppearance, SurveyQuestionLabel, NewSurvey } from './constants' +import { defaultSurveyFieldValues, defaultSurveyAppearance, NewSurvey } from './constants' import { FEATURE_FLAGS } from 'lib/constants' import { FeatureFlagReleaseConditions } from 'scenes/feature-flags/FeatureFlagReleaseConditions' @@ -115,168 +116,226 @@ export function SurveyForm({ id }: { id: string }): JSX.Element { /> +
Questions
{survey.questions.map( (question: LinkSurveyQuestion | SurveyQuestion | RatingSurveyQuestion, index: number) => ( - - { - const questionObj = survey.questions[0] - const isEditingQuestion = - defaultSurveyFieldValues[questionObj.type].questions[0].question !== - questionObj.question - const isEditingDescription = - defaultSurveyFieldValues[questionObj.type].questions[0].description !== - questionObj.description - const isEditingThankYouMessage = - defaultSurveyFieldValues[questionObj.type].appearance - .thankYouMessageHeader !== survey.appearance.thankYouMessageHeader - - setDefaultForQuestionType( - newType, - isEditingQuestion, - isEditingDescription, - isEditingThankYouMessage - ) - }} - options={[ - { - label: SurveyQuestionLabel[SurveyQuestionType.Open], - value: SurveyQuestionType.Open, - }, - { - label: SurveyQuestionLabel[SurveyQuestionType.Link], - value: SurveyQuestionType.Link, - }, - { - label: SurveyQuestionLabel[SurveyQuestionType.Rating], - value: SurveyQuestionType.Rating, - }, - ...(featureFlags[FEATURE_FLAGS.SURVEYS_MULTIPLE_CHOICE] - ? [ - { - label: SurveyQuestionLabel[SurveyQuestionType.SingleChoice], - value: SurveyQuestionType.SingleChoice, - }, - { - label: SurveyQuestionLabel[SurveyQuestionType.MultipleChoice], - value: SurveyQuestionType.MultipleChoice, - }, - ] - : []), - ]} - /> - - - - - {question.type === SurveyQuestionType.Link && ( - - - - )} - - - - {question.type === SurveyQuestionType.Rating && ( -
-
- - - - - - -
-
- - - - - - -
-
- )} - {(question.type === SurveyQuestionType.SingleChoice || - question.type === SurveyQuestionType.MultipleChoice) && ( -
- - {({ value, onChange }) => ( -
- {(value || []).map((choice: string, index: number) => ( -
+ + {question.question} + {survey.questions.length > 1 && ( + } + status="primary-alt" + data-attr={`delete-survey-question-${index}`} + onClick={() => { + setSurveyValue( + 'questions', + survey.questions.filter((_, i) => i !== index) + ) + }} + tooltipPlacement="topRight" + /> + )} +
+ ), + content: ( + <> + + { + const isEditingQuestion = + defaultSurveyFieldValues[question.type].questions[0] + .question !== question.question + const isEditingDescription = + defaultSurveyFieldValues[question.type].questions[0] + .description !== question.description + const isEditingThankYouMessage = + defaultSurveyFieldValues[question.type].appearance + .thankYouMessageHeader !== + survey.appearance.thankYouMessageHeader + setDefaultForQuestionType( + index, + newType, + isEditingQuestion, + isEditingDescription, + isEditingThankYouMessage + ) + }} + options={[ + { label: 'Open text', value: SurveyQuestionType.Open }, + { label: 'Link', value: SurveyQuestionType.Link }, + { label: 'Rating', value: SurveyQuestionType.Rating }, + ...[ + { + label: 'Single choice select', + value: SurveyQuestionType.SingleChoice, + }, + { + label: 'Multiple choice select', + value: SurveyQuestionType.MultipleChoice, + }, + ], + ]} + /> + + + + + {question.type === SurveyQuestionType.Link && ( + { - const newChoices = [...value] - newChoices[index] = val - onChange(newChoices) - }} - /> - } - size="small" - status="muted" - noPadding - onClick={() => { - const newChoices = [...value] - newChoices.splice(index, 1) - onChange(newChoices) - }} + value={question.link || ''} + placeholder="https://posthog.com" /> + + )} + + + + {question.type === SurveyQuestionType.Rating && ( +
+
+ + + + + + +
+
+ + + + + + +
- ))} -
- {(value || []).length < 6 && ( - } - type="secondary" - fullWidth={false} - onClick={() => { - if (!value) { - onChange(['']) - } else { - onChange([...value, '']) - } - }} - > - Add choice - - )} -
-
- )} -
-
- )} + )} + {(question.type === SurveyQuestionType.SingleChoice || + question.type === SurveyQuestionType.MultipleChoice) && ( +
+ + {({ value, onChange }) => ( +
+ {(value || []).map( + (choice: string, index: number) => ( +
+ { + const newChoices = [ + ...value, + ] + newChoices[index] = val + onChange(newChoices) + }} + /> + } + size="small" + status="muted" + noPadding + onClick={() => { + const newChoices = [ + ...value, + ] + newChoices.splice(index, 1) + onChange(newChoices) + }} + /> +
+ ) + )} +
+ {(value || []).length < 6 && ( + } + type="secondary" + fullWidth={false} + onClick={() => { + if (!value) { + onChange(['']) + } else { + onChange([...value, '']) + } + }} + > + Add choice + + )} +
+
+ )} +
+
+ )} + + ), + }, + ]} + />
) )} + {featureFlags[FEATURE_FLAGS.SURVEYS_MULTIPLE_QUESTIONS] && ( + // TODO: Add pay gate mini here once billing is resolved for it + } + onClick={() => { + setSurveyValue('questions', [...survey.questions, { ...defaultSurveyFieldValues.open }]) + }} + > + Add question + + )} {({ value, onChange }) => ( diff --git a/frontend/src/scenes/surveys/SurveyView.tsx b/frontend/src/scenes/surveys/SurveyView.tsx index 7f4fb0e241eab..a88f5381ebcb5 100644 --- a/frontend/src/scenes/surveys/SurveyView.tsx +++ b/frontend/src/scenes/surveys/SurveyView.tsx @@ -1,12 +1,12 @@ import { TZLabel } from '@posthog/apps-common' -import { LemonButton, LemonDivider } from '@posthog/lemon-ui' +import { LemonButton, LemonDivider, LemonSelect } from '@posthog/lemon-ui' import { useValues, useActions } from 'kea' import { CodeSnippet, Language } from 'lib/components/CodeSnippet' import { EditableField } from 'lib/components/EditableField/EditableField' import { More } from 'lib/lemon-ui/LemonButton/More' import { LemonSkeleton } from 'lib/lemon-ui/LemonSkeleton' import { LemonTabs } from 'lib/lemon-ui/LemonTabs' -import { capitalizeFirstLetter } from 'lib/utils' +import { capitalizeFirstLetter, pluralize } from 'lib/utils' import { useState, useEffect } from 'react' import { pluginsLogic } from 'scenes/plugins/pluginsLogic' import { Query } from '~/queries/Query/Query' @@ -15,7 +15,15 @@ import { surveysLogic } from './surveysLogic' import { PageHeader } from 'lib/components/PageHeader' import { SurveyReleaseSummary } from './Survey' import { SurveyAppearance } from './SurveyAppearance' -import { PropertyFilterType, PropertyOperator, Survey, SurveyQuestionType, SurveyType } from '~/types' +import { + PropertyFilterType, + PropertyOperator, + RatingSurveyQuestion, + Survey, + SurveyQuestion, + SurveyQuestionType, + SurveyType, +} from '~/types' import { SurveyAPIEditor } from './SurveyAPIEditor' import { LemonBanner } from 'lib/lemon-ui/LemonBanner' import { IconOpenInNew } from 'lib/lemon-ui/icons' @@ -151,10 +159,21 @@ export function SurveyView({ id }: { id: string }): JSX.Element { {survey.questions[0].question && ( <> Type - {capitalizeFirstLetter(survey.questions[0].type)} - Question + + {survey.questions.length > 1 + ? 'Multiple questions' + : capitalizeFirstLetter(survey.questions[0].type)} + + + {pluralize( + survey.questions.length, + 'Question', + 'Questions', + false + )} + {survey.questions.map((q, idx) => ( - {q.question} +
  • {q.question}
  • ))} )} @@ -269,7 +288,9 @@ export function SurveyResult({ disableEventsTable }: { disableEventsTable?: bool surveyMetricsQueries, surveyRatingQuery, surveyMultipleChoiceQuery, + currentQuestionIndexAndType, } = useValues(surveyLogic) + const { setCurrentQuestionIndexAndType } = useActions(surveyLogic) const { featureFlags } = useValues(featureFlagLogic) return ( @@ -284,20 +305,39 @@ export function SurveyResult({ disableEventsTable }: { disableEventsTable?: bool
    )} - {survey.questions[0].type === SurveyQuestionType.Rating && ( + {survey.questions.length > 1 && ( +
    + { + setCurrentQuestionIndexAndType(idx, survey.questions[idx].type) + }} + options={[ + ...survey.questions.map((q: SurveyQuestion, idx: number) => ({ + label: q.question, + value: idx, + })), + ]} + value={currentQuestionIndexAndType.idx} + /> +
    + )} + {currentQuestionIndexAndType.type === SurveyQuestionType.Rating && (
    - {featureFlags[FEATURE_FLAGS.SURVEY_NPS_RESULTS] && survey.questions[0].scale === 10 && ( - <> - -

    NPS Score

    - - - )} + {featureFlags[FEATURE_FLAGS.SURVEY_NPS_RESULTS] && + (survey.questions[currentQuestionIndexAndType.idx] as RatingSurveyQuestion).scale === 10 && ( + <> + +

    NPS Score

    + + + )}
    )} - {(survey.questions[0].type === SurveyQuestionType.SingleChoice || - survey.questions[0].type === SurveyQuestionType.MultipleChoice) && ( + {(currentQuestionIndexAndType.type === SurveyQuestionType.SingleChoice || + currentQuestionIndexAndType.type === SurveyQuestionType.MultipleChoice) && (
    diff --git a/frontend/src/scenes/surveys/constants.ts b/frontend/src/scenes/surveys/constants.ts index fa14c9310288c..47acdf8d679cf 100644 --- a/frontend/src/scenes/surveys/constants.ts +++ b/frontend/src/scenes/surveys/constants.ts @@ -12,17 +12,17 @@ export const SurveyQuestionLabel = { } export const defaultSurveyAppearance = { - backgroundColor: 'white', - textColor: 'black', + backgroundColor: '#eeeded', submitButtonText: 'Submit', - submitButtonColor: '#2c2c2c', - ratingButtonColor: '#e0e2e8', - descriptionTextColor: '#4b4b52', + submitButtonColor: 'black', + ratingButtonColor: 'white', + ratingButtonActiveColor: 'black', + borderColor: '#c9c6c6', + placeholder: '', whiteLabel: false, displayThankYouMessage: true, - placeholder: '', - position: 'right', thankYouMessageHeader: 'Thank you for your feedback!', + position: 'right', } export const defaultSurveyFieldValues = { diff --git a/frontend/src/scenes/surveys/surveyLogic.tsx b/frontend/src/scenes/surveys/surveyLogic.tsx index 65e93c8c31956..c8993d9aacd85 100644 --- a/frontend/src/scenes/surveys/surveyLogic.tsx +++ b/frontend/src/scenes/surveys/surveyLogic.tsx @@ -71,11 +71,13 @@ export const surveyLogic = kea([ actions({ editingSurvey: (editing: boolean) => ({ editing }), setDefaultForQuestionType: ( + idx: number, type: SurveyQuestionType, isEditingQuestion: boolean, isEditingDescription: boolean, isEditingThankYouMessage: boolean ) => ({ + idx, type, isEditingQuestion, isEditingDescription, @@ -85,6 +87,7 @@ export const surveyLogic = kea([ stopSurvey: true, archiveSurvey: true, resumeSurvey: true, + setCurrentQuestionIndexAndType: (idx: number, type: SurveyQuestionType) => ({ idx, type }), }), loaders(({ props, actions }) => ({ survey: { @@ -143,6 +146,9 @@ export const surveyLogic = kea([ archiveSurvey: async () => { actions.updateSurvey({ archived: true }) }, + loadSurveySuccess: ({ survey }) => { + actions.setCurrentQuestionIndexAndType(0, survey.questions[0].type) + }, })), reducers({ isEditingSurvey: [ @@ -156,28 +162,27 @@ export const surveyLogic = kea([ { setDefaultForQuestionType: ( state, - { type, isEditingQuestion, isEditingDescription, isEditingThankYouMessage } + { idx, type, isEditingQuestion, isEditingDescription, isEditingThankYouMessage } ) => { const question = isEditingQuestion - ? state.questions[0].question - : defaultSurveyFieldValues[type].questions[0].question + ? state.questions[idx].question + : defaultSurveyFieldValues[type].questions[idx].question const description = isEditingDescription - ? state.questions[0].description - : defaultSurveyFieldValues[type].questions[0].description + ? state.questions[idx].description + : defaultSurveyFieldValues[type].questions[idx].description const thankYouMessageHeader = isEditingThankYouMessage ? 
state.appearance.thankYouMessageHeader : defaultSurveyFieldValues[type].appearance.thankYouMessageHeader - + const newQuestions = [...state.questions] + newQuestions[idx] = { + ...state.questions[idx], + ...(defaultSurveyFieldValues[type].questions[idx] as SurveyQuestionBase), + question, + description, + } return { ...state, - questions: [ - { - ...state.questions[0], - ...(defaultSurveyFieldValues[type].questions[0] as SurveyQuestionBase), - question, - description, - }, - ], + questions: newQuestions, appearance: { ...state.appearance, ...defaultSurveyFieldValues[type].appearance, @@ -187,6 +192,12 @@ export const surveyLogic = kea([ }, }, ], + currentQuestionIndexAndType: [ + { idx: 0, type: SurveyQuestionType.Open } as { idx: number; type: SurveyQuestionType }, + { + setCurrentQuestionIndexAndType: (_, { idx, type }) => ({ idx, type }), + }, + ], }), selectors({ isSurveyRunning: [ @@ -225,19 +236,26 @@ export const surveyLogic = kea([ ) }, ], + surveyResponseProperty: [ + (s) => [s.currentQuestionIndexAndType], + (currentQuestionIndexAndType): string => { + return currentQuestionIndexAndType.idx === 0 + ? SURVEY_RESPONSE_PROPERTY + : `${SURVEY_RESPONSE_PROPERTY}_${currentQuestionIndexAndType.idx}` + }, + ], dataTableQuery: [ - (s) => [s.survey], - (survey): DataTableNode | null => { + (s) => [s.survey, s.surveyResponseProperty], + (survey, surveyResponseProperty): DataTableNode | null => { if (survey.id === 'new') { return null } const createdAt = (survey as Survey).created_at - return { kind: NodeKind.DataTableNode, source: { kind: NodeKind.EventsQuery, - select: ['*', `properties.${SURVEY_RESPONSE_PROPERTY}`, 'timestamp', 'person'], + select: ['*', `properties.${surveyResponseProperty}`, 'timestamp', 'person'], orderBy: ['timestamp DESC'], where: [`event == 'survey sent'`], after: createdAt, @@ -294,8 +312,8 @@ export const surveyLogic = kea([ }, ], surveyRatingQuery: [ - (s) => [s.survey], - (survey): InsightVizNode | null => { + (s) => [s.survey, s.surveyResponseProperty], + (survey, surveyResponseProperty): InsightVizNode | null => { if (survey.id === 'new') { return null } @@ -322,15 +340,15 @@ export const surveyLogic = kea([ ], series: [{ event: SURVEY_EVENT_NAME, kind: NodeKind.EventsNode }], trendsFilter: { display: ChartDisplayType.ActionsBarValue }, - breakdown: { breakdown: '$survey_response', breakdown_type: 'event' }, + breakdown: { breakdown: surveyResponseProperty, breakdown_type: 'event' }, }, showTable: true, } }, ], surveyMultipleChoiceQuery: [ - (s) => [s.survey], - (survey): DataTableNode | null => { + (s) => [s.survey, s.surveyResponseProperty, s.currentQuestionIndexAndType], + (survey, surveyResponseProperty, currentQuestionIndexAndType): DataTableNode | null => { if (survey.id === 'new') { return null } @@ -340,14 +358,14 @@ export const surveyLogic = kea([ ? 
dayjs(survey.end_date).add(1, 'day').format('YYYY-MM-DD') : dayjs().add(1, 'day').format('YYYY-MM-DD') - const singleChoiceQuery = `select count(), properties.$survey_response as choice from events where event == 'survey sent' and properties.$survey_id == '${survey.id}' and timestamp >= '${startDate}' and timestamp <= '${endDate}' group by choice order by count() desc` - const multipleChoiceQuery = `select count(), arrayJoin(JSONExtractArrayRaw(properties, '$survey_response')) as choice from events where event == 'survey sent' and properties.$survey_id == '${survey.id}' and timestamp >= '${startDate}' and timestamp <= '${endDate}' group by choice order by count() desc` + const singleChoiceQuery = `select count(), properties.${surveyResponseProperty} as choice from events where event == 'survey sent' and properties.$survey_id == '${survey.id}' and timestamp >= '${startDate}' and timestamp <= '${endDate}' group by choice order by count() desc` + const multipleChoiceQuery = `select count(), arrayJoin(JSONExtractArrayRaw(properties, ${surveyResponseProperty})) as choice from events where event == 'survey sent' and properties.$survey_id == '${survey.id}' and timestamp >= '${startDate}' and timestamp <= '${endDate}' group by choice order by count() desc` return { kind: NodeKind.DataTableNode, source: { kind: NodeKind.HogQLQuery, query: - survey.questions[0].type === SurveyQuestionType.SingleChoice + currentQuestionIndexAndType.type === SurveyQuestionType.SingleChoice ? singleChoiceQuery : multipleChoiceQuery, }, diff --git a/frontend/src/scenes/urls.ts b/frontend/src/scenes/urls.ts index 789e0d4340978..1625a1cebc715 100644 --- a/frontend/src/scenes/urls.ts +++ b/frontend/src/scenes/urls.ts @@ -88,7 +88,7 @@ export const urls = { combineUrl(`/replay/playlists/${id}`, filters ? { filters } : {}).url, replaySingle: (id: string, filters?: Partial): string => combineUrl(`/replay/${id}`, filters ? { filters } : {}).url, - person: (id: string, encode: boolean = true): string => + personByDistinctId: (id: string, encode: boolean = true): string => encode ? `/person/${encodeURIComponent(id)}` : `/person/${id}`, persons: (): string => '/persons', groups: (groupTypeIndex: string | number): string => `/groups/${groupTypeIndex}`, diff --git a/frontend/src/scenes/web-analytics/WebAnalyticsScene.tsx b/frontend/src/scenes/web-analytics/WebAnalyticsScene.tsx index 4fb41fe7261cc..a154bb7006ebe 100644 --- a/frontend/src/scenes/web-analytics/WebAnalyticsScene.tsx +++ b/frontend/src/scenes/web-analytics/WebAnalyticsScene.tsx @@ -1,49 +1,9 @@ import { SceneExport } from 'scenes/sceneTypes' import { webAnalyticsLogic } from 'scenes/web-analytics/webAnalyticsLogic' -import { Query } from '~/queries/Query/Query' -import { NodeKind } from '~/queries/schema' +import { WebAnalyticsDashboard } from 'scenes/web-analytics/WebDashboard' export function WebAnalyticsScene(): JSX.Element { - return ( -
    - Top sources - - Top clicks - - Top pages - -
    - ) + return } export const scene: SceneExport = { diff --git a/frontend/src/scenes/web-analytics/WebDashboard.tsx b/frontend/src/scenes/web-analytics/WebDashboard.tsx new file mode 100644 index 0000000000000..35485ad82b70e --- /dev/null +++ b/frontend/src/scenes/web-analytics/WebDashboard.tsx @@ -0,0 +1,21 @@ +import { Query } from '~/queries/Query/Query' +import { useValues } from 'kea' +import { webAnalyticsLogic } from 'scenes/web-analytics/webAnalyticsLogic' + +export const WebAnalyticsDashboard = (): JSX.Element => { + const { tiles } = useValues(webAnalyticsLogic) + return ( +
    + {tiles.map(({ query, layout }, i) => ( +
    + +
    + ))} +
    + ) +} diff --git a/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts b/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts index ccb9f70f8857e..42ec60f4642d5 100644 --- a/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts +++ b/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts @@ -1,13 +1,67 @@ import { actions, connect, kea, listeners, path, reducers, selectors, sharedListeners } from 'kea' import type { webAnalyticsLogicType } from './webAnalyticsLogicType' +import { NodeKind, QuerySchema } from '~/queries/schema' +interface Layout { + colSpan?: number + rowSpan?: number +} +export interface WebDashboardTile { + query: QuerySchema + layout: Layout +} export const webAnalyticsLogic = kea([ path(['scenes', 'webAnalytics', 'webAnalyticsSceneLogic']), connect({}), actions({}), reducers({}), - selectors(() => ({})), + selectors({ + tiles: [ + () => [], + (): WebDashboardTile[] => [ + { + layout: { + colSpan: 12, + }, + query: { + full: true, + kind: NodeKind.DataTableNode, + source: { + kind: NodeKind.WebOverviewStatsQuery, + filters: {}, + }, + }, + }, + { + layout: { + colSpan: 6, + }, + query: { + full: true, + kind: NodeKind.DataTableNode, + source: { + kind: NodeKind.WebTopPagesQuery, + filters: {}, + }, + }, + }, + { + layout: { + colSpan: 6, + }, + query: { + full: true, + kind: NodeKind.DataTableNode, + source: { + kind: NodeKind.WebTopSourcesQuery, + filters: {}, + }, + }, + }, + ], + ], + }), sharedListeners(() => ({})), listeners(() => ({})), ]) diff --git a/frontend/src/styles/utilities.scss b/frontend/src/styles/utilities.scss index 745375f1c3f57..24664fb521a6b 100644 --- a/frontend/src/styles/utilities.scss +++ b/frontend/src/styles/utilities.scss @@ -506,6 +506,14 @@ $decorations: underline, overline, line-through, no-underline; display: inline; } +.grid { + display: grid; +} + +.inline-grid { + display: inline-grid; +} + .hidden { display: none; } @@ -619,6 +627,103 @@ $decorations: underline, overline, line-through, no-underline; align-self: baseline; } +// Grid Template Columns +@for $i from 1 through 12 { + .grid-cols-#{$i} { + grid-template-columns: repeat(#{$i}, minmax(0, 1fr)); + } +} +.grid-cols-none { + grid-template-columns: none; +} + +// Grid Column Start/End +.col-auto { + grid-column: auto; +} + +@for $i from 1 through 12 { + .col-span-#{$i} { + grid-column: span #{$i} / span #{$i}; + } +} +.col-span-full { + grid-column: 1 / -1; +} + +@for $i from 1 through 13 { + .col-start-#{$i} { + grid-column-start: #{$i}; + } +} +.col-start-auto { + grid-column-start: auto; +} + +@for $i from 1 through 13 { + .col-end-#{$i} { + grid-column-end: #{$i}; + } +} +.col-end-auto { + grid-column-end: auto; +} + +// Grid Row Start/End +.row-auto { + grid-row: auto; +} + +@for $i from 1 through 6 { + .row-span-#{$i} { + grid-row: span #{$i} / span #{$i}; + } +} +.row-span-full { + grid-row: 1 / -1; +} + +@for $i from 1 through 7 { + .row-start-#{$i} { + grid-row-start: #{$i}; + } +} +.row-start-auto { + grid-row-start: auto; +} + +@for $i from 1 through 7 { + .row-end-#{$i} { + grid-row-end: #{$i}; + } +} +.row-end-auto { + grid-row-end: auto; +} + +// Gap +@each $space in $all_spaces { + .gap-#{escape-number($space)} { + gap: #{$space * 0.25}rem; + } + .gap-x-#{escape-number($space)} { + column-gap: #{$space * 0.25}rem; + } + .gap-y-#{escape-number($space)} { + row-gap: #{$space * 0.25}rem; + } +} +.gap-px { + gap: 1px; +} +.gap-x-px { + column-gap: 1px; +} +.gap-y-px { + row-gap: 1px; +} + +// Typography .font-thin { font-weight: 100; } diff --git 
a/frontend/src/test/init.ts b/frontend/src/test/init.ts index 59a7444c3d50e..dc896f740e8c9 100644 --- a/frontend/src/test/init.ts +++ b/frontend/src/test/init.ts @@ -19,7 +19,6 @@ export function initKeaTests(mountCommonLogic = true, teamForWindowContext: Team current_team: teamForWindowContext, } as unknown as AppContext posthog.init('no token', { - test: true, autocapture: false, disable_session_recording: true, advanced_disable_decide: true, diff --git a/package.json b/package.json index d766ed4da53bb..2ca5a921bbd3b 100644 --- a/package.json +++ b/package.json @@ -126,7 +126,7 @@ "md5": "^2.3.0", "monaco-editor": "^0.39.0", "papaparse": "^5.4.1", - "posthog-js": "1.81.1", + "posthog-js": "1.81.3", "posthog-js-lite": "2.0.0-alpha5", "prettier": "^2.8.8", "prop-types": "^15.7.2", diff --git a/plugin-server/src/kafka/batch-consumer.ts b/plugin-server/src/kafka/batch-consumer.ts index 8ef61ee4ff4f9..0a4b8fa88cc37 100644 --- a/plugin-server/src/kafka/batch-consumer.ts +++ b/plugin-server/src/kafka/batch-consumer.ts @@ -4,12 +4,12 @@ import { exponentialBuckets, Gauge, Histogram } from 'prom-client' import { status } from '../utils/status' import { createAdminClient, ensureTopicExists } from './admin' import { - commitOffsetsForMessages, consumeMessages, countPartitionsPerTopic, createKafkaConsumer, disconnectConsumer, instrumentConsumerMetrics, + storeOffsetsForMessages, } from './consumer' export interface BatchConsumer { @@ -23,6 +23,7 @@ export const startBatchConsumer = async ({ connectionConfig, groupId, topic, + autoCommit, sessionTimeout, consumerMaxBytesPerPartition, consumerMaxBytes, @@ -32,13 +33,13 @@ export const startBatchConsumer = async ({ batchingTimeoutMs, topicCreationTimeoutMs, eachBatch, - autoCommit = true, cooperativeRebalance = true, queuedMinMessages = 100000, }: { connectionConfig: GlobalConfig groupId: string topic: string + autoCommit: boolean sessionTimeout: number consumerMaxBytesPerPartition: number consumerMaxBytes: number @@ -48,7 +49,6 @@ export const startBatchConsumer = async ({ batchingTimeoutMs: number topicCreationTimeoutMs: number eachBatch: (messages: Message[]) => Promise - autoCommit?: boolean cooperativeRebalance?: boolean queuedMinMessages?: number }): Promise => { @@ -76,9 +76,8 @@ export const startBatchConsumer = async ({ ...connectionConfig, 'group.id': groupId, 'session.timeout.ms': sessionTimeout, - // We disable auto commit and rather we commit after one batch has - // completed. - 'enable.auto.commit': false, + 'enable.auto.commit': autoCommit, + 'enable.auto.offset.store': false, /** * max.partition.fetch.bytes * The maximum amount of data per-partition the server will return. 
@@ -211,7 +210,7 @@ export const startBatchConsumer = async ({ messagesProcessed += messages.length if (autoCommit) { - commitOffsetsForMessages(messages, consumer) + storeOffsetsForMessages(messages, consumer) } } } catch (error) { diff --git a/plugin-server/src/kafka/consumer.ts b/plugin-server/src/kafka/consumer.ts index 14a45f946376e..d05013aa7e6f0 100644 --- a/plugin-server/src/kafka/consumer.ts +++ b/plugin-server/src/kafka/consumer.ts @@ -203,7 +203,7 @@ export const findOffsetsToCommit = (messages: TopicPartitionOffset[]): TopicPart return highestOffsets } -export const commitOffsetsForMessages = (messages: Message[], consumer: RdKafkaConsumer) => { +export const storeOffsetsForMessages = (messages: Message[], consumer: RdKafkaConsumer) => { const topicPartitionOffsets = findOffsetsToCommit(messages).map((message) => { return { ...message, @@ -213,8 +213,8 @@ export const commitOffsetsForMessages = (messages: Message[], consumer: RdKafkaC }) if (topicPartitionOffsets.length > 0) { - status.debug('📝', 'Committing offsets', { topicPartitionOffsets }) - consumer.commit(topicPartitionOffsets) + status.debug('📝', 'Storing offsets', { topicPartitionOffsets }) + consumer.offsetsStore(topicPartitionOffsets) } } diff --git a/plugin-server/src/main/ingestion-queues/kafka-queue.ts b/plugin-server/src/main/ingestion-queues/kafka-queue.ts index 7989efd4b356a..3d15270307722 100644 --- a/plugin-server/src/main/ingestion-queues/kafka-queue.ts +++ b/plugin-server/src/main/ingestion-queues/kafka-queue.ts @@ -249,6 +249,7 @@ export class IngestionConsumer { connectionConfig: createRdConnectionConfigFromEnvVars(this.pluginsServer as KafkaConfig), topic: this.topic, groupId: this.consumerGroupId, + autoCommit: true, sessionTimeout: 30000, consumerMaxBytes: this.pluginsServer.KAFKA_CONSUMPTION_MAX_BYTES, consumerMaxBytesPerPartition: this.pluginsServer.KAFKA_CONSUMPTION_MAX_BYTES_PER_PARTITION, diff --git a/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v1.ts b/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v1.ts index 7f1c6f3fdd2f9..81b4fc9ec2be8 100644 --- a/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v1.ts +++ b/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v1.ts @@ -85,6 +85,7 @@ export const startSessionRecordingEventsConsumerV1 = async ({ connectionConfig, groupId, topic: KAFKA_SESSION_RECORDING_EVENTS, + autoCommit: true, sessionTimeout, consumerMaxBytesPerPartition, consumerMaxBytes, diff --git a/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v2.ts b/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v2.ts index d42f756b7d49c..1cdbcdbb0aa5c 100644 --- a/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v2.ts +++ b/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v2.ts @@ -451,6 +451,7 @@ export class SessionRecordingIngesterV2 { connectionConfig, groupId: KAFKA_CONSUMER_GROUP_ID, topic: KAFKA_SESSION_RECORDING_SNAPSHOT_ITEM_EVENTS, + autoCommit: false, sessionTimeout: KAFKA_CONSUMER_SESSION_TIMEOUT_MS, // the largest size of a message that can be fetched by the consumer. 
// the largest size our MSK cluster allows is 20MB @@ -464,7 +465,6 @@ export class SessionRecordingIngesterV2 { fetchBatchSize: this.config.SESSION_RECORDING_KAFKA_BATCH_SIZE, batchingTimeoutMs: this.config.KAFKA_CONSUMPTION_BATCHING_TIMEOUT_MS, topicCreationTimeoutMs: this.config.KAFKA_TOPIC_CREATION_TIMEOUT_MS, - autoCommit: false, eachBatch: async (messages) => { return await this.handleEachBatch(messages) }, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index d0c40db53220a..c9d9973654106 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1,4 +1,4 @@ -lockfileVersion: '6.0' +lockfileVersion: '6.1' settings: autoInstallPeers: true @@ -201,8 +201,8 @@ dependencies: specifier: ^5.4.1 version: 5.4.1 posthog-js: - specifier: 1.81.1 - version: 1.81.1 + specifier: 1.81.3 + version: 1.81.3 posthog-js-lite: specifier: 2.0.0-alpha5 version: 2.0.0-alpha5 @@ -6104,7 +6104,7 @@ packages: dependencies: '@types/node': 18.11.9 '@types/qs': 6.9.8 - '@types/range-parser': 1.2.4 + '@types/range-parser': 1.2.5 '@types/send': 0.17.2 dev: true @@ -6248,10 +6248,18 @@ packages: resolution: {integrity: sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw==} dev: true + /@types/mime@1.3.3: + resolution: {integrity: sha512-Ys+/St+2VF4+xuY6+kDIXGxbNRO0mesVg0bbxEfB97Od1Vjpjx9KD1qxs64Gcb3CWPirk9Xe+PT4YiiHQ9T+eg==} + dev: true + /@types/mime@3.0.1: resolution: {integrity: sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA==} dev: true + /@types/mime@3.0.2: + resolution: {integrity: sha512-Wj+fqpTLtTbG7c0tH47dkahefpLKEbB+xAZuLq7b4/IDHPl/n6VoXcyUQ2bypFlbSwvCr0y+bD4euTTqTJsPxQ==} + dev: true + /@types/ms@0.7.31: resolution: {integrity: sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==} dev: true @@ -6344,6 +6352,10 @@ packages: resolution: {integrity: sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==} dev: true + /@types/range-parser@1.2.5: + resolution: {integrity: sha512-xrO9OoVPqFuYyR/loIHjnbvvyRZREYKLjxV4+dY6v3FQR3stQ9ZxIGkaclF7YhI9hfjpuTbu14hZEy94qKLtOA==} + dev: true + /@types/react-dom@16.9.17: resolution: {integrity: sha512-qSRyxEsrm5btPXnowDOs5jSkgT8ldAA0j6Qp+otHUh+xHzy3sXmgNfyhucZjAjkgpdAUw9rJe0QRtX/l+yaS4g==} dependencies: @@ -6429,7 +6441,7 @@ packages: /@types/send@0.17.2: resolution: {integrity: sha512-aAG6yRf6r0wQ29bkS+x97BIs64ZLxeE/ARwyS6wrldMm3C1MdKwCcnnEwMC1slI8wuxJOpiUH9MioC0A0i+GJw==} dependencies: - '@types/mime': 1.3.2 + '@types/mime': 1.3.3 '@types/node': 18.11.9 dev: true @@ -6445,7 +6457,7 @@ packages: resolution: {integrity: sha512-yVRvFsEMrv7s0lGhzrggJjNOSmZCdgCjw9xWrPr/kNNLp6FaDfMC1KaYl3TSJ0c58bECwNBMoQrZJ8hA8E1eFg==} dependencies: '@types/http-errors': 2.0.2 - '@types/mime': 3.0.1 + '@types/mime': 3.0.2 '@types/node': 18.11.9 dev: true @@ -13112,7 +13124,7 @@ packages: dependencies: universalify: 2.0.0 optionalDependencies: - graceful-fs: 4.2.11 + graceful-fs: 4.2.10 /jsprim@2.0.2: resolution: {integrity: sha512-gqXddjPqQ6G40VdnI6T6yObEC+pDNvyP95wdQhkWkg7crHH3km5qP1FsOXEkzEQwnz6gz5qGTn1c2Y52wP3OyQ==} @@ -15031,8 +15043,8 @@ packages: resolution: {integrity: sha512-tlkBdypJuvK/s00n4EiQjwYVfuuZv6vt8BF3g1ooIQa2Gz9Vz80p8q3qsPLZ0V5ErGRy6i3Q4fWC9TDzR7GNRQ==} dev: false - /posthog-js@1.81.1: - resolution: {integrity: sha512-pQfG9ZGVn3R7Uh1cC/S02trZ6u4TOLs1NhZG3WiNrqMKDA8MJQjZ/PqdkLO0/BeozRBfIbON6pw3xfOIneIclg==} + /posthog-js@1.81.3: + resolution: {integrity: 
sha512-Aqqcj1n1KqZlxMaYYfd5OJC2BMIAP927+f7XqEbEplYJKigGTbQ6ygt2UeSJZe3xcDMxyDK4jxOWy68kD3YIlw==} dependencies: fflate: 0.4.8 dev: false diff --git a/posthog/api/query.py b/posthog/api/query.py index 45358277c3eb7..078bf8cd3eaee 100644 --- a/posthog/api/query.py +++ b/posthog/api/query.py @@ -20,17 +20,14 @@ from posthog.api.routing import StructuredViewSetMixin from posthog.clickhouse.query_tagging import tag_queries from posthog.errors import ExposedCHQueryError +from posthog.hogql import ast from posthog.hogql.ai import PromptUnclear, write_sql_from_prompt from posthog.hogql.database.database import create_hogql_database, serialize_database from posthog.hogql.errors import HogQLException from posthog.hogql.metadata import get_hogql_metadata from posthog.hogql.query import execute_hogql_query -from posthog.hogql_queries.insights.lifecycle_query_runner import LifecycleQueryRunner -from posthog.hogql_queries.insights.trends_query_runner import TrendsQueryRunner -from posthog.hogql_queries.web_analytics.top_clicks import WebTopClicksQueryRunner -from posthog.hogql_queries.web_analytics.top_pages import WebTopPagesQueryRunner -from posthog.hogql_queries.web_analytics.top_sources import WebTopSourcesQueryRunner +from posthog.hogql_queries.query_runner import get_query_runner from posthog.models import Team from posthog.models.event.events_query import run_events_query from posthog.models.user import User @@ -41,6 +38,15 @@ from posthog.schema import EventsQuery, HogQLQuery, HogQLMetadata from posthog.utils import refresh_requested_by_client +QUERY_WITH_RUNNER = [ + "LifecycleQuery", + "TrendsQuery", + "WebOverviewStatsQuery", + "WebTopSourcesQuery", + "WebTopClicksQuery", + "WebTopPagesQuery", +] + class QueryThrottle(TeamRateThrottle): scope = "query" @@ -206,20 +212,29 @@ def process_query( # query_json has been parsed by QuerySchemaParser # it _should_ be impossible to end up in here with a "bad" query query_kind = query_json.get("kind") - tag_queries(query=query_json) - if query_kind == "EventsQuery": + if query_kind in QUERY_WITH_RUNNER: + refresh_requested = refresh_requested_by_client(request) if request else False + query_runner = get_query_runner(query_json, team) + return _unwrap_pydantic_dict(query_runner.run(refresh_requested=refresh_requested)) + elif query_kind == "EventsQuery": events_query = EventsQuery.model_validate(query_json) events_response = run_events_query(query=events_query, team=team, default_limit=default_limit) return _unwrap_pydantic_dict(events_response) elif query_kind == "HogQLQuery": hogql_query = HogQLQuery.model_validate(query_json) + values = ( + {key: ast.Constant(value=value) for key, value in hogql_query.values.items()} + if hogql_query.values + else None + ) hogql_response = execute_hogql_query( query_type="HogQLQuery", query=hogql_query.query, team=team, filters=hogql_query.filters, + placeholders=values, default_limit=default_limit, ) return _unwrap_pydantic_dict(hogql_response) @@ -227,14 +242,6 @@ def process_query( metadata_query = HogQLMetadata.model_validate(query_json) metadata_response = get_hogql_metadata(query=metadata_query, team=team) return _unwrap_pydantic_dict(metadata_response) - elif query_kind == "LifecycleQuery": - refresh_requested = refresh_requested_by_client(request) if request else False - lifecycle_query_runner = LifecycleQueryRunner(query_json, team) - return _unwrap_pydantic_dict(lifecycle_query_runner.run(refresh_requested=refresh_requested)) - elif query_kind == "TrendsQuery": - refresh_requested = 
refresh_requested_by_client(request) if request else False - trends_query_runner = TrendsQueryRunner(query_json, team) - return _unwrap_pydantic_dict(trends_query_runner.run(refresh_requested=refresh_requested)) elif query_kind == "DatabaseSchemaQuery": database = create_hogql_database(team.pk) return serialize_database(database) @@ -253,18 +260,6 @@ def process_query( ) serializer.is_valid(raise_exception=True) return get_session_events(serializer) or {} - elif query_kind == "WebTopSourcesQuery": - refresh_requested = refresh_requested_by_client(request) if request else False - web_top_sources_query_runner = WebTopSourcesQueryRunner(query_json, team) - return _unwrap_pydantic_dict(web_top_sources_query_runner.run(refresh_requested=refresh_requested)) - elif query_kind == "WebTopClicksQuery": - refresh_requested = refresh_requested_by_client(request) if request else False - web_top_clicks_query_runner = WebTopClicksQueryRunner(query_json, team) - return _unwrap_pydantic_dict(web_top_clicks_query_runner.run(refresh_requested=refresh_requested)) - elif query_kind == "WebTopPagesQuery": - refresh_requested = refresh_requested_by_client(request) if request else False - web_top_pages_query_runner = WebTopPagesQueryRunner(query_json, team) - return _unwrap_pydantic_dict(web_top_pages_query_runner.run(refresh_requested=refresh_requested)) else: if query_json.get("source"): return process_query(team, query_json["source"]) diff --git a/posthog/api/test/test_query.py b/posthog/api/test/test_query.py index cde8de9c22196..a98cc0816d902 100644 --- a/posthog/api/test/test_query.py +++ b/posthog/api/test/test_query.py @@ -522,3 +522,22 @@ def test_full_hogql_query_view(self): ["sign out", "4", "test_val3"], ], ) + + def test_full_hogql_query_values(self): + random_uuid = str(UUIDT()) + with freeze_time("2020-01-10 12:00:00"): + for _ in range(20): + _create_event(team=self.team, event="sign up", distinct_id=random_uuid, properties={"key": "test_val1"}) + flush_persons_and_events() + + with freeze_time("2020-01-10 12:14:00"): + response = process_query( + team=self.team, + query_json={ + "kind": "HogQLQuery", + "query": "select count() from events where distinct_id = {random_uuid}", + "values": {"random_uuid": random_uuid}, + }, + ) + + self.assertEqual(response.get("results", [])[0][0], 20) diff --git a/posthog/hogql_queries/query_runner.py b/posthog/hogql_queries/query_runner.py index 49dac4c5b0177..915fd79de9ee3 100644 --- a/posthog/hogql_queries/query_runner.py +++ b/posthog/hogql_queries/query_runner.py @@ -1,6 +1,6 @@ from abc import ABC, abstractmethod from datetime import datetime -from typing import Any, Generic, List, Optional, Type, Dict, TypeVar, Union, Tuple +from typing import Any, Generic, List, Optional, Type, Dict, TypeVar, Union, Tuple, cast from django.conf import settings from django.core.cache import cache @@ -17,14 +17,11 @@ from posthog.schema import ( QueryTiming, TrendsQuery, - FunnelsQuery, - RetentionQuery, - PathsQuery, - StickinessQuery, LifecycleQuery, WebTopSourcesQuery, WebTopClicksQuery, WebTopPagesQuery, + WebOverviewStatsQuery, ) from posthog.utils import generate_cache_key, get_safe_cache @@ -64,17 +61,51 @@ class CachedQueryResponse(QueryResponse): RunnableQueryNode = Union[ TrendsQuery, - FunnelsQuery, - RetentionQuery, - PathsQuery, - StickinessQuery, LifecycleQuery, + WebOverviewStatsQuery, WebTopSourcesQuery, WebTopClicksQuery, WebTopPagesQuery, ] +def get_query_runner( + query: Dict[str, Any] | RunnableQueryNode, team: Team, timings: Optional[HogQLTimings] = None 
+) -> "QueryRunner": + kind = None + if isinstance(query, dict): + kind = query.get("kind", None) + elif hasattr(query, "kind"): + kind = query.kind + + if kind == "LifecycleQuery": + from .insights.lifecycle_query_runner import LifecycleQueryRunner + + return LifecycleQueryRunner(query=cast(LifecycleQuery | Dict[str, Any], query), team=team, timings=timings) + if kind == "TrendsQuery": + from .insights.trends_query_runner import TrendsQueryRunner + + return TrendsQueryRunner(query=cast(TrendsQuery | Dict[str, Any], query), team=team, timings=timings) + if kind == "WebOverviewStatsQuery": + from .web_analytics.overview_stats import WebOverviewStatsQueryRunner + + return WebOverviewStatsQueryRunner(query=query, team=team, timings=timings) + if kind == "WebTopSourcesQuery": + from .web_analytics.top_sources import WebTopSourcesQueryRunner + + return WebTopSourcesQueryRunner(query=query, team=team, timings=timings) + if kind == "WebTopClicksQuery": + from .web_analytics.top_clicks import WebTopClicksQueryRunner + + return WebTopClicksQueryRunner(query=query, team=team, timings=timings) + if kind == "WebTopPagesQuery": + from .web_analytics.top_pages import WebTopPagesQueryRunner + + return WebTopPagesQueryRunner(query=query, team=team, timings=timings) + + raise ValueError(f"Can't get a runner for an unknown query kind: {kind}") + + class QueryRunner(ABC): query: RunnableQueryNode query_type: Type[RunnableQueryNode] @@ -124,7 +155,7 @@ def run(self, refresh_requested: bool) -> CachedQueryResponse: def to_query(self) -> ast.SelectQuery: raise NotImplementedError() - def to_persons_query(self) -> str: + def to_persons_query(self) -> ast.SelectQuery: # TODO: add support for selecting and filtering by breakdowns raise NotImplementedError() diff --git a/posthog/hogql_queries/utils/test/test_formula_ast.py b/posthog/hogql_queries/utils/test/test_formula_ast.py new file mode 100644 index 0000000000000..a10a54f71ca29 --- /dev/null +++ b/posthog/hogql_queries/utils/test/test_formula_ast.py @@ -0,0 +1,53 @@ +from posthog.hogql_queries.utils.formula_ast import FormulaAST +from posthog.test.base import APIBaseTest + + +class TestFormulaAST(APIBaseTest): + def _get_formula_ast(self) -> FormulaAST: + formula = FormulaAST(data=[[1, 2, 3, 4], [1, 2, 3, 4]]) + return formula + + def test_addition(self): + formula = self._get_formula_ast() + response = formula.call("A+1") + self.assertListEqual([2, 3, 4, 5], response) + + def test_subtraction(self): + formula = self._get_formula_ast() + response = formula.call("A-1") + self.assertListEqual([0, 1, 2, 3], response) + + def test_multiplication(self): + formula = self._get_formula_ast() + response = formula.call("A*2") + self.assertListEqual([2, 4, 6, 8], response) + + def test_division(self): + formula = self._get_formula_ast() + response = formula.call("A/2") + self.assertListEqual([0.5, 1, 1.5, 2], response) + + def test_modulo(self): + formula = self._get_formula_ast() + response = formula.call("A%2") + self.assertListEqual([1, 0, 1, 0], response) + + def test_power(self): + formula = self._get_formula_ast() + response = formula.call("A**2") + self.assertListEqual([1, 4, 9, 16], response) + + def test_constants(self): + formula = self._get_formula_ast() + response = formula.call("1") + self.assertListEqual([1, 1, 1, 1], response) + + def test_named_values(self): + formula = self._get_formula_ast() + response = formula.call("A+B") + self.assertListEqual([2, 4, 6, 8], response) + + def test_named_values_lower_case(self): + formula = self._get_formula_ast() + 
response = formula.call("a+b") + self.assertListEqual([2, 4, 6, 8], response) diff --git a/posthog/hogql_queries/web_analytics/ctes.py b/posthog/hogql_queries/web_analytics/ctes.py new file mode 100644 index 0000000000000..8fcd85b960a4f --- /dev/null +++ b/posthog/hogql_queries/web_analytics/ctes.py @@ -0,0 +1,78 @@ +# The intention is for these CTEs to become materialized views for performance reasons, but +# while these queries are under development they are left as CTEs so that they can be iterated +# on without needing database migrations + +SESSION_CTE = """ +SELECT + events.properties.`$session_id` AS session_id, + min(events.timestamp) AS min_timestamp, + max(events.timestamp) AS max_timestamp, + dateDiff('second', min_timestamp, max_timestamp) AS duration_s, + + argMin(events.properties.`$referrer`, events.timestamp) AS earliest_referrer, + argMin(events.properties.`$pathname`, events.timestamp) AS earliest_pathname, + argMax(events.properties.`$pathname`, events.timestamp ) AS latest_pathname, + argMax(events.properties.utm_source, events.timestamp) AS earliest_utm_source, + + if(domain(earliest_referrer) = '', earliest_referrer, domain(earliest_referrer)) AS referrer_domain, + multiIf( + earliest_utm_source IS NOT NULL, earliest_utm_source, + -- This will need to be an approach that scales better + referrer_domain == 'app.posthog.com', 'posthog', + referrer_domain == 'eu.posthog.com', 'posthog', + referrer_domain == 'posthog.com', 'posthog', + referrer_domain == 'www.google.com', 'google', + referrer_domain == 'www.google.co.uk', 'google', + referrer_domain == 'www.google.com.hk', 'google', + referrer_domain == 'www.google.de', 'google', + referrer_domain == 't.co', 'twitter', + referrer_domain == 'github.com', 'github', + referrer_domain == 'duckduckgo.com', 'duckduckgo', + referrer_domain == 'www.bing.com', 'bing', + referrer_domain == 'bing.com', 'bing', + referrer_domain == 'yandex.ru', 'yandex', + referrer_domain == 'quora.com', 'quora', + referrer_domain == 'www.quora.com', 'quora', + referrer_domain == 'linkedin.com', 'linkedin', + referrer_domain == 'www.linkedin.com', 'linkedin', + startsWith(referrer_domain, 'http://localhost:'), 'localhost', + referrer_domain + ) AS blended_source, + + countIf(events.event == '$pageview') AS num_pageviews, + countIf(events.event == '$autocapture') AS num_autocaptures, + -- in v1 we'd also want to count whether there were any conversion events + + any(events.person_id) as person_id, + -- definition of a GA4 bounce from here https://support.google.com/analytics/answer/12195621?hl=en + (num_autocaptures == 0 AND num_pageviews <= 1 AND duration_s < 10) AS is_bounce +FROM + events +WHERE + session_id IS NOT NULL +AND + events.timestamp >= now() - INTERVAL 8 DAY +GROUP BY + events.properties.`$session_id` +HAVING + min_timestamp >= now() - INTERVAL 7 DAY + """ + +PATHNAME_CTE = """ +SELECT + events.properties.`$prev_pageview_pathname` AS pathname, + countIf(events.event == '$pageview') as total_pageviews, + COUNT(DISTINCT events.properties.distinct_id) as unique_visitors, -- might want to use person id? 
have seen a small number of pages where unique > total + avg(CASE + WHEN toFloat(JSONExtractRaw(events.properties, '$prev_pageview_max_content_percentage')) IS NULL THEN NULL + WHEN toFloat(JSONExtractRaw(events.properties, '$prev_pageview_max_content_percentage')) > 0.8 THEN 100 + ELSE 0 + END) AS scroll_gt80_percentage, + avg(toFloat(JSONExtractRaw(events.properties, '$prev_pageview_max_scroll_percentage'))) as average_scroll_percentage +FROM + events +WHERE + (event = '$pageview' OR event = '$pageleave') AND events.properties.`$prev_pageview_pathname` IS NOT NULL + AND events.timestamp >= now() - INTERVAL 7 DAY +GROUP BY pathname +""" diff --git a/posthog/hogql_queries/web_analytics/overview_stats.py b/posthog/hogql_queries/web_analytics/overview_stats.py new file mode 100644 index 0000000000000..810cab5111ffe --- /dev/null +++ b/posthog/hogql_queries/web_analytics/overview_stats.py @@ -0,0 +1,59 @@ +from django.utils.timezone import datetime + +from posthog.hogql import ast +from posthog.hogql.parser import parse_select, parse_expr +from posthog.hogql.query import execute_hogql_query +from posthog.hogql_queries.utils.query_date_range import QueryDateRange +from posthog.hogql_queries.web_analytics.web_analytics_query_runner import WebAnalyticsQueryRunner +from posthog.models.filters.mixins.utils import cached_property +from posthog.schema import WebOverviewStatsQueryResponse, WebOverviewStatsQuery + + +class WebOverviewStatsQueryRunner(WebAnalyticsQueryRunner): + query: WebOverviewStatsQuery + query_type = WebOverviewStatsQuery + + def to_query(self) -> ast.SelectQuery | ast.SelectUnionQuery: + with self.timings.measure("date_expr"): + start = parse_expr("today() - 14") + mid = parse_expr("today() - 7") + end = parse_expr("today()") + with self.timings.measure("overview_stats_query"): + overview_stats_query = parse_select( + """ +SELECT + uniq(if(timestamp >= {mid} AND timestamp < {end}, events.distinct_id, NULL)) AS current_week_unique_users, + uniq(if(timestamp >= {start} AND timestamp < {mid}, events.distinct_id, NULL)) AS previous_week_unique_users, + + uniq(if(timestamp >= {mid} AND timestamp < {end}, events.properties.$session_id, NULL)) AS current_week_unique_sessions, + uniq(if(timestamp >= {start} AND timestamp < {mid}, events.properties.$session_id, NULL)) AS previous_week_unique_sessions, + + countIf(timestamp >= {mid} AND timestamp < {end}) AS current_week_pageviews, + countIf(timestamp >= {start} AND timestamp < {mid}) AS previous_week_pageviews +FROM + events +WHERE + event = '$pageview' AND + timestamp >= {start} AND + timestamp < {end} + """, + timings=self.timings, + placeholders={"start": start, "mid": mid, "end": end}, + ) + return overview_stats_query + + def calculate(self): + response = execute_hogql_query( + query_type="overview_stats_query", + query=self.to_query(), + team=self.team, + timings=self.timings, + ) + + return WebOverviewStatsQueryResponse( + columns=response.columns, result=response.results, timings=response.timings, types=response.types + ) + + @cached_property + def query_date_range(self): + return QueryDateRange(date_range=self.query.dateRange, team=self.team, interval=None, now=datetime.now()) diff --git a/posthog/hogql_queries/web_analytics/top_pages.py b/posthog/hogql_queries/web_analytics/top_pages.py index 7ded183b80d1b..3c2db51de8504 100644 --- a/posthog/hogql_queries/web_analytics/top_pages.py +++ b/posthog/hogql_queries/web_analytics/top_pages.py @@ -3,6 +3,7 @@ from posthog.hogql import ast from posthog.hogql.parser import parse_select 
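For context on the refactor in the two web-analytics runners below: the inline session/pathname SQL is moved into shared strings in ctes.py, parsed once with parse_select, and injected into the outer query via the {placeholder} syntax. A minimal sketch of that pattern, assuming only the parse_select/placeholders usage shown in this diff (build_bounce_rate_query is a hypothetical helper, not part of the change):

```python
# Sketch of the CTE-extraction pattern applied in this diff.
from posthog.hogql.parser import parse_select
from posthog.hogql_queries.web_analytics.ctes import SESSION_CTE


def build_bounce_rate_query():
    # Parse the shared SQL once...
    session_query = parse_select(SESSION_CTE)
    # ...then splice it into the outer query as a placeholder,
    # mirroring what WebTopSourcesQueryRunner / WebTopPagesQueryRunner do.
    return parse_select(
        """
        SELECT session.earliest_pathname, avg(session.is_bounce) AS bounce_rate
        FROM {session_query} AS session
        GROUP BY session.earliest_pathname
        """,
        placeholders={"session_query": session_query},
    )
```
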
from posthog.hogql.query import execute_hogql_query +from posthog.hogql_queries.web_analytics.ctes import SESSION_CTE, PATHNAME_CTE from posthog.hogql_queries.web_analytics.web_analytics_query_runner import WebAnalyticsQueryRunner from posthog.hogql_queries.utils.query_date_range import QueryDateRange from posthog.models.filters.mixins.utils import cached_property @@ -14,123 +15,39 @@ class WebTopPagesQueryRunner(WebAnalyticsQueryRunner): query_type = WebTopPagesQuery def to_query(self) -> ast.SelectQuery | ast.SelectUnionQuery: + with self.timings.measure("session_query"): + session_query = parse_select(SESSION_CTE, timings=self.timings) + with self.timings.measure("pathname_query"): + pathname_query = parse_select(PATHNAME_CTE, timings=self.timings) with self.timings.measure("top_pages_query"): top_sources_query = parse_select( """ -WITH - -scroll_depth_cte AS ( -SELECT - events.properties.`$prev_pageview_pathname` AS pathname, - countIf(events.event == '$pageview') as total_pageviews, - COUNT(DISTINCT events.properties.distinct_id) as unique_visitors, -- might want to use person id? have seen a small number of pages where unique > total - avg(CASE - WHEN events.properties.`$prev_pageview_max_content_percentage` IS NULL THEN NULL - WHEN events.properties.`$prev_pageview_max_content_percentage` > 0.8 THEN 100 - ELSE 0 - END) AS scroll_gt80_percentage, - avg(events.properties.$prev_pageview_max_scroll_percentage) * 100 as average_scroll_percentage -FROM - events -WHERE - (event = '$pageview' OR event = '$pageleave') AND events.properties.`$prev_pageview_pathname` IS NOT NULL - AND events.timestamp >= now() - INTERVAL 7 DAY -GROUP BY pathname -) - -, - -session_cte AS ( SELECT - events.properties.`$session_id` AS session_id, - min(events.timestamp) AS min_timestamp, - max(events.timestamp) AS max_timestamp, - dateDiff('second', min_timestamp, max_timestamp) AS duration_s, - - -- create a tuple so that these are grouped in the same order, see https://github.com/ClickHouse/ClickHouse/discussions/42338 - groupArray((events.timestamp, events.properties.`$referrer`, events.properties.`$pathname`, events.properties.utm_source)) AS tuple_array, - arrayFirstIndex(x -> tupleElement(x, 1) == min_timestamp, tuple_array) as index_of_earliest, - arrayFirstIndex(x -> tupleElement(x, 1) == max_timestamp, tuple_array) as index_of_latest, - tupleElement(arrayElement( - tuple_array, - index_of_earliest - ), 2) AS earliest_referrer, - tupleElement(arrayElement( - tuple_array, - index_of_earliest - ), 3) AS earliest_pathname, - tupleElement(arrayElement( - tuple_array, - index_of_earliest - ), 4) AS earliest_utm_source, - - if(domain(earliest_referrer) = '', earliest_referrer, domain(earliest_referrer)) AS referrer_domain, - multiIf( - earliest_utm_source IS NOT NULL, earliest_utm_source, - -- This will need to be an approach that scales better - referrer_domain == 'app.posthog.com', 'posthog', - referrer_domain == 'eu.posthog.com', 'posthog', - referrer_domain == 'posthog.com', 'posthog', - referrer_domain == 'www.google.com', 'google', - referrer_domain == 'www.google.co.uk', 'google', - referrer_domain == 'www.google.com.hk', 'google', - referrer_domain == 'www.google.de', 'google', - referrer_domain == 't.co', 'twitter', - referrer_domain == 'github.com', 'github', - referrer_domain == 'duckduckgo.com', 'duckduckgo', - referrer_domain == 'www.bing.com', 'bing', - referrer_domain == 'bing.com', 'bing', - referrer_domain == 'yandex.ru', 'yandex', - referrer_domain == 'quora.com', 'quora', - referrer_domain 
== 'www.quora.com', 'quora', - referrer_domain == 'linkedin.com', 'linkedin', - referrer_domain == 'www.linkedin.com', 'linkedin', - startsWith(referrer_domain, 'http://localhost:'), 'localhost', - referrer_domain - ) AS blended_source, - - countIf(events.event == '$pageview') AS num_pageviews, - countIf(events.event == '$autocapture') AS num_autocaptures, - -- in v1 we'd also want to count whether there were any conversion events - - any(events.person_id) as person_id, - -- definition of a GA4 bounce from here https://support.google.com/analytics/answer/12195621?hl=en - (num_autocaptures == 0 AND num_pageviews <= 1 AND duration_s < 10) AS is_bounce -FROM - events -WHERE - session_id IS NOT NULL -AND - events.timestamp >= now() - INTERVAL 8 DAY -GROUP BY - events.properties.`$session_id` -HAVING - min_timestamp >= now() - INTERVAL 7 DAY -) - -, - -bounce_rate_cte AS ( -SELECT session_cte.earliest_pathname, - avg(session_cte.is_bounce) as bounce_rate -FROM session_cte -GROUP BY earliest_pathname -) - - - -SELECT scroll_depth_cte.pathname as pathname, -scroll_depth_cte.total_pageviews as total_pageviews, -scroll_depth_cte.unique_visitors as unique_visitors, -scroll_depth_cte.scroll_gt80_percentage as scroll_gt80_percentage, -scroll_depth_cte.average_scroll_percentage as average_scroll_percentage, -bounce_rate_cte.bounce_rate as bounce_rate + pathname.pathname as pathname, + pathname.total_pageviews as total_pageviews, + pathname.unique_visitors as unique_visitors, + pathname.scroll_gt80_percentage as scroll_gt80_percentage, + pathname.average_scroll_percentage as average_scroll_percentage, + bounce_rate.bounce_rate as bounce_rate FROM - scroll_depth_cte LEFT OUTER JOIN bounce_rate_cte -ON scroll_depth_cte.pathname = bounce_rate_cte.earliest_pathname -ORDER BY total_pageviews DESC + {pathname_query} AS pathname +LEFT OUTER JOIN + ( + SELECT + session.earliest_pathname, + avg(session.is_bounce) as bounce_rate + FROM + {session_query} AS session + GROUP BY + session.earliest_pathname + ) AS bounce_rate +ON + pathname.pathname = bounce_rate.earliest_pathname +ORDER BY + total_pageviews DESC """, timings=self.timings, + placeholders={"pathname_query": pathname_query, "session_query": session_query}, ) return top_sources_query diff --git a/posthog/hogql_queries/web_analytics/top_sources.py b/posthog/hogql_queries/web_analytics/top_sources.py index 2762627c6002d..2071eae4d5472 100644 --- a/posthog/hogql_queries/web_analytics/top_sources.py +++ b/posthog/hogql_queries/web_analytics/top_sources.py @@ -3,8 +3,9 @@ from posthog.hogql import ast from posthog.hogql.parser import parse_select from posthog.hogql.query import execute_hogql_query -from posthog.hogql_queries.web_analytics.web_analytics_query_runner import WebAnalyticsQueryRunner from posthog.hogql_queries.utils.query_date_range import QueryDateRange +from posthog.hogql_queries.web_analytics.ctes import SESSION_CTE +from posthog.hogql_queries.web_analytics.web_analytics_query_runner import WebAnalyticsQueryRunner from posthog.models.filters.mixins.utils import cached_property from posthog.schema import WebTopSourcesQuery, WebTopSourcesQueryResponse @@ -14,88 +15,18 @@ class WebTopSourcesQueryRunner(WebAnalyticsQueryRunner): query_type = WebTopSourcesQuery def to_query(self) -> ast.SelectQuery | ast.SelectUnionQuery: + with self.timings.measure("session_query"): + session_query = parse_select(SESSION_CTE, timings=self.timings) with self.timings.measure("top_sources_query"): top_sources_query = parse_select( """ -WITH - -session_cte AS ( -SELECT 
- events.properties.`$session_id` AS session_id, - min(events.timestamp) AS min_timestamp, - max(events.timestamp) AS max_timestamp, - dateDiff('second', min_timestamp, max_timestamp) AS duration_s, - - -- create a tuple so that these are grouped in the same order, see https://github.com/ClickHouse/ClickHouse/discussions/42338 - groupArray((events.timestamp, events.properties.`$referrer`, events.properties.`$pathname`, events.properties.utm_source)) AS tuple_array, - arrayFirstIndex(x -> tupleElement(x, 1) == min_timestamp, tuple_array) as index_of_earliest, - arrayFirstIndex(x -> tupleElement(x, 1) == max_timestamp, tuple_array) as index_of_latest, - tupleElement(arrayElement( - tuple_array, - index_of_earliest - ), 2) AS earliest_referrer, - tupleElement(arrayElement( - tuple_array, - index_of_earliest - ), 3) AS earliest_pathname, - tupleElement(arrayElement( - tuple_array, - index_of_earliest - ), 4) AS earliest_utm_source, - - if(domain(earliest_referrer) = '', earliest_referrer, domain(earliest_referrer)) AS referrer_domain, - multiIf( - earliest_utm_source IS NOT NULL, earliest_utm_source, - -- This will need to be an approach that scales better - referrer_domain == 'app.posthog.com', 'posthog', - referrer_domain == 'eu.posthog.com', 'posthog', - referrer_domain == 'posthog.com', 'posthog', - referrer_domain == 'www.google.com', 'google', - referrer_domain == 'www.google.co.uk', 'google', - referrer_domain == 'www.google.com.hk', 'google', - referrer_domain == 'www.google.de', 'google', - referrer_domain == 't.co', 'twitter', - referrer_domain == 'github.com', 'github', - referrer_domain == 'duckduckgo.com', 'duckduckgo', - referrer_domain == 'www.bing.com', 'bing', - referrer_domain == 'bing.com', 'bing', - referrer_domain == 'yandex.ru', 'yandex', - referrer_domain == 'quora.com', 'quora', - referrer_domain == 'www.quora.com', 'quora', - referrer_domain == 'linkedin.com', 'linkedin', - referrer_domain == 'www.linkedin.com', 'linkedin', - startsWith(referrer_domain, 'http://localhost:'), 'localhost', - referrer_domain - ) AS blended_source, - - countIf(events.event == '$pageview') AS num_pageviews, - countIf(events.event == '$autocapture') AS num_autocaptures, - -- in v1 we'd also want to count whether there were any conversion events - - any(events.person_id) as person_id, - -- definition of a GA4 bounce from here https://support.google.com/analytics/answer/12195621?hl=en - (num_autocaptures == 0 AND num_pageviews <= 1 AND duration_s < 10) AS is_bounce -FROM - events -WHERE - session_id IS NOT NULL -AND - events.timestamp >= now() - INTERVAL 8 DAY -GROUP BY - events.properties.`$session_id` -HAVING - min_timestamp >= now() - INTERVAL 7 DAY -) - - - SELECT blended_source, count(num_pageviews) as total_pageviews, count(DISTINCT person_id) as unique_visitors, avg(is_bounce) AS bounce_rate FROM - session_cte + {session_query} WHERE blended_source IS NOT NULL GROUP BY blended_source @@ -104,6 +35,7 @@ def to_query(self) -> ast.SelectQuery | ast.SelectUnionQuery: LIMIT 100 """, timings=self.timings, + placeholders={"session_query": session_query}, ) return top_sources_query diff --git a/posthog/schema.py b/posthog/schema.py index b11f3af2a24bf..8a2da426b1786 100644 --- a/posthog/schema.py +++ b/posthog/schema.py @@ -468,6 +468,19 @@ class TrendsQueryResponse(BaseModel): timings: Optional[List[QueryTiming]] = None +class WebOverviewStatsQueryResponse(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + columns: Optional[List] = None + is_cached: Optional[bool] = None + 
last_refresh: Optional[str] = None + next_allowed_client_refresh: Optional[str] = None + result: List + timings: Optional[List[QueryTiming]] = None + types: Optional[List] = None + + class WebTopClicksQueryResponse(BaseModel): model_config = ConfigDict( extra="forbid", @@ -693,6 +706,16 @@ class TimeToSeeDataSessionsQuery(BaseModel): teamId: Optional[float] = Field(default=None, description="Project to filter on. Defaults to current project") +class WebOverviewStatsQuery(BaseModel): + model_config = ConfigDict( + extra="forbid", + ) + dateRange: Optional[DateRange] = None + filters: Any + kind: Literal["WebOverviewStatsQuery"] = "WebOverviewStatsQuery" + response: Optional[WebOverviewStatsQueryResponse] = None + + class WebTopClicksQuery(BaseModel): model_config = ConfigDict( extra="forbid", @@ -887,6 +910,9 @@ class HogQLQuery(BaseModel): kind: Literal["HogQLQuery"] = "HogQLQuery" query: str response: Optional[HogQLQueryResponse] = Field(default=None, description="Cached query response") + values: Optional[Dict[str, Any]] = Field( + default=None, description="Constant values that can be referenced with the {placeholder} syntax in the query" + ) class PersonsNode(BaseModel): @@ -1062,6 +1088,7 @@ class DataTableNode(BaseModel): PersonsNode, HogQLQuery, TimeToSeeDataSessionsQuery, + WebOverviewStatsQuery, WebTopSourcesQuery, WebTopClicksQuery, WebTopPagesQuery, @@ -1342,6 +1369,7 @@ class Model(RootModel): HogQLQuery, HogQLMetadata, TimeToSeeDataSessionsQuery, + WebOverviewStatsQuery, WebTopSourcesQuery, WebTopClicksQuery, WebTopPagesQuery, diff --git a/tailwind.config.js b/tailwind.config.js index 0d535f72c8a60..661c4211f7756 100644 --- a/tailwind.config.js +++ b/tailwind.config.js @@ -139,13 +139,13 @@ module.exports = { // 'gridAutoColumns', // The grid-auto-columns utilities like auto-cols-min // 'gridAutoFlow', // The grid-auto-flow utilities like grid-flow-dense // 'gridAutoRows', // The grid-auto-rows utilities like auto-rows-min - // 'gridColumn', // The grid-column utilities like col-span-6 - // 'gridColumnEnd', // The grid-column-end utilities like col-end-7 - // 'gridColumnStart', // The grid-column-start utilities like col-start-7 - // 'gridRow', // The grid-row utilities like row-span-3 - // 'gridRowEnd', // The grid-row-end utilities like row-end-4 - // 'gridRowStart', // The grid-row-start utilities like row-start-4 - // 'gridTemplateColumns', // The grid-template-columns utilities like grid-cols-7 + 'gridColumn', // The grid-column utilities like col-span-6 + 'gridColumnEnd', // The grid-column-end utilities like col-end-7 + 'gridColumnStart', // The grid-column-start utilities like col-start-7 + 'gridRow', // The grid-row utilities like row-span-3 + 'gridRowEnd', // The grid-row-end utilities like row-end-4 + 'gridRowStart', // The grid-row-start utilities like row-start-4 + 'gridTemplateColumns', // The grid-template-columns utilities like grid-cols-7 // 'gridTemplateRows', // The grid-template-rows utilities like grid-rows-4 'height', // The height utilities like h-72 // 'hueRotate', // The hue-rotate utilities like hue-rotate-30
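The schema change above gives HogQLQuery an optional `values` dict; process_query converts each entry to an ast.Constant and passes the dict to execute_hogql_query as placeholders, so caller-supplied constants are bound rather than string-interpolated into the SQL. A minimal sketch of the call shape, using only the fields and behaviour shown in this diff (the literal values are illustrative):

```python
# Sketch: binding constants into a HogQL query via the new `values` field.
from posthog.schema import HogQLQuery

query = HogQLQuery(
    query="select count() from events where distinct_id = {random_uuid}",
    values={"random_uuid": "some-distinct-id"},  # illustrative value
)
# process_query(team, query.model_dump()) turns each value into an
# ast.Constant and forwards it to execute_hogql_query as a placeholder.
```
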