diff --git a/frontend/src/lib/constants.tsx b/frontend/src/lib/constants.tsx
index cbb9632310008..54275bbf8033b 100644
--- a/frontend/src/lib/constants.tsx
+++ b/frontend/src/lib/constants.tsx
@@ -241,6 +241,7 @@ export const FEATURE_FLAGS = {
BILLING_SKIP_FORECASTING: 'billing-skip-forecasting', // owner: @zach
EXPERIMENT_STATS_V2: 'experiment-stats-v2', // owner: @danielbachhuber #team-experiments
WEB_ANALYTICS_PERIOD_COMPARISON: 'web-analytics-period-comparison', // owner: @rafaeelaudibert #team-web-analytics
+ WEB_ANALYTICS_CONVERSION_GOAL_FILTERS: 'web-analytics-conversion-goal-filters', // owner: @rafaeelaudibert #team-web-analytics
} as const
export type FeatureFlagKey = (typeof FEATURE_FLAGS)[keyof typeof FEATURE_FLAGS]
diff --git a/frontend/src/queries/schema.json b/frontend/src/queries/schema.json
index ec98b627d4bd2..7337e86b5f1b6 100644
--- a/frontend/src/queries/schema.json
+++ b/frontend/src/queries/schema.json
@@ -12697,6 +12697,17 @@
"$ref": "#/definitions/CompareFilter",
"description": "Compare to date range"
},
+ "conversionGoal": {
+ "anyOf": [
+ {
+ "$ref": "#/definitions/WebAnalyticsConversionGoal"
+ },
+ {
+ "type": "null"
+ }
+ ],
+        "description": "Optional conversion goal to compute conversion metrics against"
+ },
"dateRange": {
"$ref": "#/definitions/InsightDateRange",
"description": "Date range for the query"
diff --git a/frontend/src/queries/schema.ts b/frontend/src/queries/schema.ts
index 84816924cd351..8171185712d97 100644
--- a/frontend/src/queries/schema.ts
+++ b/frontend/src/queries/schema.ts
@@ -910,6 +910,8 @@ export interface TrendsQuery extends InsightsQueryBase {
breakdownFilter?: BreakdownFilter
/** Compare to date range */
compareFilter?: CompareFilter
+    /** Optional conversion goal to compute conversion metrics against */
+ conversionGoal?: WebAnalyticsConversionGoal | null
}
export type AssistantArrayPropertyFilterOperator = PropertyOperator.Exact | PropertyOperator.IsNot
diff --git a/frontend/src/scenes/web-analytics/tiles/WebAnalyticsRecordings.tsx b/frontend/src/scenes/web-analytics/tiles/WebAnalyticsRecordings.tsx
index ee32168dd7817..b04d7cdaa8953 100644
--- a/frontend/src/scenes/web-analytics/tiles/WebAnalyticsRecordings.tsx
+++ b/frontend/src/scenes/web-analytics/tiles/WebAnalyticsRecordings.tsx
@@ -16,6 +16,7 @@ export function WebAnalyticsRecordingsTile({ tile }: { tile: ReplayTile }): JSX.
const { layout } = tile
const { replayFilters, webAnalyticsFilters } = useValues(webAnalyticsLogic)
const { currentTeam } = useValues(teamLogic)
+
const sessionRecordingsListLogicInstance = sessionRecordingsPlaylistLogic({
logicKey: 'webAnalytics',
filters: replayFilters,
diff --git a/frontend/src/scenes/web-analytics/tiles/WebAnalyticsTile.tsx b/frontend/src/scenes/web-analytics/tiles/WebAnalyticsTile.tsx
index 63ef78e423bce..cda07bc69ee55 100644
--- a/frontend/src/scenes/web-analytics/tiles/WebAnalyticsTile.tsx
+++ b/frontend/src/scenes/web-analytics/tiles/WebAnalyticsTile.tsx
@@ -321,6 +321,11 @@ export const webAnalyticsDataTableQueryContext: QueryContext = {
render: VariationCell(),
align: 'right',
},
+ unique_conversions: {
+ title: Unique Conversions ,
+ render: VariationCell(),
+ align: 'right',
+ },
conversion_rate: {
title: Conversion Rate ,
render: VariationCell({ isPercentage: true }),
diff --git a/frontend/src/scenes/web-analytics/webAnalyticsLogic.tsx b/frontend/src/scenes/web-analytics/webAnalyticsLogic.tsx
index cec7aad2059ee..a535b5b54ed76 100644
--- a/frontend/src/scenes/web-analytics/webAnalyticsLogic.tsx
+++ b/frontend/src/scenes/web-analytics/webAnalyticsLogic.tsx
@@ -45,6 +45,7 @@ import {
PropertyOperator,
RecordingUniversalFilters,
RetentionPeriod,
+ UniversalFiltersGroupValue,
} from '~/types'
import type { webAnalyticsLogicType } from './webAnalyticsLogicType'
@@ -622,6 +623,9 @@ export const webAnalyticsLogic = kea([
},
compareFilter: compareFilter || { compare: false },
filterTestAccounts,
+ conversionGoal: featureFlags[FEATURE_FLAGS.WEB_ANALYTICS_CONVERSION_GOAL_FILTERS]
+ ? conversionGoal
+ : undefined,
properties: webAnalyticsFilters,
},
hidePersonsModal: true,
@@ -662,6 +666,9 @@ export const webAnalyticsLogic = kea([
compareFilter,
limit: 10,
filterTestAccounts,
+ conversionGoal: featureFlags[FEATURE_FLAGS.WEB_ANALYTICS_CONVERSION_GOAL_FILTERS]
+ ? conversionGoal
+ : undefined,
...(source || {}),
},
embedded: false,
@@ -783,14 +790,21 @@ export const webAnalyticsLogic = kea([
accessed in your application, regardless of when they were
accessed through the lifetime of a user session.
-
- The{' '}
-
- bounce rate
- {' '}
- indicates the percentage of users who left your page immediately
- after visiting without capturing any event.
-
+ {conversionGoal ? (
+
+ The conversion rate is the percentage of users who completed
+ the conversion goal in this specific path.
+
+ ) : (
+
+ The{' '}
+
+ bounce rate
+ {' '}
+ indicates the percentage of users who left your page
+ immediately after visiting without capturing any event.
+
+ )}
),
},
@@ -814,8 +828,17 @@ export const webAnalyticsLogic = kea([
title: 'Entry Path',
description: (
- Entry paths are the paths a user session started, i.e. the first
- path they saw when they opened your website.
+
+ Entry paths are the paths a user session started, i.e. the first
+ path they saw when they opened your website.
+
+ {conversionGoal && (
+
+                                The conversion rate is the percentage of users who
+                                completed the conversion goal in sessions that
+                                started on this specific path.
+
+ )}
),
},
@@ -862,6 +885,11 @@ export const webAnalyticsLogic = kea([
sampling,
limit: 10,
filterTestAccounts,
+ conversionGoal: featureFlags[
+ FEATURE_FLAGS.WEB_ANALYTICS_CONVERSION_GOAL_FILTERS
+ ]
+ ? conversionGoal
+ : undefined,
stripQueryParams: shouldStripQueryParams,
},
embedded: false,
@@ -1136,6 +1164,11 @@ export const webAnalyticsLogic = kea([
trendsFilter: {
display: ChartDisplayType.WorldMap,
},
+ conversionGoal: featureFlags[
+ FEATURE_FLAGS.WEB_ANALYTICS_CONVERSION_GOAL_FILTERS
+ ]
+ ? conversionGoal
+ : undefined,
filterTestAccounts,
properties: webAnalyticsFilters,
},
@@ -1183,63 +1216,66 @@ export const webAnalyticsLogic = kea([
],
}
: null,
- {
- kind: 'query',
- tileId: TileId.RETENTION,
- title: 'Retention',
- layout: {
- colSpanClassName: 'md:col-span-2',
- },
- query: {
- kind: NodeKind.InsightVizNode,
- source: {
- kind: NodeKind.RetentionQuery,
- properties: webAnalyticsFilters,
- dateRange,
- filterTestAccounts,
- retentionFilter: {
- retentionType: RETENTION_FIRST_TIME,
- retentionReference: 'total',
- totalIntervals: isGreaterThanMd ? 8 : 5,
- period: RetentionPeriod.Week,
- },
- },
- vizSpecificOptions: {
- [InsightType.RETENTION]: {
- hideLineGraph: true,
- hideSizeColumn: !isGreaterThanMd,
- useSmallLayout: !isGreaterThanMd,
- },
- },
- embedded: true,
- },
- insightProps: createInsightProps(TileId.RETENTION),
- canOpenInsight: false,
- canOpenModal: true,
- docs: {
- url: 'https://posthog.com/docs/web-analytics/dashboard#retention',
- title: 'Retention',
- description: (
- <>
-
-
- Retention creates a cohort of unique users who performed any event for the
- first time in the last week. It then tracks the percentage of users who
- return to perform any event in the following weeks.
-
-
- You want the numbers numbers to be the highest possible, suggesting that
- people that come to your page continue coming to your page - and performing
- an actions. Also, the further down the table the higher the numbers should
- be (or at least as high), which would indicate that you're either increasing
- or keeping your retention at the same level.
-
-
- >
- ),
- },
- },
- featureFlags[FEATURE_FLAGS.WEB_ANALYTICS_CONVERSION_GOALS]
+ !conversionGoal
+ ? {
+ kind: 'query',
+ tileId: TileId.RETENTION,
+ title: 'Retention',
+ layout: {
+ colSpanClassName: 'md:col-span-2',
+ },
+ query: {
+ kind: NodeKind.InsightVizNode,
+ source: {
+ kind: NodeKind.RetentionQuery,
+ properties: webAnalyticsFilters,
+ dateRange,
+ filterTestAccounts,
+ retentionFilter: {
+ retentionType: RETENTION_FIRST_TIME,
+ retentionReference: 'total',
+ totalIntervals: isGreaterThanMd ? 8 : 5,
+ period: RetentionPeriod.Week,
+ },
+ },
+ vizSpecificOptions: {
+ [InsightType.RETENTION]: {
+ hideLineGraph: true,
+ hideSizeColumn: !isGreaterThanMd,
+ useSmallLayout: !isGreaterThanMd,
+ },
+ },
+ embedded: true,
+ },
+ insightProps: createInsightProps(TileId.RETENTION),
+ canOpenInsight: false,
+ canOpenModal: true,
+ docs: {
+ url: 'https://posthog.com/docs/web-analytics/dashboard#retention',
+ title: 'Retention',
+ description: (
+ <>
+
+
+ Retention creates a cohort of unique users who performed any event for
+ the first time in the last week. It then tracks the percentage of
+ users who return to perform any event in the following weeks.
+
+
+                                          You want the numbers to be the highest possible, suggesting
+                                          that people who come to your page continue coming to your page - and
+                                          performing actions. Also, the further down the table the higher the
+ numbers should be (or at least as high), which would indicate that
+ you're either increasing or keeping your retention at the same level.
+
+
+ >
+ ),
+ },
+ }
+ : null,
+                // Hidden when a conversionGoal is set, because the values wouldn't be representative
+ !conversionGoal && featureFlags[FEATURE_FLAGS.WEB_ANALYTICS_CONVERSION_GOALS]
? {
kind: 'query',
tileId: TileId.GOALS,
@@ -1290,7 +1326,7 @@ export const webAnalyticsLogic = kea([
kind: 'replay',
tileId: TileId.REPLAY,
layout: {
- colSpanClassName: 'md:col-span-1',
+ colSpanClassName: conversionGoal ? 'md:col-span-full' : 'md:col-span-1',
},
docs: {
url: 'https://posthog.com/docs/session-replay',
@@ -1300,7 +1336,7 @@ export const webAnalyticsLogic = kea([
},
}
: null,
- featureFlags[FEATURE_FLAGS.ERROR_TRACKING]
+ !conversionGoal && featureFlags[FEATURE_FLAGS.ERROR_TRACKING]
? {
kind: 'error_tracking',
tileId: TileId.ERROR_TRACKING,
@@ -1433,12 +1469,31 @@ export const webAnalyticsLogic = kea([
},
],
replayFilters: [
- (s) => [s.webAnalyticsFilters, s.dateFilter, s.shouldFilterTestAccounts],
+ (s) => [s.webAnalyticsFilters, s.dateFilter, s.shouldFilterTestAccounts, s.conversionGoal, s.featureFlags],
(
webAnalyticsFilters: WebAnalyticsPropertyFilters,
dateFilter,
- shouldFilterTestAccounts
+ shouldFilterTestAccounts,
+ conversionGoal,
+ featureFlags
): RecordingUniversalFilters => {
+ const filters: UniversalFiltersGroupValue[] = [...webAnalyticsFilters]
+ if (conversionGoal && featureFlags[FEATURE_FLAGS.WEB_ANALYTICS_CONVERSION_GOAL_FILTERS]) {
+ if ('actionId' in conversionGoal) {
+ filters.push({
+ id: conversionGoal.actionId,
+ name: String(conversionGoal.actionId),
+ type: 'actions',
+ })
+ } else if ('customEventName' in conversionGoal) {
+ filters.push({
+ id: conversionGoal.customEventName,
+ name: conversionGoal.customEventName,
+ type: 'events',
+ })
+ }
+ }
+
return {
filter_test_accounts: shouldFilterTestAccounts,
@@ -1449,7 +1504,7 @@ export const webAnalyticsLogic = kea([
values: [
{
type: FilterLogicalOperator.And,
- values: webAnalyticsFilters || [],
+ values: filters,
},
],
},
diff --git a/posthog/hogql_queries/web_analytics/stats_table.py b/posthog/hogql_queries/web_analytics/stats_table.py
index 1633c4389879d..a85e1a47dc7b3 100644
--- a/posthog/hogql_queries/web_analytics/stats_table.py
+++ b/posthog/hogql_queries/web_analytics/stats_table.py
@@ -41,7 +41,9 @@ def __init__(self, *args, **kwargs):
def to_query(self) -> ast.SelectQuery:
if self.query.breakdownBy == WebStatsBreakdown.PAGE:
- if self.query.includeScrollDepth and self.query.includeBounceRate:
+ if self.query.conversionGoal:
+ return self.to_main_query(self._counts_breakdown_value())
+ elif self.query.includeScrollDepth and self.query.includeBounceRate:
return self.to_path_scroll_bounce_query()
elif self.query.includeBounceRate:
return self.to_path_bounce_query()
@@ -50,190 +52,77 @@ def to_query(self) -> ast.SelectQuery:
if self.query.includeBounceRate:
return self.to_entry_bounce_query()
- if self._has_session_properties():
- return self._to_main_query_with_session_properties()
+ return self.to_main_query(self._counts_breakdown_value())
- return self.to_main_query()
-
- def to_main_query(self) -> ast.SelectQuery:
+ def to_main_query(self, breakdown) -> ast.SelectQuery:
with self.timings.measure("stats_table_query"):
- query = parse_select(
- """
-WITH
- start_timestamp >= {date_from} AND start_timestamp < {date_to} AS current_period_segment,
- start_timestamp >= {date_from_previous_period} AND start_timestamp < {date_from} AS previous_period_segment
-SELECT
- {processed_breakdown_value} AS "context.columns.breakdown_value",
- tuple(
- uniqIf(filtered_person_id, current_period_segment),
- uniqIf(filtered_person_id, previous_period_segment)
- ) AS "context.columns.visitors",
- tuple(
- sumIf(filtered_pageview_count, current_period_segment),
- sumIf(filtered_pageview_count, previous_period_segment)
- ) AS "context.columns.views"
-FROM (
- SELECT
- any(person_id) AS filtered_person_id,
- count() AS filtered_pageview_count,
- {breakdown_value} AS breakdown_value,
- min(session.$start_timestamp) as start_timestamp
- FROM events
- WHERE and(
- timestamp >= {date_from_previous_period},
- timestamp < {date_to},
- events.event == '$pageview',
- {all_properties},
- {where_breakdown}
- )
- GROUP BY events.`$session_id`, breakdown_value
-)
-GROUP BY "context.columns.breakdown_value"
-ORDER BY "context.columns.visitors" DESC,
-"context.columns.views" DESC,
-"context.columns.breakdown_value" ASC
-""",
- timings=self.timings,
- placeholders={
- "breakdown_value": self._counts_breakdown_value(),
- "processed_breakdown_value": self._processed_breakdown_value(),
- "where_breakdown": self.where_breakdown(),
- "all_properties": self._all_properties(),
- "date_from_previous_period": self._date_from_previous_period(),
- "date_from": self._date_from(),
- "date_to": self._date_to(),
- },
- )
-
- assert isinstance(query, ast.SelectQuery)
-
- if self._include_extra_aggregation_value():
- query.select.append(self._extra_aggregation_value())
-
- return query
+ # Base selects, always returns the breakdown value, and the total number of visitors
+ selects = [
+ ast.Alias(alias="context.columns.breakdown_value", expr=self._processed_breakdown_value()),
+ self._period_comparison_tuple("filtered_person_id", "context.columns.visitors", "uniq"),
+ ]
+
+ if self.query.conversionGoal is not None:
+ selects.extend(
+ [
+ self._period_comparison_tuple("conversion_count", "context.columns.total_conversions", "sum"),
+ self._period_comparison_tuple(
+ "conversion_person_id", "context.columns.unique_conversions", "uniq"
+ ),
+ ast.Alias(
+ alias="context.columns.conversion_rate",
+ expr=ast.Tuple(
+ exprs=[
+ parse_expr(
+ "if(`context.columns.visitors`.1 = 0, NULL, `context.columns.unique_conversions`.1 / `context.columns.visitors`.1)"
+ ),
+ parse_expr(
+ "if(`context.columns.visitors`.2 = 0, NULL, `context.columns.unique_conversions`.2 / `context.columns.visitors`.2)"
+ ),
+ ]
+ ),
+ ),
+ ]
+ )
+ else:
+ selects.append(
+ self._period_comparison_tuple("filtered_pageview_count", "context.columns.views", "sum"),
+ )
- def _to_main_query_with_session_properties(self) -> ast.SelectQuery:
- with self.timings.measure("stats_table_query"):
- query = parse_select(
- """
-WITH
- start_timestamp >= {date_from} AND start_timestamp < {date_to} AS current_period_segment,
- start_timestamp >= {date_from_previous_period} AND start_timestamp < {date_from} AS previous_period_segment
-SELECT
- {processed_breakdown_value} AS "context.columns.breakdown_value",
- tuple(
- uniqIf(filtered_person_id, current_period_segment),
- uniqIf(filtered_person_id, previous_period_segment)
- ) AS "context.columns.visitors",
- tuple(
- sumIf(filtered_pageview_count, current_period_segment),
- sumIf(filtered_pageview_count, previous_period_segment)
- ) AS "context.columns.views"
-FROM (
- SELECT
- any(person_id) AS filtered_person_id,
- count() AS filtered_pageview_count,
- {breakdown_value} AS breakdown_value,
- session.session_id AS session_id,
- min(session.$start_timestamp) as start_timestamp
- FROM events
- WHERE and(
- timestamp >= {date_from_previous_period},
- timestamp < {date_to},
- events.event == '$pageview',
- {event_properties},
- {session_properties},
- {where_breakdown}
- )
- GROUP BY session_id, breakdown_value
-)
-GROUP BY "context.columns.breakdown_value"
-ORDER BY "context.columns.visitors" DESC,
-"context.columns.views" DESC,
-"context.columns.breakdown_value" ASC
-""",
- timings=self.timings,
- placeholders={
- "breakdown_value": self._counts_breakdown_value(),
- "processed_breakdown_value": self._processed_breakdown_value(),
- "where_breakdown": self.where_breakdown(),
- "event_properties": self._event_properties(),
- "session_properties": self._session_properties(),
- "date_from_previous_period": self._date_from_previous_period(),
- "date_from": self._date_from(),
- "date_to": self._date_to(),
- },
+ if self._include_extra_aggregation_value():
+ selects.append(self._extra_aggregation_value())
+
+ query = ast.SelectQuery(
+ select=selects,
+ select_from=ast.JoinExpr(table=self._main_inner_query(breakdown)),
+ group_by=[ast.Field(chain=["context.columns.breakdown_value"])],
+ order_by=[
+ ast.OrderExpr(expr=ast.Field(chain=["context.columns.visitors"]), order="DESC"),
+ ast.OrderExpr(
+ expr=ast.Field(
+ chain=[
+ "context.columns.views"
+ if self.query.conversionGoal is None
+ else "context.columns.total_conversions"
+ ]
+ ),
+ order="DESC",
+ ),
+ ast.OrderExpr(expr=ast.Field(chain=["context.columns.breakdown_value"]), order="ASC"),
+ ],
)
- assert isinstance(query, ast.SelectQuery)
-
- if self.query.breakdownBy == WebStatsBreakdown.LANGUAGE:
- query.select.append(self._extra_aggregation_value())
return query
def to_entry_bounce_query(self) -> ast.SelectQuery:
- with self.timings.measure("stats_table_query"):
- query = parse_select(
- """
-WITH
- start_timestamp >= {date_from} AND start_timestamp < {date_to} AS current_period_segment,
- start_timestamp >= {date_from_previous_period} AND start_timestamp < {date_from} AS previous_period_segment
-SELECT
- breakdown_value AS "context.columns.breakdown_value",
- tuple(
- uniqIf(filtered_person_id, current_period_segment),
- uniqIf(filtered_person_id, previous_period_segment)
- ) AS "context.columns.visitors",
- tuple(
- sumIf(filtered_pageview_count, current_period_segment),
- sumIf(filtered_pageview_count, previous_period_segment)
- ) AS "context.columns.views",
- tuple(
- avgIf(is_bounce, current_period_segment),
- avgIf(is_bounce, previous_period_segment)
- ) AS "context.columns.bounce_rate",
-FROM (
- SELECT
- {bounce_breakdown} AS breakdown_value,
- any(person_id) AS filtered_person_id,
- count() AS filtered_pageview_count,
- any(session.$is_bounce) AS is_bounce,
- session.session_id AS session_id,
- min(session.$start_timestamp) as start_timestamp
- FROM events
- WHERE and(
- timestamp >= {date_from_previous_period},
- timestamp < {date_to},
- events.event == '$pageview',
- {event_properties},
- {session_properties},
- {where_breakdown}
- )
- GROUP BY session_id, breakdown_value
-)
-GROUP BY "context.columns.breakdown_value"
-ORDER BY "context.columns.visitors" DESC,
-"context.columns.views" DESC,
-"context.columns.breakdown_value" ASC
-""",
- timings=self.timings,
- placeholders={
- "bounce_breakdown": self._bounce_entry_pathname_breakdown(),
- "where_breakdown": self.where_breakdown(),
- "session_properties": self._session_properties(),
- "event_properties": self._event_properties(),
- "date_from_previous_period": self._date_from_previous_period(),
- "date_from": self._date_from(),
- "date_to": self._date_to(),
- },
- )
- assert isinstance(query, ast.SelectQuery)
+ query = self.to_main_query(self._bounce_entry_pathname_breakdown())
+
+ if self.query.conversionGoal is None:
+ query.select.append(self._period_comparison_tuple("is_bounce", "context.columns.bounce_rate", "avg"))
+
return query
def to_path_scroll_bounce_query(self) -> ast.SelectQuery:
- if self.query.breakdownBy != WebStatsBreakdown.PAGE:
- raise NotImplementedError("Scroll depth is only supported for page breakdowns")
-
with self.timings.measure("stats_table_bounce_query"):
query = parse_select(
"""
@@ -438,6 +327,56 @@ def to_path_bounce_query(self) -> ast.SelectQuery:
assert isinstance(query, ast.SelectQuery)
return query
+ def _main_inner_query(self, breakdown):
+ query = parse_select(
+ """
+SELECT
+ any(person_id) AS filtered_person_id,
+ count() AS filtered_pageview_count,
+ {breakdown_value} AS breakdown_value,
+ session.session_id AS session_id,
+ any(session.$is_bounce) AS is_bounce,
+ min(session.$start_timestamp) as start_timestamp
+FROM events
+WHERE and(timestamp >= {date_from}, timestamp < {date_to}, {event_where}, {all_properties}, {where_breakdown})
+GROUP BY session_id, breakdown_value
+""",
+ timings=self.timings,
+ placeholders={
+ "breakdown_value": breakdown,
+ "date_from": self._date_from_previous_period(),
+ "date_to": self._date_to(),
+ "event_where": self.event_type_expr,
+ "all_properties": self._all_properties(),
+ "where_breakdown": self.where_breakdown(),
+ },
+ )
+
+ assert isinstance(query, ast.SelectQuery)
+
+ if self.conversion_count_expr and self.conversion_person_id_expr:
+ query.select.append(ast.Alias(alias="conversion_count", expr=self.conversion_count_expr))
+ query.select.append(ast.Alias(alias="conversion_person_id", expr=self.conversion_person_id_expr))
+
+ return query
+
+ def _period_comparison_tuple(self, column, alias, function_name):
+ return ast.Alias(
+ alias=alias,
+ expr=ast.Tuple(
+ exprs=[
+ self._current_period_aggregate(function_name, column),
+ self._previous_period_aggregate(function_name, column),
+ ]
+ ),
+ )
+
+ def _current_period_aggregate(self, function_name, column_name):
+ return self.period_aggregate(function_name, column_name, self._date_from(), self._date_to())
+
+ def _previous_period_aggregate(self, function_name, column_name):
+ return self.period_aggregate(function_name, column_name, self._date_from_previous_period(), self._date_from())
+
def _event_properties(self) -> ast.Expr:
properties = [
p for p in self.query.properties + self._test_account_filters if get_property_type(p) in ["event", "person"]
@@ -461,22 +400,6 @@ def map_scroll_property(property: Union[EventPropertyFilter, PersonPropertyFilte
]
return property_to_expr(properties, team=self.team, scope="event")
- def _has_session_properties(self) -> bool:
- return any(
- get_property_type(p) == "session" for p in self.query.properties + self._test_account_filters
- ) or self.query.breakdownBy in {
- WebStatsBreakdown.INITIAL_CHANNEL_TYPE,
- WebStatsBreakdown.INITIAL_REFERRING_DOMAIN,
- WebStatsBreakdown.INITIAL_UTM_SOURCE,
- WebStatsBreakdown.INITIAL_UTM_CAMPAIGN,
- WebStatsBreakdown.INITIAL_UTM_MEDIUM,
- WebStatsBreakdown.INITIAL_UTM_TERM,
- WebStatsBreakdown.INITIAL_UTM_CONTENT,
- WebStatsBreakdown.INITIAL_PAGE,
- WebStatsBreakdown.EXIT_PAGE,
- WebStatsBreakdown.INITIAL_UTM_SOURCE_MEDIUM_CAMPAIGN,
- }
-
def _session_properties(self) -> ast.Expr:
properties = [
p for p in self.query.properties + self._test_account_filters if get_property_type(p) == "session"
@@ -513,11 +436,14 @@ def calculate(self):
results,
{
0: self._join_with_aggregation_value, # breakdown_value
- 1: lambda tuple, row: (self._unsample(tuple[0], row), self._unsample(tuple[1], row)), # Views (tuple)
- 2: lambda tuple, row: (
+                1: lambda tuple, row: (  # Visitors (tuple)
+ self._unsample(tuple[0], row),
+ self._unsample(tuple[1], row),
+ ),
+                2: lambda tuple, row: (  # Views, or total conversions when a conversion goal is set (tuple)
self._unsample(tuple[0], row),
self._unsample(tuple[1], row),
- ), # Visitors (tuple)
+ ),
},
)
@@ -525,9 +451,9 @@ def calculate(self):
if self.query.breakdownBy == WebStatsBreakdown.LANGUAGE:
# Keep only first 3 columns, we don't need the aggregation value in the frontend
- results_mapped = [[column for idx, column in enumerate(row) if idx < 3] for row in results_mapped]
+ # Remove both the value and the column (used to generate table headers)
+ results_mapped = [row[:3] for row in results_mapped]
- # Remove this before returning it to the frontend
columns = (
[column for column in response.columns if column != "context.columns.aggregation_value"]
if response.columns is not None
diff --git a/posthog/hogql_queries/web_analytics/test/test_web_stats_table.py b/posthog/hogql_queries/web_analytics/test/test_web_stats_table.py
index ae4b48b0632c1..f59c95931dd07 100644
--- a/posthog/hogql_queries/web_analytics/test/test_web_stats_table.py
+++ b/posthog/hogql_queries/web_analytics/test/test_web_stats_table.py
@@ -3,7 +3,7 @@
from freezegun import freeze_time
from posthog.hogql_queries.web_analytics.stats_table import WebStatsTableQueryRunner
-from posthog.models import Cohort
+from posthog.models import Action, Cohort, Element
from posthog.models.utils import uuid7
from posthog.schema import (
DateRange,
@@ -13,6 +13,8 @@
PropertyOperator,
SessionTableVersion,
HogQLQueryModifiers,
+ CustomEventConversionGoal,
+ ActionConversionGoal,
)
from posthog.test.base import (
APIBaseTest,
@@ -38,13 +40,27 @@ def _create_events(self, data, event="$pageview"):
},
)
)
- for timestamp, session_id, pathname in timestamps:
+ for timestamp, session_id, *extra in timestamps:
+ url = None
+ elements = None
+ if event == "$pageview":
+ url = extra[0] if extra else None
+ elif event == "$autocapture":
+ elements = extra[0] if extra else None
+ properties = extra[1] if extra and len(extra) > 1 else {}
+
_create_event(
team=self.team,
event=event,
distinct_id=id,
timestamp=timestamp,
- properties={"$session_id": session_id, "$pathname": pathname},
+ properties={
+ "$session_id": session_id,
+ "$pathname": url,
+ "$current_url": url,
+ **properties,
+ },
+ elements=elements,
)
return person_result
@@ -107,6 +123,8 @@ def _run_web_stats_table_query(
include_bounce_rate=False,
include_scroll_depth=False,
properties=None,
+ action: Optional[Action] = None,
+ custom_event: Optional[str] = None,
session_table_version: SessionTableVersion = SessionTableVersion.V2,
filter_test_accounts: Optional[bool] = False,
):
@@ -119,6 +137,11 @@ def _run_web_stats_table_query(
doPathCleaning=bool(path_cleaning_filters),
includeBounceRate=include_bounce_rate,
includeScrollDepth=include_scroll_depth,
+ conversionGoal=ActionConversionGoal(actionId=action.id)
+ if action
+ else CustomEventConversionGoal(customEventName=custom_event)
+ if custom_event
+ else None,
filterTestAccounts=filter_test_accounts,
)
self.team.path_cleaning_filters = path_cleaning_filters or []
@@ -1255,3 +1278,217 @@ def test_timezone_filter_with_empty_timezone(self):
# Don't crash, treat all of them null
assert results == []
+
+ def test_conversion_goal_no_conversions(self):
+ s1 = str(uuid7("2023-12-01"))
+ self._create_events(
+ [
+ ("p1", [("2023-12-01", s1, "https://www.example.com/foo")]),
+ ]
+ )
+
+ action = Action.objects.create(
+ team=self.team,
+ name="Visited Bar",
+ steps_json=[{"event": "$pageview", "url": "https://www.example.com/bar", "url_matching": "regex"}],
+ )
+
+ response = self._run_web_stats_table_query(
+ "2023-12-01", "2023-12-03", breakdown_by=WebStatsBreakdown.PAGE, action=action
+ )
+
+ assert [["https://www.example.com/foo", (1, 0), (0, 0), (0, 0), (0, None)]] == response.results
+ assert [
+ "context.columns.breakdown_value",
+ "context.columns.visitors",
+ "context.columns.total_conversions",
+ "context.columns.unique_conversions",
+ "context.columns.conversion_rate",
+ ] == response.columns
+
+ def test_conversion_goal_one_pageview_conversion(self):
+ s1 = str(uuid7("2023-12-01"))
+ self._create_events(
+ [
+ ("p1", [("2023-12-01", s1, "https://www.example.com/foo")]),
+ ]
+ )
+
+ action = Action.objects.create(
+ team=self.team,
+ name="Visited Foo",
+ steps_json=[
+ {
+ "event": "$pageview",
+ "url": "https://www.example.com/foo",
+ "url_matching": "regex",
+ }
+ ],
+ )
+
+ response = self._run_web_stats_table_query(
+ "2023-12-01", "2023-12-03", breakdown_by=WebStatsBreakdown.PAGE, action=action
+ )
+
+        # NOTE(review): a duplicated, identical query invocation was removed
+        # here — the single run above is sufficient for the assertions below
+        # on the response's results and conversion-goal columns.
+
+ assert [["https://www.example.com/foo", (1, 0), (1, 0), (1, 0), (1, None)]] == response.results
+ assert [
+ "context.columns.breakdown_value",
+ "context.columns.visitors",
+ "context.columns.total_conversions",
+ "context.columns.unique_conversions",
+ "context.columns.conversion_rate",
+ ] == response.columns
+
+ def test_conversion_goal_one_custom_event_conversion(self):
+ s1 = str(uuid7("2023-12-01"))
+ self._create_events(
+ [
+ ("p1", [("2023-12-01", s1, "https://www.example.com/foo")]),
+ ],
+ event="custom_event",
+ )
+
+ response = self._run_web_stats_table_query(
+ "2023-12-01",
+ "2023-12-03",
+ breakdown_by=WebStatsBreakdown.INITIAL_UTM_SOURCE, # Allow the breakdown value to be non-null
+ custom_event="custom_event",
+ )
+
+ assert [[None, (1, 0), (1, 0), (1, 0), (1, None)]] == response.results
+ assert [
+ "context.columns.breakdown_value",
+ "context.columns.visitors",
+ "context.columns.total_conversions",
+ "context.columns.unique_conversions",
+ "context.columns.conversion_rate",
+ ] == response.columns
+
+ def test_conversion_goal_one_custom_action_conversion(self):
+ s1 = str(uuid7("2023-12-01"))
+ self._create_events(
+ [
+ ("p1", [("2023-12-01", s1)]),
+ ],
+ event="custom_event",
+ )
+
+ action = Action.objects.create(
+ team=self.team,
+ name="Did Custom Event",
+ steps_json=[
+ {
+ "event": "custom_event",
+ }
+ ],
+ )
+
+ response = self._run_web_stats_table_query(
+ "2023-12-01",
+ "2023-12-03",
+ breakdown_by=WebStatsBreakdown.INITIAL_UTM_SOURCE, # Allow the breakdown value to be non-null
+ action=action,
+ )
+
+ assert [[None, (1, 0), (1, 0), (1, 0), (1, None)]] == response.results
+ assert [
+ "context.columns.breakdown_value",
+ "context.columns.visitors",
+ "context.columns.total_conversions",
+ "context.columns.unique_conversions",
+ "context.columns.conversion_rate",
+ ] == response.columns
+
+ def test_conversion_goal_one_autocapture_conversion(self):
+ s1 = str(uuid7("2023-12-01"))
+ self._create_events(
+ [
+ ("p1", [("2023-12-01", s1, [Element(nth_of_type=1, nth_child=0, tag_name="button", text="Pay $10")])]),
+ ],
+ event="$autocapture",
+ )
+
+ action = Action.objects.create(
+ team=self.team,
+ name="Paid $10",
+ steps_json=[
+ {
+ "event": "$autocapture",
+ "tag_name": "button",
+ "text": "Pay $10",
+ }
+ ],
+ )
+
+ response = self._run_web_stats_table_query(
+ "2023-12-01",
+ "2023-12-03",
+ breakdown_by=WebStatsBreakdown.INITIAL_UTM_SOURCE, # Allow the breakdown value to be non-null
+ action=action,
+ )
+
+ assert [[None, (1, 0), (1, 0), (1, 0), (1, None)]] == response.results
+ assert [
+ "context.columns.breakdown_value",
+ "context.columns.visitors",
+ "context.columns.total_conversions",
+ "context.columns.unique_conversions",
+ "context.columns.conversion_rate",
+ ] == response.columns
+
+ def test_conversion_rate(self):
+ s1 = str(uuid7("2023-12-01"))
+ s2 = str(uuid7("2023-12-01"))
+ s3 = str(uuid7("2023-12-01"))
+
+ self._create_events(
+ [
+ (
+ "p1",
+ [
+ ("2023-12-01", s1, "https://www.example.com/foo"),
+ ("2023-12-01", s1, "https://www.example.com/foo"),
+ ],
+ ),
+ (
+ "p2",
+ [
+ ("2023-12-01", s2, "https://www.example.com/foo"),
+ ("2023-12-01", s2, "https://www.example.com/bar"),
+ ],
+ ),
+ ("p3", [("2023-12-01", s3, "https://www.example.com/bar")]),
+ ]
+ )
+
+ action = Action.objects.create(
+ team=self.team,
+ name="Visited Foo",
+ steps_json=[
+ {
+ "event": "$pageview",
+ "url": "https://www.example.com/foo",
+ "url_matching": "regex",
+ }
+ ],
+ )
+
+ response = self._run_web_stats_table_query(
+ "2023-12-01", "2023-12-03", breakdown_by=WebStatsBreakdown.PAGE, action=action
+ )
+
+ assert [
+ ["https://www.example.com/foo", (2, 0), (3, 0), (2, 0), (1, None)],
+ ["https://www.example.com/bar", (2, 0), (0, 0), (0, 0), (0, None)],
+ ] == response.results
+ assert [
+ "context.columns.breakdown_value",
+ "context.columns.visitors",
+ "context.columns.total_conversions",
+ "context.columns.unique_conversions",
+ "context.columns.conversion_rate",
+ ] == response.columns
diff --git a/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py b/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py
index b73772ef79a90..016e1e50e8dad 100644
--- a/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py
+++ b/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py
@@ -11,12 +11,15 @@
from posthog.caching.insights_api import BASE_MINIMUM_INSIGHT_REFRESH_INTERVAL, REDUCED_MINIMUM_INSIGHT_REFRESH_INTERVAL
from posthog.hogql import ast
from posthog.hogql.parser import parse_expr, parse_select
-from posthog.hogql.property import property_to_expr
+from posthog.hogql.property import property_to_expr, action_to_expr
from posthog.hogql.query import execute_hogql_query
from posthog.hogql_queries.query_runner import QueryRunner
from posthog.hogql_queries.utils.query_date_range import QueryDateRange
+from posthog.models import Action
from posthog.models.filters.mixins.utils import cached_property
from posthog.schema import (
+ ActionConversionGoal,
+ CustomEventConversionGoal,
EventPropertyFilter,
WebOverviewQuery,
WebStatsTableQuery,
@@ -57,6 +60,57 @@ def property_filters_without_pathname(
) -> list[Union[EventPropertyFilter, PersonPropertyFilter, SessionPropertyFilter]]:
return [p for p in self.query.properties if p.key != "$pathname"]
+ @cached_property
+ def conversion_goal_expr(self) -> Optional[ast.Expr]:
+ if isinstance(self.query.conversionGoal, ActionConversionGoal):
+ action = Action.objects.get(pk=self.query.conversionGoal.actionId, team__project_id=self.team.project_id)
+ return action_to_expr(action)
+ elif isinstance(self.query.conversionGoal, CustomEventConversionGoal):
+ return ast.CompareOperation(
+ left=ast.Field(chain=["events", "event"]),
+ op=ast.CompareOperationOp.Eq,
+ right=ast.Constant(value=self.query.conversionGoal.customEventName),
+ )
+ else:
+ return None
+
+ @cached_property
+ def conversion_count_expr(self) -> Optional[ast.Expr]:
+ if self.conversion_goal_expr:
+ return ast.Call(name="countIf", args=[self.conversion_goal_expr])
+ else:
+ return None
+
+ @cached_property
+ def conversion_person_id_expr(self) -> Optional[ast.Expr]:
+ if self.conversion_goal_expr:
+ return ast.Call(
+ name="any",
+ args=[
+ ast.Call(
+ name="if",
+ args=[
+ self.conversion_goal_expr,
+ ast.Field(chain=["events", "person_id"]),
+ ast.Constant(value=None),
+ ],
+ )
+ ],
+ )
+ else:
+ return None
+
+ @cached_property
+ def event_type_expr(self) -> ast.Expr:
+ pageview_expr = ast.CompareOperation(
+ op=ast.CompareOperationOp.Eq, left=ast.Field(chain=["event"]), right=ast.Constant(value="$pageview")
+ )
+
+ if self.conversion_goal_expr:
+ return ast.Call(name="or", args=[pageview_expr, self.conversion_goal_expr])
+ else:
+ return pageview_expr
+
def period_aggregate(self, function_name, column_name, start, end, alias=None, params=None):
expr = ast.Call(
name=function_name + "If",
diff --git a/posthog/hogql_queries/web_analytics/web_overview.py b/posthog/hogql_queries/web_analytics/web_overview.py
index 2a41455a6ad29..62149a1eb7289 100644
--- a/posthog/hogql_queries/web_analytics/web_overview.py
+++ b/posthog/hogql_queries/web_analytics/web_overview.py
@@ -5,20 +5,17 @@
from posthog.hogql import ast
from posthog.hogql.parser import parse_select
-from posthog.hogql.property import property_to_expr, get_property_type, action_to_expr
+from posthog.hogql.property import property_to_expr, get_property_type
from posthog.hogql.query import execute_hogql_query
from posthog.hogql_queries.utils.query_date_range import QueryDateRange
from posthog.hogql_queries.web_analytics.web_analytics_query_runner import (
WebAnalyticsQueryRunner,
)
-from posthog.models import Action
from posthog.models.filters.mixins.utils import cached_property
from posthog.schema import (
CachedWebOverviewQueryResponse,
WebOverviewQueryResponse,
WebOverviewQuery,
- ActionConversionGoal,
- CustomEventConversionGoal,
SessionTableVersion,
)
@@ -97,39 +94,6 @@ def session_properties(self) -> ast.Expr:
]
return property_to_expr(properties, team=self.team, scope="event")
- @cached_property
- def conversion_goal_expr(self) -> Optional[ast.Expr]:
- if isinstance(self.query.conversionGoal, ActionConversionGoal):
- action = Action.objects.get(pk=self.query.conversionGoal.actionId, team__project_id=self.team.project_id)
- return action_to_expr(action)
- elif isinstance(self.query.conversionGoal, CustomEventConversionGoal):
- return ast.CompareOperation(
- left=ast.Field(chain=["events", "event"]),
- op=ast.CompareOperationOp.Eq,
- right=ast.Constant(value=self.query.conversionGoal.customEventName),
- )
- else:
- return None
-
- @cached_property
- def conversion_person_id_expr(self) -> Optional[ast.Expr]:
- if self.conversion_goal_expr:
- return ast.Call(
- name="any",
- args=[
- ast.Call(
- name="if",
- args=[
- self.conversion_goal_expr,
- ast.Field(chain=["events", "person_id"]),
- ast.Constant(value=None),
- ],
- )
- ],
- )
- else:
- return None
-
@cached_property
def pageview_count_expression(self) -> ast.Expr:
if self.conversion_goal_expr:
@@ -146,24 +110,6 @@ def pageview_count_expression(self) -> ast.Expr:
else:
return ast.Call(name="count", args=[])
- @cached_property
- def conversion_count_expr(self) -> Optional[ast.Expr]:
- if self.conversion_goal_expr:
- return ast.Call(name="countIf", args=[self.conversion_goal_expr])
- else:
- return None
-
- @cached_property
- def event_type_expr(self) -> ast.Expr:
- pageview_expr = ast.CompareOperation(
- op=ast.CompareOperationOp.Eq, left=ast.Field(chain=["event"]), right=ast.Constant(value="$pageview")
- )
-
- if self.conversion_goal_expr and self.conversion_goal_expr != ast.Constant(value=None):
- return ast.Call(name="or", args=[pageview_expr, self.conversion_goal_expr])
- else:
- return pageview_expr
-
@cached_property
def inner_select(self) -> ast.SelectQuery:
start = self.query_date_range.previous_period_date_from_as_hogql()
diff --git a/posthog/schema.py b/posthog/schema.py
index 12f9bb871873e..d81b33c913ce2 100644
--- a/posthog/schema.py
+++ b/posthog/schema.py
@@ -5824,6 +5824,9 @@ class TrendsQuery(BaseModel):
aggregation_group_type_index: Optional[int] = Field(default=None, description="Groups aggregation")
breakdownFilter: Optional[BreakdownFilter] = Field(default=None, description="Breakdown of the events and actions")
compareFilter: Optional[CompareFilter] = Field(default=None, description="Compare to date range")
+ conversionGoal: Optional[Union[ActionConversionGoal, CustomEventConversionGoal]] = Field(
+ default=None, description="Whether we should be comparing against a specific conversion goal"
+ )
dateRange: Optional[InsightDateRange] = Field(default=None, description="Date range for the query")
filterTestAccounts: Optional[bool] = Field(
default=False, description="Exclude internal and test users by applying the respective filters"