diff --git a/frontend/src/scenes/session-recordings/SessionsRecordings.stories.tsx b/frontend/src/scenes/session-recordings/SessionsRecordings-player-success.stories.tsx
similarity index 93%
rename from frontend/src/scenes/session-recordings/SessionsRecordings.stories.tsx
rename to frontend/src/scenes/session-recordings/SessionsRecordings-player-success.stories.tsx
index 1ac9395728811..1060246c67d27 100644
--- a/frontend/src/scenes/session-recordings/SessionsRecordings.stories.tsx
+++ b/frontend/src/scenes/session-recordings/SessionsRecordings-player-success.stories.tsx
@@ -9,7 +9,6 @@ import recordingSnapshotsJson from 'scenes/session-recordings/__mocks__/recordin
import recordingMetaJson from 'scenes/session-recordings/__mocks__/recording_meta.json'
import recordingEventsJson from 'scenes/session-recordings/__mocks__/recording_events_query'
import recording_playlists from './__mocks__/recording_playlists.json'
-import { ReplayTabs } from '~/types'
const meta: Meta = {
title: 'Scenes-App/Recordings',
@@ -17,6 +16,7 @@ const meta: Meta = {
layout: 'fullscreen',
viewMode: 'story',
mockDate: '2023-02-01',
+ waitForSelector: '.PlayerFrame__content .replayer-wrapper iframe',
},
decorators: [
mswDecorator({
@@ -81,7 +81,7 @@ const meta: Meta = {
},
]
},
- '/api/projects/:team_id/session_recording_playlists/:playlist_id/recordings?limit=100': (req) => {
+ '/api/projects/:team_id/session_recording_playlists/:playlist_id/recordings': (req) => {
const playlistId = req.params.playlist_id
const response = playlistId === '1234567' ? recordings : []
return [200, { has_next: false, results: response, version: 1 }]
@@ -89,6 +89,12 @@ const meta: Meta = {
// without the session-recording-blob-replay feature flag, we only load via ClickHouse
'/api/projects/:team/session_recordings/:id/snapshots': recordingSnapshotsJson,
'/api/projects/:team/session_recordings/:id': recordingMetaJson,
+ 'api/projects/:team/notebooks': {
+ count: 0,
+ next: null,
+ previous: null,
+ results: [],
+ },
},
post: {
'/api/projects/:team/query': recordingEventsJson,
@@ -97,16 +103,10 @@ const meta: Meta = {
],
}
export default meta
-export function RecordingsList(): JSX.Element {
- useEffect(() => {
- router.actions.push(urls.replay())
- }, [])
- return <App />
-}
-export function RecordingsPlayLists(): JSX.Element {
+export function RecentRecordings(): JSX.Element {
useEffect(() => {
- router.actions.push(urls.replay(ReplayTabs.Playlists))
+ router.actions.push(urls.replay())
}, [])
 return <App />
}
diff --git a/frontend/src/scenes/session-recordings/SessionsRecordings-playlist-listing.stories.tsx b/frontend/src/scenes/session-recordings/SessionsRecordings-playlist-listing.stories.tsx
new file mode 100644
index 0000000000000..657fbccf4bc29
--- /dev/null
+++ b/frontend/src/scenes/session-recordings/SessionsRecordings-playlist-listing.stories.tsx
@@ -0,0 +1,48 @@
+import { Meta } from '@storybook/react'
+import { useEffect } from 'react'
+import { mswDecorator } from '~/mocks/browser'
+import { router } from 'kea-router'
+import { urls } from 'scenes/urls'
+import { App } from 'scenes/App'
+import recording_playlists from './__mocks__/recording_playlists.json'
+import { ReplayTabs } from '~/types'
+import recordings from 'scenes/session-recordings/__mocks__/recordings.json'
+import recordingEventsJson from 'scenes/session-recordings/__mocks__/recording_events_query'
+
+const meta: Meta = {
+ title: 'Scenes-App/Recordings',
+ parameters: {
+ layout: 'fullscreen',
+ viewMode: 'story',
+ mockDate: '2023-02-01',
+ },
+ decorators: [
+ mswDecorator({
+ get: {
+ '/api/projects/:team_id/session_recording_playlists': recording_playlists,
+ '/api/projects/:team_id/session_recordings': (req) => {
+ const version = req.url.searchParams.get('version')
+ return [
+ 200,
+ {
+ has_next: false,
+ results: recordings,
+ version,
+ },
+ ]
+ },
+ },
+ post: {
+ '/api/projects/:team/query': recordingEventsJson,
+ },
+ }),
+ ],
+}
+export default meta
+
+export function RecordingsPlayLists(): JSX.Element {
+ useEffect(() => {
+ router.actions.push(urls.replay(ReplayTabs.Playlists))
+ }, [])
+ return <App />
+}
diff --git a/frontend/src/scenes/session-recordings/__mocks__/recording_events.json b/frontend/src/scenes/session-recordings/__mocks__/recording_events.json
index f2db148045646..0afa00a98d244 100644
--- a/frontend/src/scenes/session-recordings/__mocks__/recording_events.json
+++ b/frontend/src/scenes/session-recordings/__mocks__/recording_events.json
@@ -1,6 +1,6 @@
[
{
- "id": "$pageview",
+ "id": "$pageview1",
"event": "$pageview",
"name": "$event_before_recording_starts",
"type": "events",
@@ -14,7 +14,7 @@
"elements_hash": ""
},
{
- "id": "$pageview",
+ "id": "$pageview2",
"name": "$pageview",
"event": "$pageview",
"type": "events",
diff --git a/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx b/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx
index fc5f0cc5ed158..451f1cf616f8a 100644
--- a/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx
+++ b/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx
@@ -4,16 +4,18 @@ import {
} from 'scenes/session-recordings/player/sessionRecordingPlayerLogic'
import { useActions, useValues } from 'kea'
import { LemonButton, LemonButtonProps } from 'lib/lemon-ui/LemonButton'
-import { IconComment, IconDelete, IconLink } from 'lib/lemon-ui/icons'
+import { IconComment, IconDelete, IconJournalPlus, IconLink } from 'lib/lemon-ui/icons'
import { openPlayerShareDialog } from 'scenes/session-recordings/player/share/PlayerShare'
import { PlaylistPopoverButton } from './playlist-popover/PlaylistPopover'
import { LemonDialog } from 'lib/lemon-ui/LemonDialog'
import { NotebookSelectButton } from 'scenes/notebooks/NotebookSelectButton/NotebookSelectButton'
import { NotebookNodeType } from '~/types'
+import { useNotebookNode } from 'scenes/notebooks/Nodes/notebookNodeLogic'
export function PlayerMetaLinks(): JSX.Element {
const { sessionRecordingId, logicProps } = useValues(sessionRecordingPlayerLogic)
const { setPause, deleteRecording } = useActions(sessionRecordingPlayerLogic)
+ const nodeLogic = useNotebookNode()
const getCurrentPlayerTime = (): number => {
// NOTE: We pull this value at call time as otherwise it would trigger re-renders if pulled from the hook
@@ -78,9 +80,24 @@ export function PlayerMetaLinks(): JSX.Element {
Share
- <PlaylistPopoverButton {...commonProps}>
- Pin
- </PlaylistPopoverButton>
+ {nodeLogic ? (
+ nodeLogic.props.nodeType !== NotebookNodeType.Recording ? (
+ <LemonButton
+ icon={<IconJournalPlus />}
+ size="small"
+ onClick={() => {
+ nodeLogic.actions.insertAfter({
+ type: NotebookNodeType.Recording,
+ attrs: { id: sessionRecordingId },
+ })
+ }}
+ />
+ ) : null
+ ) : (
+ <PlaylistPopoverButton {...commonProps}>
+ Pin
+ </PlaylistPopoverButton>
+ )}
{logicProps.playerKey !== 'modal' && (
{
+ it.each([
+ `@font-face { font-display: fallback; font-family: "Roboto Condensed"; font-weight: 400; font-style: normal; src: url("https://posthog.com/assets/fonts/roboto/roboto_condensed_reg-webfont.woff2?11012022") format("woff2"), url("https://posthog.com/assets/fonts/roboto/roboto_condensed_reg-webfont.woff?11012022")`,
+ `url("https://app.posthog.com/fonts/my-font.woff2")`,
+ ])('should replace font urls in stylesheets', (content: string) => {
+ expect(CorsPlugin._replaceFontCssUrls(content)).toMatchSnapshot()
+ })
+
+ it.each(['https://app.posthog.com/fonts/my-font.woff2?t=1234', 'https://app.posthog.com/fonts/my-font.ttf'])(
+ 'should replace font urls in links',
+ (content: string) => {
+ expect(CorsPlugin._replaceFontUrl(content)).toMatchSnapshot()
+ }
+ )
+
+ it.each(['https://app.posthog.com/my-image.jpeg'])(
+ 'should not replace non-font urls in links',
+ (content: string) => {
+ expect(CorsPlugin._replaceFontUrl(content)).toEqual(content)
+ }
+ )
+})
diff --git a/frontend/src/scenes/session-recordings/player/rrweb/index.ts b/frontend/src/scenes/session-recordings/player/rrweb/index.ts
new file mode 100644
index 0000000000000..f2032d070d4a0
--- /dev/null
+++ b/frontend/src/scenes/session-recordings/player/rrweb/index.ts
@@ -0,0 +1,38 @@
+import { ReplayPlugin, playerConfig } from 'rrweb/typings/types'
+
+const PROXY_URL = 'https://replay.ph-proxy.com' as const
+
+export const CorsPlugin: ReplayPlugin & {
+ _replaceFontCssUrls: (value: string) => string
+ _replaceFontUrl: (value: string) => string
+} = {
+ _replaceFontCssUrls: (value: string): string => {
+ return value.replace(
+ /url\("(https:\/\/\S*(?:.eot|.woff2|.ttf|.woff)\S*)"\)/gi,
+ `url("${PROXY_URL}/proxy?url=$1")`
+ )
+ },
+
+ _replaceFontUrl: (value: string): string => {
+ return value.replace(/^(https:\/\/\S*(?:.eot|.woff2|.ttf|.woff)\S*)$/i, `${PROXY_URL}/proxy?url=$1`)
+ },
+
+ onBuild: (node) => {
+ if (node.nodeName === 'STYLE') {
+ const styleElement = node as HTMLStyleElement
+ styleElement.innerText = CorsPlugin._replaceFontCssUrls(styleElement.innerText)
+ }
+
+ if (node.nodeName === 'LINK') {
+ const linkElement = node as HTMLLinkElement
+ linkElement.href = CorsPlugin._replaceFontUrl(linkElement.href)
+ }
+ },
+}
+
+export const COMMON_REPLAYER_CONFIG: Partial<playerConfig> = {
+ triggerFocus: false,
+ insertStyleRules: [
+ `.ph-no-capture { background-image: url("data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMTYiIGhlaWdodD0iMTYiIHZpZXdCb3g9IjAgMCAxNiAxNiIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHJlY3Qgd2lkdGg9IjE2IiBoZWlnaHQ9IjE2IiBmaWxsPSJibGFjayIvPgo8cGF0aCBkPSJNOCAwSDE2TDAgMTZWOEw4IDBaIiBmaWxsPSIjMkQyRDJEIi8+CjxwYXRoIGQ9Ik0xNiA4VjE2SDhMMTYgOFoiIGZpbGw9IiMyRDJEMkQiLz4KPC9zdmc+Cg=="); }`,
+ ],
+}
diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.test.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.test.ts
index 8854498605774..b59f9566fb9a7 100644
--- a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.test.ts
+++ b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.test.ts
@@ -203,7 +203,7 @@ describe('sessionRecordingDataLogic', () => {
kind: 'EventsQuery',
limit: 1000000,
orderBy: ['timestamp ASC'],
- personId: 11,
+ personId: '11',
properties: [{ key: '$session_id', operator: 'exact', type: 'event', value: ['2'] }],
select: [
'uuid',
diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts
index fe609a6f64f79..8db80c5d51ec7 100644
--- a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts
+++ b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts
@@ -487,7 +487,7 @@ export const sessionRecordingDataLogic = kea<sessionRecordingDataLogicType>([
],
orderBy: ['timestamp ASC'],
limit: 1000000,
- personId: person.id,
+ personId: String(person.id),
after: start.subtract(BUFFER_MS, 'ms').format(),
before: end.add(BUFFER_MS, 'ms').format(),
properties: [properties],
@@ -541,7 +541,7 @@ export const sessionRecordingDataLogic = kea<sessionRecordingDataLogicType>([
select: ['properties', 'timestamp'],
orderBy: ['timestamp ASC'],
limit: 100,
- personId: person?.id,
+ personId: String(person?.id),
after: dayjs(event.timestamp).subtract(1000, 'ms').format(),
before: dayjs(event.timestamp).add(1000, 'ms').format(),
event: existingEvent.event,
diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts
index e7668af245c1b..2ad9a9dfae4f5 100644
--- a/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts
+++ b/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts
@@ -31,7 +31,12 @@ import { SessionRecordingPlayerExplorerProps } from './view-explorer/SessionReco
import { createExportedSessionRecording } from '../file-playback/sessionRecordingFilePlaybackLogic'
import { RefObject } from 'react'
import posthog from 'posthog-js'
+import { COMMON_REPLAYER_CONFIG, CorsPlugin } from './rrweb'
import { now } from 'lib/dayjs'
+import { ReplayPlugin } from 'rrweb/typings/types'
+import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic'
+import { featureFlagLogic } from 'lib/logic/featureFlagLogic'
+import { FEATURE_FLAGS } from 'lib/constants'
export const PLAYBACK_SPEEDS = [0.5, 1, 2, 3, 4, 8, 16]
export const ONE_FRAME_MS = 100 // We don't really have frames but this feels granular enough
@@ -102,6 +107,10 @@ export const sessionRecordingPlayerLogic = kea<sessionRecordingPlayerLogicType>(
['speed', 'skipInactivitySetting'],
userLogic,
['hasAvailableFeature'],
+ preflightLogic,
+ ['preflight'],
+ featureFlagLogic,
+ ['featureFlags'],
],
actions: [
sessionRecordingDataLogic(props),
@@ -471,16 +480,24 @@ export const sessionRecordingPlayerLogic = kea(
return
}
+ const plugins: ReplayPlugin[] = []
+
+ // We don't want non-cloud products to talk to our proxy as it likely won't work, but we _do_ want local testing to work
+ if (
+ values.featureFlags[FEATURE_FLAGS.SESSION_REPLAY_CORS_PROXY] &&
+ (values.preflight?.cloud || window.location.hostname === 'localhost')
+ ) {
+ plugins.push(CorsPlugin)
+ }
+
const replayer = new Replayer(values.sessionPlayerData.snapshotsByWindowId[windowId], {
root: values.rootFrame,
- triggerFocus: false,
- insertStyleRules: [
- `.ph-no-capture { background-image: url("data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMTYiIGhlaWdodD0iMTYiIHZpZXdCb3g9IjAgMCAxNiAxNiIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KPHJlY3Qgd2lkdGg9IjE2IiBoZWlnaHQ9IjE2IiBmaWxsPSJibGFjayIvPgo8cGF0aCBkPSJNOCAwSDE2TDAgMTZWOEw4IDBaIiBmaWxsPSIjMkQyRDJEIi8+CjxwYXRoIGQ9Ik0xNiA4VjE2SDhMMTYgOFoiIGZpbGw9IiMyRDJEMkQiLz4KPC9zdmc+Cg=="); }`,
- ],
+ ...COMMON_REPLAYER_CONFIG,
// these two settings are attempts to improve performance of running two Replayers at once
// the main player and a preview player
mouseTail: props.mode !== SessionRecordingPlayerMode.Preview,
useVirtualDom: false,
+ plugins,
})
actions.setPlayer({ replayer, windowId })
diff --git a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx
index f90ee397989cb..00508be3ab649 100644
--- a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx
+++ b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx
@@ -257,11 +257,11 @@ export function RecordingsLists({
data-attr={'expand-replay-listing-from-default-seven-days-to-twenty-one'}
onClick={() => {
setFilters({
- date_from: '-21d',
+ date_from: '-30d',
})
}}
>
- Search over the last 21 days
+ Search over the last 30 days
>
) : (
diff --git a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylistScene.tsx b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylistScene.tsx
index 17c091b1d1fff..1a2842f934ff3 100644
--- a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylistScene.tsx
+++ b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylistScene.tsx
@@ -124,6 +124,7 @@ export function SessionRecordingsPlaylistScene(): JSX.Element {
 onSave={(value) => updatePlaylist({ description: value })}
diff --git a/frontend/src/scenes/surveys/Survey.tsx b/frontend/src/scenes/surveys/Survey.tsx
index d59ed4b674e69..73879b41e70ac 100644
--- a/frontend/src/scenes/surveys/Survey.tsx
+++ b/frontend/src/scenes/surveys/Survey.tsx
@@ -376,6 +376,7 @@ export function SurveyForm({ id }: { id: string }): JSX.Element {
className="w-max"
onClick={() => {
setSurveyValue('targeting_flag_filters', { groups: [] })
+ setSurveyValue('remove_targeting_flag', false)
}}
>
Add user targeting
@@ -386,19 +387,18 @@ export function SurveyForm({ id }: { id: string }): JSX.Element {
- {id === 'new' && (
- <LemonButton
- onClick={() => {
- setSurveyValue('targeting_flag_filters', undefined)
- setSurveyValue('targeting_flag', null)
- }}
- >
- Remove all user properties
- </LemonButton>
- )}
+ <LemonButton
+ onClick={() => {
+ setSurveyValue('targeting_flag_filters', null)
+ setSurveyValue('targeting_flag', null)
+ setSurveyValue('remove_targeting_flag', true)
+ }}
+ >
+ Remove all user properties
+ </LemonButton>
>
)}
diff --git a/frontend/src/scenes/surveys/SurveyView.tsx b/frontend/src/scenes/surveys/SurveyView.tsx
index 80c207d3c7644..20aae5ae7af40 100644
--- a/frontend/src/scenes/surveys/SurveyView.tsx
+++ b/frontend/src/scenes/surveys/SurveyView.tsx
@@ -103,6 +103,7 @@ export function SurveyView({ id }: { id: string }): JSX.Element {
 onSave={(value) => updateSurvey({ id: id, description: value })}
@@ -319,7 +320,9 @@ function SurveyNPSResults({ survey }: { survey: Survey }): JSX.Element {
kind: NodeKind.TrendsQuery,
dateRange: {
date_from: dayjs(survey.created_at).format('YYYY-MM-DD'),
- date_to: dayjs().format('YYYY-MM-DD'),
+ date_to: survey.end_date
+ ? dayjs(survey.end_date).format('YYYY-MM-DD')
+ : dayjs().add(1, 'day').format('YYYY-MM-DD'),
},
series: [
{
diff --git a/frontend/src/scenes/surveys/surveyLogic.tsx b/frontend/src/scenes/surveys/surveyLogic.tsx
index fc0de1bbb63c9..c8838275bb32a 100644
--- a/frontend/src/scenes/surveys/surveyLogic.tsx
+++ b/frontend/src/scenes/surveys/surveyLogic.tsx
@@ -55,7 +55,7 @@ export const defaultSurveyAppearance = {
thankYouMessageHeader: 'Thank you for your feedback!',
}
-const NEW_SURVEY: NewSurvey = {
+export const NEW_SURVEY: NewSurvey = {
id: 'new',
name: '',
description: '',
@@ -254,17 +254,29 @@ export const surveyLogic = kea<surveyLogicType>([
if (surveyId === 'new') {
return null
}
+ const startDate = dayjs((survey as Survey).created_at).format('YYYY-MM-DD')
+ const endDate = survey.end_date
+ ? dayjs(survey.end_date).add(1, 'day').format('YYYY-MM-DD')
+ : dayjs().add(1, 'day').format('YYYY-MM-DD')
- const surveysShownHogqlQuery = `select count(distinct person.id) as 'survey shown' from events where event == 'survey shown' and properties.$survey_id == '${surveyId}'`
- const surveysDismissedHogqlQuery = `select count(distinct person.id) as 'survey dismissed' from events where event == 'survey dismissed' and properties.$survey_id == '${surveyId}'`
+ const surveysShownHogqlQuery = `select count(distinct person.id) as 'survey shown' from events where event == 'survey shown' and properties.$survey_id == '${surveyId}' and timestamp >= '${startDate}' and timestamp <= '${endDate}' `
+ const surveysDismissedHogqlQuery = `select count(distinct person.id) as 'survey dismissed' from events where event == 'survey dismissed' and properties.$survey_id == '${surveyId}' and timestamp >= '${startDate}' and timestamp <= '${endDate}'`
return {
surveysShown: {
kind: NodeKind.DataTableNode,
- source: { kind: NodeKind.HogQLQuery, query: surveysShownHogqlQuery },
+ source: {
+ kind: NodeKind.HogQLQuery,
+ query: surveysShownHogqlQuery,
+ },
+ showTimings: false,
},
surveysDismissed: {
kind: NodeKind.DataTableNode,
- source: { kind: NodeKind.HogQLQuery, query: surveysDismissedHogqlQuery },
+ source: {
+ kind: NodeKind.HogQLQuery,
+ query: surveysDismissedHogqlQuery,
+ },
+ showTimings: false,
},
}
},
@@ -275,15 +287,18 @@ export const surveyLogic = kea<surveyLogicType>([
if (survey.id === 'new') {
return null
}
- const createdAt = (survey as Survey).created_at
+ const startDate = dayjs((survey as Survey).created_at).format('YYYY-MM-DD')
+ const endDate = survey.end_date
+ ? dayjs(survey.end_date).add(1, 'day').format('YYYY-MM-DD')
+ : dayjs().add(1, 'day').format('YYYY-MM-DD')
return {
kind: NodeKind.InsightVizNode,
source: {
kind: NodeKind.TrendsQuery,
dateRange: {
- date_from: dayjs(createdAt).format('YYYY-MM-DD'),
- date_to: dayjs().format('YYYY-MM-DD'),
+ date_from: startDate,
+ date_to: endDate,
},
properties: [
{
@@ -304,8 +319,17 @@ export const surveyLogic = kea<surveyLogicType>([
surveyMultipleChoiceQuery: [
(s) => [s.survey],
(survey): DataTableNode | null => {
- const singleChoiceQuery = `select count(), properties.$survey_response as choice from events where event == 'survey sent' and properties.$survey_id == '${survey.id}' group by choice order by count() desc`
- const multipleChoiceQuery = `select count(), arrayJoin(JSONExtractArrayRaw(properties, '$survey_response')) as choice from events where event == 'survey sent' and properties.$survey_id == '${survey.id}' group by choice order by count() desc`
+ if (survey.id === 'new') {
+ return null
+ }
+
+ const startDate = dayjs((survey as Survey).created_at).format('YYYY-MM-DD')
+ const endDate = survey.end_date
+ ? dayjs(survey.end_date).add(1, 'day').format('YYYY-MM-DD')
+ : dayjs().add(1, 'day').format('YYYY-MM-DD')
+
+ const singleChoiceQuery = `select count(), properties.$survey_response as choice from events where event == 'survey sent' and properties.$survey_id == '${survey.id}' and timestamp >= '${startDate}' and timestamp <= '${endDate}' group by choice order by count() desc`
+ const multipleChoiceQuery = `select count(), arrayJoin(JSONExtractArrayRaw(properties, '$survey_response')) as choice from events where event == 'survey sent' and properties.$survey_id == '${survey.id}' and timestamp >= '${startDate}' and timestamp <= '${endDate}' group by choice order by count() desc`
return {
kind: NodeKind.DataTableNode,
source: {
@@ -315,13 +339,14 @@ export const surveyLogic = kea<surveyLogicType>([
? singleChoiceQuery
: multipleChoiceQuery,
},
+ showTimings: false,
}
},
],
hasTargetingFlag: [
(s) => [s.survey],
(survey): boolean => {
- return !!survey.targeting_flag || !!(survey.id === 'new' && survey.targeting_flag_filters)
+ return !!survey.targeting_flag || !!survey.targeting_flag_filters
},
],
}),
diff --git a/frontend/src/styles/utilities.scss b/frontend/src/styles/utilities.scss
index 126d981427e89..745375f1c3f57 100644
--- a/frontend/src/styles/utilities.scss
+++ b/frontend/src/styles/utilities.scss
@@ -919,6 +919,13 @@ $decorations: underline, overline, line-through, no-underline;
}
}
+.list-inside {
+ list-style-position: inside;
+}
+.list-outside {
+ list-style-position: outside;
+}
+
.shadow {
box-shadow: var(--shadow-elevation);
}
diff --git a/frontend/src/types.ts b/frontend/src/types.ts
index cb40bbbaf4e09..32e0e2862ca3f 100644
--- a/frontend/src/types.ts
+++ b/frontend/src/types.ts
@@ -1793,7 +1793,6 @@ export enum RecordingWindowFilter {
export interface EditorFilterProps {
query: InsightQueryNode
- setQuery: (node: InsightQueryNode) => void
insightProps: InsightLogicProps
}
@@ -2055,6 +2054,7 @@ export interface InsightLogicProps {
doNotLoad?: boolean
/** query when used as ad-hoc insight */
query?: InsightVizNode
+ setQuery?: (node: InsightVizNode) => void
}
export interface SetInsightOptions {
@@ -2074,7 +2074,7 @@ export interface Survey {
linked_flag: FeatureFlagBasicType | null
targeting_flag: FeatureFlagBasicType | null
 targeting_flag_filters: Pick<FeatureFlagFilters, 'groups'> | undefined
- conditions: { url: string; selector: string; is_headless?: boolean } | null
+ conditions: { url: string; selector: string; is_headless?: boolean; seenSurveyWaitPeriodInDays?: number } | null
appearance: SurveyAppearance
questions: (BasicSurveyQuestion | LinkSurveyQuestion | RatingSurveyQuestion | MultipleSurveyQuestion)[]
created_at: string
@@ -2082,6 +2082,7 @@ export interface Survey {
start_date: string | null
end_date: string | null
archived: boolean
+ remove_targeting_flag?: boolean
}
export enum SurveyType {
@@ -2113,7 +2114,7 @@ interface SurveyQuestionBase {
}
export interface BasicSurveyQuestion extends SurveyQuestionBase {
- type: SurveyQuestionType.Open | SurveyQuestionType.NPS
+ type: SurveyQuestionType.Open
}
export interface LinkSurveyQuestion extends SurveyQuestionBase {
@@ -2140,7 +2141,6 @@ export enum SurveyQuestionType {
Open = 'open',
MultipleChoice = 'multiple_choice',
SingleChoice = 'single_choice',
- NPS = 'nps',
Rating = 'rating',
Link = 'link',
}
@@ -2191,6 +2191,7 @@ export interface FeatureFlagType extends Omit<FeatureFlagBasicType, ...> {
diff --git a/package.json b/package.json
--- a/package.json
+++ b/package.json
 "schema:build:json": "... > frontend/src/queries/schema.json && prettier --write frontend/src/queries/schema.json",
- "schema:build:python": "datamodel-codegen --collapse-root-models --disable-timestamp --use-subclass-enum --input frontend/src/queries/schema.json --input-file-type jsonschema --output posthog/schema.py && black posthog/schema.py",
+ "schema:build:python": "datamodel-codegen --collapse-root-models --disable-timestamp --use-one-literal-as-default --use-default-kwarg --use-subclass-enum --input frontend/src/queries/schema.json --input-file-type jsonschema --output posthog/schema.py --output-model-type pydantic_v2.BaseModel && black posthog/schema.py",
"grammar:build": "cd posthog/hogql/grammar && antlr -Dlanguage=Python3 HogQLLexer.g4 && antlr -visitor -no-listener -Dlanguage=Python3 HogQLParser.g4",
"packages:build": "pnpm packages:build:apps-common && pnpm packages:build:lemon-ui",
"packages:build:apps-common": "cd frontend/@posthog/apps-common && pnpm i && pnpm build",
@@ -73,7 +73,7 @@
"@monaco-editor/react": "4.4.6",
"@posthog/plugin-scaffold": "^1.3.2",
"@react-hook/size": "^2.1.2",
- "@rrweb/types": "^2.0.0-alpha.9",
+ "@rrweb/types": "^2.0.0-alpha.11",
"@sentry/react": "7.22.0",
"@testing-library/dom": ">=7.21.4",
"@tiptap/core": "^2.1.0-rc.12",
@@ -125,7 +125,8 @@
"kea-window-values": "^3.0.0",
"md5": "^2.3.0",
"monaco-editor": "^0.39.0",
- "posthog-js": "1.78.2",
+ "papaparse": "^5.4.1",
+ "posthog-js": "1.78.5",
"posthog-js-lite": "2.0.0-alpha5",
"prettier": "^2.8.8",
"prop-types": "^15.7.2",
@@ -154,7 +155,7 @@
"react-virtualized": "^9.22.5",
"require-from-string": "^2.0.2",
"resize-observer-polyfill": "^1.5.1",
- "rrweb": "^2.0.0-alpha.9",
+ "rrweb": "^2.0.0-alpha.11",
"sass": "^1.26.2",
"use-debounce": "^9.0.3",
"use-resize-observer": "^8.0.0",
@@ -206,6 +207,7 @@
"@types/jest-image-snapshot": "^6.1.0",
"@types/md5": "^2.3.0",
"@types/node": "^18.11.9",
+ "@types/papaparse": "^5.3.8",
"@types/pixelmatch": "^5.2.4",
"@types/pngjs": "^6.0.1",
"@types/query-selector-shadow-dom": "^1.0.0",
@@ -294,9 +296,6 @@
"!(posthog/hogql/grammar/*)*.{py,pyi}": [
"black",
"ruff"
- ],
- "*.png": [
- "optipng -clobber -o4 -strip all"
]
},
"browserslist": {
diff --git a/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Hidden-Mobile-1-chromium-linux.png b/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Hidden-Mobile-1-chromium-linux.png
index 9af7ece13c6b0..3b185216c6362 100644
Binary files a/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Hidden-Mobile-1-chromium-linux.png and b/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Hidden-Mobile-1-chromium-linux.png differ
diff --git a/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Shown-Mobile-1-chromium-linux.png b/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Shown-Mobile-1-chromium-linux.png
index 197488d77b494..8b8203a70dcf5 100644
Binary files a/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Shown-Mobile-1-chromium-linux.png and b/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Shown-Mobile-1-chromium-linux.png differ
diff --git a/plugin-server/functional_tests/webhooks.test.ts b/plugin-server/functional_tests/webhooks.test.ts
index 0fb7155790034..82f1bfe9bf186 100644
--- a/plugin-server/functional_tests/webhooks.test.ts
+++ b/plugin-server/functional_tests/webhooks.test.ts
@@ -199,6 +199,7 @@ test.concurrent(`webhooks: fires zapier REST webhook`, async () => {
properties: {
$creator_event_uuid: eventUuid,
$initial_current_url: 'http://localhost:8000',
+ $current_url: 'http://localhost:8000',
email: 't@t.com',
},
uuid: expect.any(String),
@@ -208,6 +209,7 @@ test.concurrent(`webhooks: fires zapier REST webhook`, async () => {
$sent_at: expect.any(String),
$set: {
email: 't@t.com',
+ $current_url: 'http://localhost:8000',
},
$set_once: {
$initial_current_url: 'http://localhost:8000',
diff --git a/plugin-server/jest.setup.fetch-mock.js b/plugin-server/jest.setup.fetch-mock.js
index 151debe7538c5..60e240a898220 100644
--- a/plugin-server/jest.setup.fetch-mock.js
+++ b/plugin-server/jest.setup.fetch-mock.js
@@ -6,7 +6,11 @@ import fetch from 'node-fetch'
import { status } from './src/utils/status'
-jest.mock('node-fetch')
+jest.mock('node-fetch', () => ({
+ __esModule: true,
+ ...jest.requireActual('node-fetch'), // Only mock fetch(), leave Request, Response, FetchError, etc. alone
+ default: jest.fn(),
+}))
beforeEach(() => {
const responsesToUrls = {
@@ -21,7 +25,7 @@ beforeEach(() => {
]),
}
- fetch.mockImplementation(
+ jest.mocked(fetch).mockImplementation(
(url, options) =>
new Promise((resolve) =>
resolve({
diff --git a/plugin-server/package.json b/plugin-server/package.json
index be9bebdd9b2cc..d13ed75c16f91 100644
--- a/plugin-server/package.json
+++ b/plugin-server/package.json
@@ -11,6 +11,7 @@
"start": "pnpm start:dist",
"start:dist": "BASE_DIR=.. node dist/index.js",
"start:dev": "NODE_ENV=dev BASE_DIR=.. nodemon --watch src/ --exec node -r @swc-node/register src/index.ts",
+ "start:devNoWatch": "NODE_ENV=dev BASE_DIR=.. node -r @swc-node/register src/index.ts",
"build": "pnpm clean && pnpm compile",
"clean": "rm -rf dist/*",
"typescript:compile": "tsc -b",
@@ -126,6 +127,7 @@
"eslint-plugin-prettier": "^4.2.1",
"eslint-plugin-promise": "^6.0.0",
"eslint-plugin-simple-import-sort": "^7.0.0",
+ "ipaddr.js": "^2.1.0",
"jest": "^28.1.1",
"nodemon": "^2.0.22",
"parse-prometheus-text-format": "^1.1.1",
diff --git a/plugin-server/pnpm-lock.yaml b/plugin-server/pnpm-lock.yaml
index ffb5724f5ec11..9389350aa76ac 100644
--- a/plugin-server/pnpm-lock.yaml
+++ b/plugin-server/pnpm-lock.yaml
@@ -279,6 +279,9 @@ devDependencies:
eslint-plugin-simple-import-sort:
specifier: ^7.0.0
version: 7.0.0(eslint@8.39.0)
+ ipaddr.js:
+ specifier: ^2.1.0
+ version: 2.1.0
jest:
specifier: ^28.1.1
version: 28.1.3(@types/node@16.18.25)(ts-node@10.9.1)
@@ -7510,6 +7513,11 @@ packages:
resolution: {integrity: sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==}
dev: false
+ /ipaddr.js@2.1.0:
+ resolution: {integrity: sha512-LlbxQ7xKzfBusov6UMi4MFpEg0m+mAm9xyNGEduwXMEDuf4WfzB/RZwMVYEd7IKGvh4IUkEXYxtAVu9T3OelJQ==}
+ engines: {node: '>= 10'}
+ dev: true
+
/is-arguments@1.1.1:
resolution: {integrity: sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==}
engines: {node: '>= 0.4'}
diff --git a/plugin-server/src/config/config.ts b/plugin-server/src/config/config.ts
index a403257cdb87e..a4da5a5f160e8 100644
--- a/plugin-server/src/config/config.ts
+++ b/plugin-server/src/config/config.ts
@@ -7,6 +7,8 @@ import {
KAFKA_EVENTS_PLUGIN_INGESTION_OVERFLOW,
} from './kafka-topics'
+export const DEFAULT_HTTP_SERVER_PORT = 6738
+
export const defaultConfig = overrideWithEnv(getDefaultConfig())
export function getDefaultConfig(): PluginsServerConfig {
@@ -44,6 +46,7 @@ export function getDefaultConfig(): PluginsServerConfig {
KAFKA_SASL_PASSWORD: undefined,
KAFKA_CLIENT_RACK: undefined,
KAFKA_CONSUMPTION_USE_RDKAFKA: false, // Transitional setting, ignored for consumers that only support one library
+ KAFKA_CONSUMPTION_RDKAFKA_COOPERATIVE_REBALANCE: true, // If true, use the cooperative rebalance strategy, otherwise uses the default ('range,roundrobin')
KAFKA_CONSUMPTION_MAX_BYTES: 10_485_760, // Default value for kafkajs
KAFKA_CONSUMPTION_MAX_BYTES_PER_PARTITION: 1_048_576, // Default value for kafkajs, must be bigger than message size
KAFKA_CONSUMPTION_MAX_WAIT_MS: 1_000, // Down from the 5s default for kafkajs
@@ -74,6 +77,7 @@ export function getDefaultConfig(): PluginsServerConfig {
SENTRY_DSN: null,
SENTRY_PLUGIN_SERVER_TRACING_SAMPLE_RATE: 0,
SENTRY_PLUGIN_SERVER_PROFILING_SAMPLE_RATE: 0,
+ HTTP_SERVER_PORT: DEFAULT_HTTP_SERVER_PORT,
STATSD_HOST: null,
STATSD_PORT: 8125,
STATSD_PREFIX: 'plugin-server.',
@@ -107,6 +111,7 @@ export function getDefaultConfig(): PluginsServerConfig {
CONVERSION_BUFFER_ENABLED_TEAMS: '',
CONVERSION_BUFFER_TOPIC_ENABLED_TEAMS: '',
BUFFER_CONVERSION_SECONDS: isDevEnv() ? 2 : 60, // KEEP IN SYNC WITH posthog/settings/ingestion.py
+ FETCH_HOSTNAME_GUARD_TEAMS: '',
PERSON_INFO_CACHE_TTL: 5 * 60, // 5 min
KAFKA_HEALTHCHECK_SECONDS: 20,
OBJECT_STORAGE_ENABLED: true,
@@ -125,7 +130,7 @@ export function getDefaultConfig(): PluginsServerConfig {
APP_METRICS_GATHERED_FOR_ALL: isDevEnv() ? true : false,
MAX_TEAM_ID_TO_BUFFER_ANONYMOUS_EVENTS_FOR: 0,
USE_KAFKA_FOR_SCHEDULED_TASKS: true,
- CLOUD_DEPLOYMENT: 'default', // Used as a Sentry tag
+ CLOUD_DEPLOYMENT: null,
STARTUP_PROFILE_DURATION_SECONDS: 300, // 5 minutes
STARTUP_PROFILE_CPU: false,
diff --git a/plugin-server/src/kafka/batch-consumer.ts b/plugin-server/src/kafka/batch-consumer.ts
index 3acae7a88b57d..03c9e2de6db37 100644
--- a/plugin-server/src/kafka/batch-consumer.ts
+++ b/plugin-server/src/kafka/batch-consumer.ts
@@ -33,6 +33,7 @@ export const startBatchConsumer = async ({
topicCreationTimeoutMs,
eachBatch,
autoCommit = true,
+ cooperativeRebalance = true,
queuedMinMessages = 100000,
}: {
connectionConfig: GlobalConfig
@@ -48,6 +49,7 @@ export const startBatchConsumer = async ({
topicCreationTimeoutMs: number
 eachBatch: (messages: Message[]) => Promise<void>
autoCommit?: boolean
+ cooperativeRebalance?: boolean
queuedMinMessages?: number
 }): Promise<BatchConsumer> => {
// Starts consuming from `topic` in batches of `fetchBatchSize` messages,
@@ -113,12 +115,12 @@ export const startBatchConsumer = async ({
// https://www.confluent.io/en-gb/blog/incremental-cooperative-rebalancing-in-kafka/
// for details on the advantages of this rebalancing strategy as well as
// how it works.
- 'partition.assignment.strategy': 'cooperative-sticky',
+ 'partition.assignment.strategy': cooperativeRebalance ? 'cooperative-sticky' : 'range,roundrobin',
rebalance_cb: true,
offset_commit_cb: true,
})
- instrumentConsumerMetrics(consumer, groupId)
+ instrumentConsumerMetrics(consumer, groupId, cooperativeRebalance)
let isShuttingDown = false
let lastLoopTime = Date.now()
diff --git a/plugin-server/src/kafka/consumer.ts b/plugin-server/src/kafka/consumer.ts
index 32a6594009f7a..62b8e951ebc9f 100644
--- a/plugin-server/src/kafka/consumer.ts
+++ b/plugin-server/src/kafka/consumer.ts
@@ -59,17 +59,20 @@ export const createKafkaConsumer = async (config: ConsumerGlobalConfig) => {
 export function countPartitionsPerTopic(assignments: Assignment[]): Map<string, number> {
const partitionsPerTopic = new Map()
for (const assignment of assignments) {
- if (assignment.topic in partitionsPerTopic) {
+ if (partitionsPerTopic.has(assignment.topic)) {
partitionsPerTopic.set(assignment.topic, partitionsPerTopic.get(assignment.topic) + 1)
} else {
partitionsPerTopic.set(assignment.topic, 1)
}
}
-
return partitionsPerTopic
}
-export const instrumentConsumerMetrics = (consumer: RdKafkaConsumer, groupId: string) => {
+export const instrumentConsumerMetrics = (
+ consumer: RdKafkaConsumer,
+ groupId: string,
+ cooperativeRebalance: boolean
+) => {
// For each message consumed, we record the latest timestamp processed for
// each partition assigned to this consumer group member. This consumer
// should only provide metrics for the partitions that are assigned to it,
@@ -94,6 +97,7 @@ export const instrumentConsumerMetrics = (consumer: RdKafkaConsumer, groupId: st
//
// TODO: add other relevant metrics here
// TODO: expose the internal librdkafka metrics as well.
+ const strategyString = cooperativeRebalance ? 'cooperative' : 'eager'
consumer.on('rebalance', (error: LibrdKafkaError, assignments: TopicPartition[]) => {
/**
* see https://github.com/Blizzard/node-rdkafka#rebalancing errors are used to signal
@@ -103,14 +107,22 @@ export const instrumentConsumerMetrics = (consumer: RdKafkaConsumer, groupId: st
* And when the balancing is completed the new assignments are received with ERR__ASSIGN_PARTITIONS
*/
if (error.code === CODES.ERRORS.ERR__ASSIGN_PARTITIONS) {
- status.info('📝️', 'librdkafka rebalance, partitions assigned', { assignments })
+ status.info('📝️', `librdkafka ${strategyString} rebalance, partitions assigned`, { assignments })
for (const [topic, count] of countPartitionsPerTopic(assignments)) {
- kafkaRebalancePartitionCount.labels({ topic: topic }).inc(count)
+ if (cooperativeRebalance) {
+ kafkaRebalancePartitionCount.labels({ topic: topic }).inc(count)
+ } else {
+ kafkaRebalancePartitionCount.labels({ topic: topic }).set(count)
+ }
}
} else if (error.code === CODES.ERRORS.ERR__REVOKE_PARTITIONS) {
- status.info('📝️', 'librdkafka rebalance started, partitions revoked', { assignments })
+ status.info('📝️', `librdkafka ${strategyString} rebalance started, partitions revoked`, { assignments })
for (const [topic, count] of countPartitionsPerTopic(assignments)) {
- kafkaRebalancePartitionCount.labels({ topic: topic }).dec(count)
+ if (cooperativeRebalance) {
+ kafkaRebalancePartitionCount.labels({ topic: topic }).dec(count)
+ } else {
+ kafkaRebalancePartitionCount.labels({ topic: topic }).set(count)
+ }
}
} else {
// We had a "real" error
diff --git a/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-onevent.ts b/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-onevent.ts
index a97d034778ac4..4d12925f0ce6b 100644
--- a/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-onevent.ts
+++ b/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-onevent.ts
@@ -4,20 +4,18 @@ import { EachBatchPayload, KafkaMessage } from 'kafkajs'
import { RawClickHouseEvent } from '../../../types'
import { convertToIngestionEvent } from '../../../utils/event'
import { status } from '../../../utils/status'
-import { groupIntoBatches } from '../../../utils/utils'
import { runInstrumentedFunction } from '../../utils'
import { KafkaJSIngestionConsumer } from '../kafka-queue'
import { eventDroppedCounter, latestOffsetTimestampGauge } from '../metrics'
+import { eachBatchHandlerHelper } from './each-batch-webhooks'
// Must require as `tsc` strips unused `import` statements and just requiring this seems to init some globals
require('@sentry/tracing')
export async function eachMessageAppsOnEventHandlers(
- message: KafkaMessage,
+ clickHouseEvent: RawClickHouseEvent,
queue: KafkaJSIngestionConsumer
 ): Promise<void> {
- const clickHouseEvent = JSON.parse(message.value!.toString()) as RawClickHouseEvent
-
const pluginConfigs = queue.pluginsServer.pluginConfigsPerTeam.get(clickHouseEvent.team_id)
if (pluginConfigs) {
// Elements parsing can be extremely slow, so we skip it for some plugins
@@ -50,7 +48,14 @@ export async function eachBatchAppsOnEventHandlers(
payload: EachBatchPayload,
queue: KafkaJSIngestionConsumer
 ): Promise<void> {
- await eachBatch(payload, queue, eachMessageAppsOnEventHandlers, groupIntoBatches, 'async_handlers_on_event')
+ await eachBatchHandlerHelper(
+ payload,
+ (teamId) => queue.pluginsServer.pluginConfigsPerTeam.has(teamId),
+ (event) => eachMessageAppsOnEventHandlers(event, queue),
+ queue.pluginsServer.statsd,
+ queue.pluginsServer.WORKER_CONCURRENCY * queue.pluginsServer.TASKS_PER_WORKER,
+ 'on_event'
+ )
}
export async function eachBatch(
diff --git a/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-webhooks.ts b/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-webhooks.ts
index 427297a613b1b..fb671f0cd9633 100644
--- a/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-webhooks.ts
+++ b/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-webhooks.ts
@@ -17,10 +17,10 @@ import { eventDroppedCounter, latestOffsetTimestampGauge } from '../metrics'
require('@sentry/tracing')
// exporting only for testing
-export function groupIntoBatchesWebhooks(
+export function groupIntoBatchesByUsage(
array: KafkaMessage[],
batchSize: number,
- actionMatcher: ActionMatcher
+ shouldProcess: (teamId: number) => boolean
): { eventBatch: RawClickHouseEvent[]; lastOffset: string; lastTimestamp: string }[] {
// Most events will not trigger a webhook call, so we want to filter them out as soon as possible
// to achieve the highest effective concurrency when executing the actual HTTP calls.
@@ -32,7 +32,7 @@ export function groupIntoBatchesWebhooks(
let currentCount = 0
array.forEach((message, index) => {
const clickHouseEvent = JSON.parse(message.value!.toString()) as RawClickHouseEvent
- if (actionMatcher.hasWebhooks(clickHouseEvent.team_id)) {
+ if (shouldProcess(clickHouseEvent.team_id)) {
currentBatch.push(clickHouseEvent)
currentCount++
} else {
@@ -58,18 +58,36 @@ export async function eachBatchWebhooksHandlers(
hookCannon: HookCommander,
statsd: StatsD | undefined,
concurrency: number
+): Promise<void> {
+ await eachBatchHandlerHelper(
+ payload,
+ (teamId) => actionMatcher.hasWebhooks(teamId),
+ (event) => eachMessageWebhooksHandlers(event, actionMatcher, hookCannon, statsd),
+ statsd,
+ concurrency,
+ 'webhooks'
+ )
+}
+
+export async function eachBatchHandlerHelper(
+ payload: EachBatchPayload,
+ shouldProcess: (teamId: number) => boolean,
+ eachMessageHandler: (event: RawClickHouseEvent) => Promise<void>,
+ statsd: StatsD | undefined,
+ concurrency: number,
+ stats_key: string
 ): Promise<void> {
// similar to eachBatch function in each-batch.ts, but without the dependency on the KafkaJSIngestionConsumer
// & handling the different batching return type
- const key = 'async_handlers_webhooks'
+ const key = `async_handlers_${stats_key}`
const batchStartTimer = new Date()
const loggingKey = `each_batch_${key}`
const { batch, resolveOffset, heartbeat, commitOffsetsIfNecessary, isRunning, isStale }: EachBatchPayload = payload
- const transaction = Sentry.startTransaction({ name: `eachBatchWebhooks` })
+ const transaction = Sentry.startTransaction({ name: `eachBatch${stats_key}` })
try {
- const batchesWithOffsets = groupIntoBatchesWebhooks(batch.messages, concurrency, actionMatcher)
+ const batchesWithOffsets = groupIntoBatchesByUsage(batch.messages, concurrency, shouldProcess)
statsd?.histogram('ingest_event_batching.input_length', batch.messages.length, { key: key })
statsd?.histogram('ingest_event_batching.batch_count', batchesWithOffsets.length, { key: key })
@@ -88,9 +106,7 @@ export async function eachBatchWebhooksHandlers(
}
await Promise.all(
- eventBatch.map((event: RawClickHouseEvent) =>
- eachMessageWebhooksHandlers(event, actionMatcher, hookCannon, statsd).finally(() => heartbeat())
- )
+ eventBatch.map((event: RawClickHouseEvent) => eachMessageHandler(event).finally(() => heartbeat()))
)
resolveOffset(lastOffset)
diff --git a/plugin-server/src/main/ingestion-queues/kafka-queue.ts b/plugin-server/src/main/ingestion-queues/kafka-queue.ts
index da51173e0507f..7989efd4b356a 100644
--- a/plugin-server/src/main/ingestion-queues/kafka-queue.ts
+++ b/plugin-server/src/main/ingestion-queues/kafka-queue.ts
@@ -255,6 +255,7 @@ export class IngestionConsumer {
consumerMaxWaitMs: this.pluginsServer.KAFKA_CONSUMPTION_MAX_WAIT_MS,
fetchBatchSize: 500,
topicCreationTimeoutMs: this.pluginsServer.KAFKA_TOPIC_CREATION_TIMEOUT_MS,
+ cooperativeRebalance: this.pluginsServer.KAFKA_CONSUMPTION_RDKAFKA_COOPERATIVE_REBALANCE,
eachBatch: (payload) => this.eachBatchConsumer(payload),
})
this.consumerReady = true
diff --git a/plugin-server/src/main/ingestion-queues/on-event-handler-consumer.ts b/plugin-server/src/main/ingestion-queues/on-event-handler-consumer.ts
index 221c33b1381e4..31a0e425a40b3 100644
--- a/plugin-server/src/main/ingestion-queues/on-event-handler-consumer.ts
+++ b/plugin-server/src/main/ingestion-queues/on-event-handler-consumer.ts
@@ -85,7 +85,13 @@ export const startAsyncWebhooksHandlerConsumer = async ({
const actionManager = new ActionManager(postgres)
await actionManager.prepare()
const actionMatcher = new ActionMatcher(postgres, actionManager, statsd)
- const hookCannon = new HookCommander(postgres, teamManager, organizationManager, statsd)
+ const hookCannon = new HookCommander(
+ postgres,
+ teamManager,
+ organizationManager,
+ new Set(serverConfig.FETCH_HOSTNAME_GUARD_TEAMS.split(',').filter(String).map(Number)),
+ statsd
+ )
const concurrency = serverConfig.TASKS_PER_WORKER || 20
const pubSub = new PubSub(serverConfig, {
diff --git a/plugin-server/src/main/pluginsServer.ts b/plugin-server/src/main/pluginsServer.ts
index 9eb77dd815e7b..eef7fdaa8b6de 100644
--- a/plugin-server/src/main/pluginsServer.ts
+++ b/plugin-server/src/main/pluginsServer.ts
@@ -447,7 +447,7 @@ export async function startPluginsServer(
}
if (capabilities.http) {
- httpServer = createHttpServer(healthChecks, analyticsEventsIngestionConsumer)
+ httpServer = createHttpServer(serverConfig.HTTP_SERVER_PORT, healthChecks, analyticsEventsIngestionConsumer)
}
// If session recordings consumer is defined, then join it. If join
diff --git a/plugin-server/src/main/services/http-server.ts b/plugin-server/src/main/services/http-server.ts
index 0d84c9815f5cf..89716d23366eb 100644
--- a/plugin-server/src/main/services/http-server.ts
+++ b/plugin-server/src/main/services/http-server.ts
@@ -5,13 +5,12 @@ import * as prometheus from 'prom-client'
import { status } from '../../utils/status'
-export const HTTP_SERVER_PORT = 6738
-
prometheus.collectDefaultMetrics()
const v8Profiler = require('v8-profiler-next')
v8Profiler.setGenerateType(1)
export function createHttpServer(
+ port: number,
 healthChecks: { [service: string]: () => Promise<boolean> | boolean },
analyticsEventsIngestionConsumer?: KafkaJSIngestionConsumer | IngestionConsumer
): Server {
@@ -47,7 +46,7 @@ export function createHttpServer(
// }
// }
const checkResults = await Promise.all(
- // Note that we do not ues `Promise.allSettled` here so we can
+ // Note that we do not use `Promise.allSettled` here so we can
// assume that all promises have resolved. If there was a
// rejected promise, the http server should catch it and return
// a 500 status code.
@@ -118,8 +117,8 @@ export function createHttpServer(
}
})
- server.listen(HTTP_SERVER_PORT, () => {
- status.info('🩺', `Status server listening on port ${HTTP_SERVER_PORT}`)
+ server.listen(port, () => {
+ status.info('🩺', `Status server listening on port ${port}`)
})
return server
diff --git a/plugin-server/src/types.ts b/plugin-server/src/types.ts
index fc153f7e33b1b..614be33a1238d 100644
--- a/plugin-server/src/types.ts
+++ b/plugin-server/src/types.ts
@@ -129,6 +129,7 @@ export interface PluginsServerConfig {
KAFKA_SASL_PASSWORD: string | undefined
KAFKA_CLIENT_RACK: string | undefined
KAFKA_CONSUMPTION_USE_RDKAFKA: boolean
+ KAFKA_CONSUMPTION_RDKAFKA_COOPERATIVE_REBALANCE: boolean
KAFKA_CONSUMPTION_MAX_BYTES: number
KAFKA_CONSUMPTION_MAX_BYTES_PER_PARTITION: number
KAFKA_CONSUMPTION_MAX_WAIT_MS: number // fetch.wait.max.ms rdkafka parameter
@@ -150,6 +151,7 @@ export interface PluginsServerConfig {
SENTRY_DSN: string | null
SENTRY_PLUGIN_SERVER_TRACING_SAMPLE_RATE: number // Rate of tracing in plugin server (between 0 and 1)
SENTRY_PLUGIN_SERVER_PROFILING_SAMPLE_RATE: number // Rate of profiling in plugin server (between 0 and 1)
+ HTTP_SERVER_PORT: number
STATSD_HOST: string | null
STATSD_PORT: number
STATSD_PREFIX: string
@@ -179,6 +181,7 @@ export interface PluginsServerConfig {
CONVERSION_BUFFER_ENABLED_TEAMS: string
CONVERSION_BUFFER_TOPIC_ENABLED_TEAMS: string
BUFFER_CONVERSION_SECONDS: number
+ FETCH_HOSTNAME_GUARD_TEAMS: string
PERSON_INFO_CACHE_TTL: number
KAFKA_HEALTHCHECK_SECONDS: number
OBJECT_STORAGE_ENABLED: boolean // Disables or enables the use of object storage. It will become mandatory to use object storage
@@ -199,7 +202,8 @@ export interface PluginsServerConfig {
USE_KAFKA_FOR_SCHEDULED_TASKS: boolean // distribute scheduled tasks across the scheduler workers
EVENT_OVERFLOW_BUCKET_CAPACITY: number
EVENT_OVERFLOW_BUCKET_REPLENISH_RATE: number
- CLOUD_DEPLOYMENT: string
+ /** Label of the PostHog Cloud environment. Null if not running PostHog Cloud. @example 'US' */
+ CLOUD_DEPLOYMENT: string | null
// dump profiles to disk, covering the first N seconds of runtime
STARTUP_PROFILE_DURATION_SECONDS: number
@@ -265,6 +269,7 @@ export interface Hub extends PluginsServerConfig {
lastActivityType: string
statelessVms: StatelessVmMap
 conversionBufferEnabledTeams: Set<number>
+ fetchHostnameGuardTeams: Set<number>
// functions
 enqueuePluginJob: (job: EnqueuedPluginJob) => Promise<void>
// ValueMatchers used for various opt-in/out features
diff --git a/plugin-server/src/utils/db/hub.ts b/plugin-server/src/utils/db/hub.ts
index 710a163752a6b..a3ee16667d2f3 100644
--- a/plugin-server/src/utils/db/hub.ts
+++ b/plugin-server/src/utils/db/hub.ts
@@ -70,6 +70,9 @@ export async function createHub(
const conversionBufferEnabledTeams = new Set(
serverConfig.CONVERSION_BUFFER_ENABLED_TEAMS.split(',').filter(String).map(Number)
)
+ const fetchHostnameGuardTeams = new Set(
+ serverConfig.FETCH_HOSTNAME_GUARD_TEAMS.split(',').filter(String).map(Number)
+ )
const statsd: StatsD | undefined = createStatsdClient(serverConfig, threadId)
@@ -91,7 +94,6 @@ export async function createHub(
: undefined,
rejectUnauthorized: serverConfig.CLICKHOUSE_CA ? false : undefined,
})
- await clickhouse.querying('SELECT 1') // test that the connection works
status.info('👍', `ClickHouse ready`)
status.info('🤔', `Connecting to Kafka...`)
@@ -182,6 +184,7 @@ export async function createHub(
rootAccessManager,
promiseManager,
conversionBufferEnabledTeams,
+ fetchHostnameGuardTeams,
pluginConfigsToSkipElementsParsing: buildIntegerMatcher(process.env.SKIP_ELEMENTS_PARSING_PLUGINS, true),
}
diff --git a/plugin-server/src/utils/db/utils.ts b/plugin-server/src/utils/db/utils.ts
index 49db8914194f6..9e4eb0a3c11b7 100644
--- a/plugin-server/src/utils/db/utils.ts
+++ b/plugin-server/src/utils/db/utils.ts
@@ -39,7 +39,22 @@ export function timeoutGuard(
}, timeout)
}
-const campaignParams = new Set([
+const eventToPersonProperties = new Set([
+ // mobile params
+ '$app_build',
+ '$app_name',
+ '$app_namespace',
+ '$app_version',
+ // web params
+ '$browser',
+ '$browser_version',
+ '$device_type',
+ '$current_url',
+ '$pathname',
+ '$os',
+ '$referring_domain',
+ '$referrer',
+ // campaign params
'utm_source',
'utm_medium',
'utm_campaign',
@@ -50,31 +65,29 @@ const campaignParams = new Set([
'fbclid',
'msclkid',
])
-const initialParams = new Set([
- '$browser',
- '$browser_version',
- '$device_type',
- '$current_url',
- '$pathname',
- '$os',
- '$referring_domain',
- '$referrer',
-])
-const combinedParams = new Set([...campaignParams, ...initialParams])
/** If we get new UTM params, make sure we set those **/
export function personInitialAndUTMProperties(properties: Properties): Properties {
const propertiesCopy = { ...properties }
- const maybeSet = Object.entries(properties).filter(([key]) => campaignParams.has(key))
- const maybeSetInitial = Object.entries(properties)
- .filter(([key]) => combinedParams.has(key))
- .map(([key, value]) => [`$initial_${key.replace('$', '')}`, value])
- if (Object.keys(maybeSet).length > 0) {
+ const propertiesForPerson: [string, any][] = Object.entries(properties).filter(([key]) =>
+ eventToPersonProperties.has(key)
+ )
+
+ // all potential params are checked for $initial_ values and added to $set_once
+ const maybeSetOnce: [string, any][] = propertiesForPerson.map(([key, value]) => [
+ `$initial_${key.replace('$', '')}`,
+ value,
+ ])
+
+ // all found are also then added to $set
+ const maybeSet: [string, any][] = propertiesForPerson
+
+ if (maybeSet.length > 0) {
propertiesCopy.$set = { ...(properties.$set || {}), ...Object.fromEntries(maybeSet) }
}
- if (Object.keys(maybeSetInitial).length > 0) {
- propertiesCopy.$set_once = { ...(properties.$set_once || {}), ...Object.fromEntries(maybeSetInitial) }
+ if (maybeSetOnce.length > 0) {
+ propertiesCopy.$set_once = { ...(properties.$set_once || {}), ...Object.fromEntries(maybeSetOnce) }
}
return propertiesCopy
}
diff --git a/plugin-server/src/utils/env-utils.ts b/plugin-server/src/utils/env-utils.ts
index 0b343f09fc8e7..4c2ab7d173183 100644
--- a/plugin-server/src/utils/env-utils.ts
+++ b/plugin-server/src/utils/env-utils.ts
@@ -40,6 +40,8 @@ export const isTestEnv = (): boolean => determineNodeEnv() === NodeEnv.Test
export const isDevEnv = (): boolean => determineNodeEnv() === NodeEnv.Development
export const isProdEnv = (): boolean => determineNodeEnv() === NodeEnv.Production
+export const isCloud = (): boolean => !!process.env.CLOUD_DEPLOYMENT
+
export function isIngestionOverflowEnabled(): boolean {
const ingestionOverflowEnabled = process.env.INGESTION_OVERFLOW_ENABLED
return stringToBoolean(ingestionOverflowEnabled)
diff --git a/plugin-server/src/utils/fetch.ts b/plugin-server/src/utils/fetch.ts
index c45166edd0d1c..298e9e70debad 100644
--- a/plugin-server/src/utils/fetch.ts
+++ b/plugin-server/src/utils/fetch.ts
@@ -1,21 +1,73 @@
// This module wraps node-fetch with a sentry tracing-aware extension
-import fetch, { FetchError, Request, Response } from 'node-fetch'
+import { LookupAddress } from 'dns'
+import dns from 'dns/promises'
+import * as ipaddr from 'ipaddr.js'
+import fetch, { type RequestInfo, type RequestInit, type Response, FetchError, Request } from 'node-fetch'
+import { URL } from 'url'
import { runInSpan } from '../sentry'
-function fetchWrapper(...args: Parameters<typeof fetch>): Promise<Response> {
- const request = new Request(...args)
- return runInSpan(
+export async function trackedFetch(url: RequestInfo, init?: RequestInit): Promise<Response> {
+ const request = new Request(url, init)
+ return await runInSpan(
{
op: 'fetch',
description: `${request.method} ${request.url}`,
},
- () => fetch(...args)
+ async () => await fetch(url, init)
)
}
-fetchWrapper.isRedirect = fetch.isRedirect
-fetchWrapper.FetchError = FetchError
+trackedFetch.isRedirect = fetch.isRedirect
+trackedFetch.FetchError = FetchError
-export default fetchWrapper
+export async function safeTrackedFetch(url: RequestInfo, init?: RequestInit): Promise<Response> {
+ const request = new Request(url, init)
+ return await runInSpan(
+ {
+ op: 'fetch',
+ description: `${request.method} ${request.url}`,
+ },
+ async () => {
+ await raiseIfUserProvidedUrlUnsafe(request.url)
+ return await fetch(url, init)
+ }
+ )
+}
+
+safeTrackedFetch.isRedirect = fetch.isRedirect
+safeTrackedFetch.FetchError = FetchError
+
+/**
+ * Raise if the provided URL seems unsafe, otherwise do nothing.
+ *
+ * Equivalent of Django raise_if_user_provided_url_unsafe.
+ */
+export async function raiseIfUserProvidedUrlUnsafe(url: string): Promise<void> {
+ // Raise if the provided URL seems unsafe, otherwise do nothing.
+ let parsedUrl: URL
+ try {
+ parsedUrl = new URL(url)
+ } catch (err) {
+ throw new FetchError('Invalid URL', 'posthog-host-guard')
+ }
+ if (!parsedUrl.hostname) {
+ throw new FetchError('No hostname', 'posthog-host-guard')
+ }
+ if (parsedUrl.protocol !== 'http:' && parsedUrl.protocol !== 'https:') {
+ throw new FetchError('Scheme must be either HTTP or HTTPS', 'posthog-host-guard')
+ }
+ let addrinfo: LookupAddress[]
+ try {
+ addrinfo = await dns.lookup(parsedUrl.hostname, { all: true })
+ } catch (err) {
+ throw new FetchError('Invalid hostname', 'posthog-host-guard')
+ }
+ for (const { address } of addrinfo) {
+ // Prevent addressing internal services
+ if (ipaddr.parse(address).range() !== 'unicast') {
+ throw new FetchError('Internal hostname', 'posthog-host-guard')
+ }
+ }
+}
diff --git a/plugin-server/src/utils/utils.ts b/plugin-server/src/utils/utils.ts
index 69c56640bf886..aace016721449 100644
--- a/plugin-server/src/utils/utils.ts
+++ b/plugin-server/src/utils/utils.ts
@@ -312,14 +312,6 @@ export function escapeClickHouseString(string: string): string {
return string.replace(/\\/g, '\\\\').replace(/'/g, "\\'")
}
-export function groupIntoBatches(array: T[], batchSize: number): T[][] {
- const batches = []
- for (let i = 0; i < array.length; i += batchSize) {
- batches.push(array.slice(i, i + batchSize))
- }
- return batches
-}
-
/** Standardize JS code used internally to form without extraneous indentation. Template literal function. */
export function code(strings: TemplateStringsArray): string {
const stringsConcat = strings.join('…')
diff --git a/plugin-server/src/worker/ingestion/hooks.ts b/plugin-server/src/worker/ingestion/hooks.ts
index e3c15a9329135..2cc8279c88d52 100644
--- a/plugin-server/src/worker/ingestion/hooks.ts
+++ b/plugin-server/src/worker/ingestion/hooks.ts
@@ -5,7 +5,8 @@ import { format } from 'util'
import { Action, Hook, PostIngestionEvent, Team } from '../../types'
import { PostgresRouter, PostgresUse } from '../../utils/db/postgres'
-import fetch from '../../utils/fetch'
+import { isCloud } from '../../utils/env-utils'
+import { safeTrackedFetch, trackedFetch } from '../../utils/fetch'
import { status } from '../../utils/status'
import { getPropertyValueByPath, stringify } from '../../utils/utils'
import { OrganizationManager } from './organization-manager'
@@ -256,6 +257,7 @@ export class HookCommander {
organizationManager: OrganizationManager
statsd: StatsD | undefined
siteUrl: string
+ fetchHostnameGuardTeams: Set<number>
/** Hook request timeout in ms. */
EXTERNAL_REQUEST_TIMEOUT = 10 * 1000
@@ -264,11 +266,13 @@ export class HookCommander {
postgres: PostgresRouter,
teamManager: TeamManager,
organizationManager: OrganizationManager,
+ fetchHostnameGuardTeams?: Set<number>,
statsd?: StatsD
) {
this.postgres = postgres
this.teamManager = teamManager
this.organizationManager = organizationManager
+ this.fetchHostnameGuardTeams = fetchHostnameGuardTeams || new Set()
if (process.env.SITE_URL) {
this.siteUrl = process.env.SITE_URL
} else {
@@ -358,9 +362,10 @@ export class HookCommander {
`⌛⌛⌛ Posting Webhook slow. Timeout warning after 5 sec! url=${webhookUrl} team_id=${team.id} event_id=${event.eventUuid}`
)
}, 5000)
+ const relevantFetch = isCloud() && this.fetchHostnameGuardTeams.has(team.id) ? safeTrackedFetch : trackedFetch
try {
await instrumentWebhookStep('fetch', async () => {
- const request = await fetch(webhookUrl, {
+ const request = await relevantFetch(webhookUrl, {
method: 'POST',
body: JSON.stringify(message, undefined, 4),
headers: { 'Content-Type': 'application/json' },
@@ -399,8 +404,10 @@ export class HookCommander {
`⌛⌛⌛ Posting RestHook slow. Timeout warning after 5 sec! url=${hook.target} team_id=${event.teamId} event_id=${event.eventUuid}`
)
}, 5000)
+ const relevantFetch =
+ isCloud() && this.fetchHostnameGuardTeams.has(hook.team_id) ? safeTrackedFetch : trackedFetch
try {
- const request = await fetch(hook.target, {
+ const request = await relevantFetch(hook.target, {
method: 'POST',
body: JSON.stringify(payload, undefined, 4),
headers: { 'Content-Type': 'application/json' },
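
To spell out the fetch selection used in both webhook paths above, here is a minimal sketch under the same assumptions (Cloud-only guard, opt-in team set); pickWebhookFetch is a hypothetical helper name, the PR inlines this expression instead:

    import { isCloud } from '../../utils/env-utils'
    import { safeTrackedFetch, trackedFetch } from '../../utils/fetch'

    // The hostname guard only applies on Cloud, and only for teams opted in via the set passed to
    // HookCommander; everyone else keeps the plain instrumented fetch.
    function pickWebhookFetch(teamId: number, fetchHostnameGuardTeams: Set<number>) {
        return isCloud() && fetchHostnameGuardTeams.has(teamId) ? safeTrackedFetch : trackedFetch
    }
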
diff --git a/plugin-server/src/worker/ingestion/process-event.ts b/plugin-server/src/worker/ingestion/process-event.ts
index 3941ef725af0e..44327a6a8bfd5 100644
--- a/plugin-server/src/worker/ingestion/process-event.ts
+++ b/plugin-server/src/worker/ingestion/process-event.ts
@@ -281,6 +281,8 @@ export interface SummarizedSessionRecordingEvent {
console_warn_count: number
console_error_count: number
size: number
+ event_count: number
+ message_count: number
}
export const createSessionReplayEvent = (
@@ -357,6 +359,8 @@ export const createSessionReplayEvent = (
console_warn_count: Math.trunc(consoleWarnCount),
console_error_count: Math.trunc(consoleErrorCount),
size: Math.trunc(Buffer.byteLength(JSON.stringify(events), 'utf8')),
+ event_count: Math.trunc(events.length),
+ message_count: 1,
}
return data
diff --git a/plugin-server/src/worker/plugins/mmdb.ts b/plugin-server/src/worker/plugins/mmdb.ts
index a825c931c4da2..7321238b2ba31 100644
--- a/plugin-server/src/worker/plugins/mmdb.ts
+++ b/plugin-server/src/worker/plugins/mmdb.ts
@@ -1,5 +1,6 @@
import { Reader, ReaderModel } from '@maxmind/geoip2-node'
import { DateTime } from 'luxon'
+import fetch from 'node-fetch'
import * as schedule from 'node-schedule'
import prettyBytes from 'pretty-bytes'
import { brotliDecompress } from 'zlib'
@@ -12,7 +13,6 @@ import {
} from '../../config/mmdb-constants'
import { Hub, PluginAttachmentDB } from '../../types'
import { PostgresUse } from '../../utils/db/postgres'
-import fetch from '../../utils/fetch'
import { status } from '../../utils/status'
import { delay } from '../../utils/utils'
diff --git a/plugin-server/src/worker/vm/imports.ts b/plugin-server/src/worker/vm/imports.ts
index bcb9648974934..d7b02d87c1c41 100644
--- a/plugin-server/src/worker/vm/imports.ts
+++ b/plugin-server/src/worker/vm/imports.ts
@@ -12,33 +12,37 @@ import * as jsonwebtoken from 'jsonwebtoken'
import * as pg from 'pg'
import snowflake from 'snowflake-sdk'
import { PassThrough } from 'stream'
+import { Hub } from 'types'
import * as url from 'url'
import * as zlib from 'zlib'
-import fetch from '../../utils/fetch'
+import { isCloud, isTestEnv } from '../../utils/env-utils'
+import { safeTrackedFetch, trackedFetch } from '../../utils/fetch'
import { writeToFile } from './extensions/test-utils'
-export const imports = {
- ...(process.env.NODE_ENV === 'test'
- ? {
- 'test-utils/write-to-file': writeToFile,
- }
- : {}),
- '@google-cloud/bigquery': bigquery,
- '@google-cloud/pubsub': pubsub,
- '@google-cloud/storage': gcs,
- '@posthog/plugin-contrib': contrib,
- '@posthog/plugin-scaffold': scaffold,
- 'aws-sdk': AWS,
- ethers: ethers,
- 'generic-pool': genericPool,
- 'node-fetch': fetch,
- 'snowflake-sdk': snowflake,
- crypto: crypto,
- jsonwebtoken: jsonwebtoken,
- faker: faker,
- pg: pg,
- stream: { PassThrough },
- url: url,
- zlib: zlib,
+export function determineImports(hub: Hub, teamId: number) {
+ return {
+ ...(isTestEnv()
+ ? {
+ 'test-utils/write-to-file': writeToFile,
+ }
+ : {}),
+ '@google-cloud/bigquery': bigquery,
+ '@google-cloud/pubsub': pubsub,
+ '@google-cloud/storage': gcs,
+ '@posthog/plugin-contrib': contrib,
+ '@posthog/plugin-scaffold': scaffold,
+ 'aws-sdk': AWS,
+ ethers: ethers,
+ 'generic-pool': genericPool,
+ 'node-fetch': isCloud() && hub.fetchHostnameGuardTeams.has(teamId) ? safeTrackedFetch : trackedFetch,
+ 'snowflake-sdk': snowflake,
+ crypto: crypto,
+ jsonwebtoken: jsonwebtoken,
+ faker: faker,
+ pg: pg,
+ stream: { PassThrough },
+ url: url,
+ zlib: zlib,
+ }
}
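
A sketch of how the new factory is meant to be consumed (mirroring the vm.ts change below); the wrapper function and the import path for PluginConfig are illustrative assumptions:

    import { Hub, PluginConfig } from '../../types'
    import { determineImports } from './imports'

    // Each plugin VM now gets its own import map, so `require('node-fetch')` inside plugin code
    // resolves to safeTrackedFetch only on Cloud and only for teams in hub.fetchHostnameGuardTeams.
    function fetchAvailableToPlugin(hub: Hub, pluginConfig: PluginConfig) {
        const imports = determineImports(hub, pluginConfig.team_id)
        return imports['node-fetch']
    }
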
diff --git a/plugin-server/src/worker/vm/vm.ts b/plugin-server/src/worker/vm/vm.ts
index 967701cdff887..95e40ca4a6da8 100644
--- a/plugin-server/src/worker/vm/vm.ts
+++ b/plugin-server/src/worker/vm/vm.ts
@@ -11,7 +11,7 @@ import { createJobs } from './extensions/jobs'
import { createPosthog } from './extensions/posthog'
import { createStorage } from './extensions/storage'
import { createUtils } from './extensions/utilities'
-import { imports } from './imports'
+import { determineImports } from './imports'
import { transformCode } from './transforms'
import { upgradeExportEvents } from './upgrades/export-events'
import { addHistoricalEventsExportCapability } from './upgrades/historical-export/export-historical-events'
@@ -34,6 +34,8 @@ export function createPluginConfigVM(
pluginConfig: PluginConfig, // NB! might have team_id = 0
indexJs: string
): PluginConfigVMResponse {
+ const imports = determineImports(hub, pluginConfig.team_id)
+
const timer = new Date()
const statsdTiming = (metric: string) => {
diff --git a/plugin-server/tests/http-server.test.ts b/plugin-server/tests/http-server.test.ts
index eed0dd1907ffc..3900168cd2039 100644
--- a/plugin-server/tests/http-server.test.ts
+++ b/plugin-server/tests/http-server.test.ts
@@ -1,7 +1,7 @@
import http from 'http'
+import { DEFAULT_HTTP_SERVER_PORT } from '../src/config/config'
import { startPluginsServer } from '../src/main/pluginsServer'
-import { HTTP_SERVER_PORT } from '../src/main/services/http-server'
import { makePiscina } from '../src/worker/piscina'
import { resetTestDatabase } from './helpers/sql'
@@ -40,7 +40,7 @@ describe('http server', () => {
)
await new Promise((resolve) =>
- http.get(`http://localhost:${HTTP_SERVER_PORT}/_health`, (res) => {
+ http.get(`http://localhost:${DEFAULT_HTTP_SERVER_PORT}/_health`, (res) => {
const { statusCode } = res
expect(statusCode).toEqual(200)
resolve(null)
@@ -68,7 +68,7 @@ describe('http server', () => {
)
await new Promise((resolve) =>
- http.get(`http://localhost:${HTTP_SERVER_PORT}/_ready`, (res) => {
+ http.get(`http://localhost:${DEFAULT_HTTP_SERVER_PORT}/_ready`, (res) => {
const { statusCode } = res
expect(statusCode).toEqual(200)
resolve(null)
diff --git a/plugin-server/tests/main/db.test.ts b/plugin-server/tests/main/db.test.ts
index 06c23380448e3..a2a570ce0af07 100644
--- a/plugin-server/tests/main/db.test.ts
+++ b/plugin-server/tests/main/db.test.ts
@@ -165,7 +165,7 @@ describe('DB', () => {
user_id: 1001,
resource_id: 69,
event: 'action_performed',
- target: 'https://rest-hooks.example.com/',
+ target: 'https://example.com/',
created: new Date().toISOString(),
updated: new Date().toISOString(),
})
@@ -188,7 +188,7 @@ describe('DB', () => {
team_id: 2,
resource_id: 69,
event: 'action_performed',
- target: 'https://rest-hooks.example.com/',
+ target: 'https://example.com/',
},
],
bytecode: null,
@@ -226,7 +226,7 @@ describe('DB', () => {
user_id: 1001,
resource_id: 69,
event: 'event_performed',
- target: 'https://rest-hooks.example.com/',
+ target: 'https://example.com/',
created: new Date().toISOString(),
updated: new Date().toISOString(),
})
@@ -236,7 +236,7 @@ describe('DB', () => {
user_id: 1001,
resource_id: 70,
event: 'event_performed',
- target: 'https://rest-hooks.example.com/',
+ target: 'https://example.com/',
created: new Date().toISOString(),
updated: new Date().toISOString(),
})
diff --git a/plugin-server/tests/main/ingestion-queues/each-batch.test.ts b/plugin-server/tests/main/ingestion-queues/each-batch.test.ts
index 617978884fe29..0580f53d2724b 100644
--- a/plugin-server/tests/main/ingestion-queues/each-batch.test.ts
+++ b/plugin-server/tests/main/ingestion-queues/each-batch.test.ts
@@ -9,13 +9,10 @@ import {
eachBatchLegacyIngestion,
splitKafkaJSIngestionBatch,
} from '../../../src/main/ingestion-queues/batch-processing/each-batch-ingestion-kafkajs'
-import {
- eachBatch,
- eachBatchAppsOnEventHandlers,
-} from '../../../src/main/ingestion-queues/batch-processing/each-batch-onevent'
+import { eachBatchAppsOnEventHandlers } from '../../../src/main/ingestion-queues/batch-processing/each-batch-onevent'
import {
eachBatchWebhooksHandlers,
- groupIntoBatchesWebhooks,
+ groupIntoBatchesByUsage,
} from '../../../src/main/ingestion-queues/batch-processing/each-batch-webhooks'
import {
ClickHouseTimestamp,
@@ -24,7 +21,6 @@ import {
PostIngestionEvent,
RawClickHouseEvent,
} from '../../../src/types'
-import { groupIntoBatches } from '../../../src/utils/utils'
import { ActionManager } from '../../../src/worker/ingestion/action-manager'
import { ActionMatcher } from '../../../src/worker/ingestion/action-matcher'
import { HookCommander } from '../../../src/worker/ingestion/hooks'
@@ -150,26 +146,6 @@ describe('eachBatchX', () => {
}
})
- describe('eachBatch', () => {
- it('calls eachMessage with the correct arguments', async () => {
- const eachMessage = jest.fn(() => Promise.resolve())
- const batch = createKafkaJSBatch(event)
- await eachBatch(batch, queue, eachMessage, groupIntoBatches, 'key')
-
- expect(eachMessage).toHaveBeenCalledWith({ value: JSON.stringify(event) }, queue)
- })
-
- it('tracks metrics based on the key', async () => {
- const eachMessage = jest.fn(() => Promise.resolve())
- await eachBatch(createKafkaJSBatch(event), queue, eachMessage, groupIntoBatches, 'my_key')
-
- expect(queue.pluginsServer.statsd.timing).toHaveBeenCalledWith(
- 'kafka_queue.each_batch_my_key',
- expect.any(Date)
- )
- })
- })
-
describe('eachBatchAppsOnEventHandlers', () => {
it('calls runAppsOnEventPipeline when useful', async () => {
queue.pluginsServer.pluginConfigsPerTeam.set(2, [pluginConfig39])
@@ -333,11 +309,9 @@ describe('eachBatchX', () => {
kafkaTimestamp: '2020-02-23 00:10:00.00' as ClickHouseTimestamp,
},
])
- const actionManager = new ActionManager(queue.pluginsServer.postgres)
- const actionMatcher = new ActionMatcher(queue.pluginsServer.postgres, actionManager)
- // mock hasWebhooks 10 calls, 1,3,10 should return false, others true
- actionMatcher.hasWebhooks = jest.fn((teamId) => teamId !== 1 && teamId !== 3 && teamId !== 10)
- const result = groupIntoBatchesWebhooks(batch.batch.messages, 5, actionMatcher)
+ // teamIDs 1,3,10 should return false, others true
+ const toProcess = jest.fn((teamId) => teamId !== 1 && teamId !== 3 && teamId !== 10)
+ const result = groupIntoBatchesByUsage(batch.batch.messages, 5, toProcess)
expect(result).toEqual([
{
eventBatch: expect.arrayContaining([
@@ -375,8 +349,7 @@ describe('eachBatchX', () => {
])
// make sure that if the last message would be a new batch and if it's going to be excluded we
// still get the last batch as empty with the right offset and timestamp
- actionMatcher.hasWebhooks = jest.fn((teamId) => teamId !== 1 && teamId !== 3 && teamId !== 10)
- const result2 = groupIntoBatchesWebhooks(batch.batch.messages, 7, actionMatcher)
+ const result2 = groupIntoBatchesByUsage(batch.batch.messages, 7, toProcess)
expect(result2).toEqual([
{
eventBatch: expect.arrayContaining([
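
For orientation, a rough sketch of the grouping semantics the assertions above rely on, written against the test's expectations rather than the real groupIntoBatchesByUsage in each-batch-webhooks.ts; the simplified message shape and field names are illustrative and may not match the actual implementation:

    interface MessageLike {
        teamId: number
        offset: string
        timestamp: string
    }

    // Batches messages while dropping events for teams the predicate rejects, but always records the
    // last offset/timestamp seen so the consumer can still commit a fully filtered tail batch.
    function groupIntoBatchesByUsageSketch(
        messages: MessageLike[],
        batchSize: number,
        shouldProcess: (teamId: number) => boolean
    ): { eventBatch: MessageLike[]; lastOffset: string; lastTimestamp: string }[] {
        const result: { eventBatch: MessageLike[]; lastOffset: string; lastTimestamp: string }[] = []
        let currentBatch: MessageLike[] = []
        messages.forEach((message, index) => {
            if (shouldProcess(message.teamId)) {
                currentBatch.push(message)
            }
            if (currentBatch.length === batchSize || index === messages.length - 1) {
                result.push({ eventBatch: currentBatch, lastOffset: message.offset, lastTimestamp: message.timestamp })
                currentBatch = []
            }
        })
        return result
    }
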
diff --git a/plugin-server/tests/main/ingestion-queues/kafka-queue.test.ts b/plugin-server/tests/main/ingestion-queues/kafka-queue.test.ts
index c0912a2ca499b..31dc19d000f3b 100644
--- a/plugin-server/tests/main/ingestion-queues/kafka-queue.test.ts
+++ b/plugin-server/tests/main/ingestion-queues/kafka-queue.test.ts
@@ -1,4 +1,7 @@
+import { Assignment } from 'node-rdkafka-acosom'
+
import { KAFKA_EVENTS_PLUGIN_INGESTION } from '../../../src/config/kafka-topics'
+import { countPartitionsPerTopic } from '../../../src/kafka/consumer'
import { ServerInstance, startPluginsServer } from '../../../src/main/pluginsServer'
import { LogLevel, PluginsServerConfig } from '../../../src/types'
import { Hub } from '../../../src/types'
@@ -79,3 +82,22 @@ describe.skip('IngestionConsumer', () => {
expect(bufferCalls.length).toEqual(1)
})
})
+
+describe('countPartitionsPerTopic', () => {
+ it('should correctly count the number of partitions per topic', () => {
+ const assignments: Assignment[] = [
+ { topic: 'topic1', partition: 0 },
+ { topic: 'topic1', partition: 1 },
+ { topic: 'topic2', partition: 0 },
+ { topic: 'topic2', partition: 1 },
+ { topic: 'topic2', partition: 2 },
+ { topic: 'topic3', partition: 0 },
+ ]
+
+ const result = countPartitionsPerTopic(assignments)
+ expect(result.get('topic1')).toBe(2)
+ expect(result.get('topic2')).toBe(3)
+ expect(result.get('topic3')).toBe(1)
+ expect(result.size).toBe(3)
+ })
+})
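
The helper under test is only imported here; a minimal implementation consistent with these assertions could look like the following (a sketch, not necessarily the code in src/kafka/consumer.ts):

    import { Assignment } from 'node-rdkafka-acosom'

    // Tally how many partitions of each topic this consumer has been assigned.
    function countPartitionsPerTopicSketch(assignments: Assignment[]): Map<string, number> {
        const counts = new Map<string, number>()
        for (const { topic } of assignments) {
            counts.set(topic, (counts.get(topic) ?? 0) + 1)
        }
        return counts
    }
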
diff --git a/plugin-server/tests/main/process-event.test.ts b/plugin-server/tests/main/process-event.test.ts
index 2e0440d454bf8..94505831b8452 100644
--- a/plugin-server/tests/main/process-event.test.ts
+++ b/plugin-server/tests/main/process-event.test.ts
@@ -315,7 +315,7 @@ test('capture new person', async () => {
let persons = await hub.db.fetchPersons()
expect(persons[0].version).toEqual(0)
expect(persons[0].created_at).toEqual(now)
- let expectedProps = {
+ let expectedProps: Record<string, any> = {
$creator_event_uuid: uuid,
$initial_browser: 'Chrome',
$initial_browser_version: '95',
@@ -329,6 +329,12 @@ test('capture new person', async () => {
msclkid: 'BING ADS ID',
$initial_referrer: 'https://google.com/?q=posthog',
$initial_referring_domain: 'https://google.com',
+ $browser: 'Chrome',
+ $browser_version: '95',
+ $current_url: 'https://test.com',
+ $os: 'Mac OS X',
+ $referrer: 'https://google.com/?q=posthog',
+ $referring_domain: 'https://google.com',
}
expect(persons[0].properties).toEqual(expectedProps)
@@ -343,7 +349,17 @@ test('capture new person', async () => {
expect(events[0].properties).toEqual({
$ip: '127.0.0.1',
$os: 'Mac OS X',
- $set: { utm_medium: 'twitter', gclid: 'GOOGLE ADS ID', msclkid: 'BING ADS ID' },
+ $set: {
+ utm_medium: 'twitter',
+ gclid: 'GOOGLE ADS ID',
+ msclkid: 'BING ADS ID',
+ $browser: 'Chrome',
+ $browser_version: '95',
+ $current_url: 'https://test.com',
+ $os: 'Mac OS X',
+ $referrer: 'https://google.com/?q=posthog',
+ $referring_domain: 'https://google.com',
+ },
token: 'THIS IS NOT A TOKEN FOR TEAM 2',
$browser: 'Chrome',
$set_once: {
@@ -412,6 +428,12 @@ test('capture new person', async () => {
msclkid: 'BING ADS ID',
$initial_referrer: 'https://google.com/?q=posthog',
$initial_referring_domain: 'https://google.com',
+ $browser: 'Firefox',
+ $browser_version: 80,
+ $current_url: 'https://test.com/pricing',
+ $os: 'Mac OS X',
+ $referrer: 'https://google.com/?q=posthog',
+ $referring_domain: 'https://google.com',
}
expect(persons[0].properties).toEqual(expectedProps)
@@ -425,6 +447,9 @@ test('capture new person', async () => {
expect(events[1].properties.$set).toEqual({
utm_medium: 'instagram',
+ $browser: 'Firefox',
+ $browser_version: 80,
+ $current_url: 'https://test.com/pricing',
})
expect(events[1].properties.$set_once).toEqual({
$initial_browser: 'Firefox',
@@ -481,6 +506,9 @@ test('capture new person', async () => {
expect(persons[0].version).toEqual(1)
expect(events[2].properties.$set).toEqual({
+ $browser: 'Firefox',
+ $current_url: 'https://test.com/pricing',
+
utm_medium: 'instagram',
})
expect(events[2].properties.$set_once).toEqual({
@@ -1236,6 +1264,8 @@ const sessionReplayEventTestCases: {
| 'console_warn_count'
| 'console_error_count'
| 'size'
+ | 'event_count'
+ | 'message_count'
>
}[] = [
{
@@ -1252,6 +1282,8 @@ const sessionReplayEventTestCases: {
console_warn_count: 0,
console_error_count: 0,
size: 73,
+ event_count: 1,
+ message_count: 1,
},
},
{
@@ -1268,6 +1300,8 @@ const sessionReplayEventTestCases: {
console_warn_count: 0,
console_error_count: 0,
size: 73,
+ event_count: 1,
+ message_count: 1,
},
},
{
@@ -1324,6 +1358,8 @@ const sessionReplayEventTestCases: {
console_warn_count: 3,
console_error_count: 1,
size: 762,
+ event_count: 7,
+ message_count: 1,
},
},
{
@@ -1362,6 +1398,8 @@ const sessionReplayEventTestCases: {
console_warn_count: 0,
console_error_count: 0,
size: 213,
+ event_count: 2,
+ message_count: 1,
},
},
{
@@ -1389,6 +1427,8 @@ const sessionReplayEventTestCases: {
console_warn_count: 0,
console_error_count: 0,
size: 433,
+ event_count: 6,
+ message_count: 1,
},
},
]
diff --git a/plugin-server/tests/utils/db/utils.test.ts b/plugin-server/tests/utils/db/utils.test.ts
index 5201b8e60b803..420c645472ff3 100644
--- a/plugin-server/tests/utils/db/utils.test.ts
+++ b/plugin-server/tests/utils/db/utils.test.ts
@@ -17,41 +17,74 @@ describe('personInitialAndUTMProperties()', () => {
{ tag_name: 'a', nth_child: 1, nth_of_type: 2, attr__class: 'btn btn-sm' },
{ tag_name: 'div', nth_child: 1, nth_of_type: 2, $el_text: '💻' },
],
+ $app_build: 2,
+ $app_name: 'my app',
+ $app_namespace: 'com.posthog.myapp',
+ $app_version: '1.2.3',
}
- expect(personInitialAndUTMProperties(properties)).toEqual({
- distinct_id: 2,
- $browser: 'Chrome',
- $current_url: 'https://test.com',
- $os: 'Mac OS X',
- $browser_version: '95',
- $referring_domain: 'https://google.com',
- $referrer: 'https://google.com/?q=posthog',
- utm_medium: 'twitter',
- gclid: 'GOOGLE ADS ID',
- msclkid: 'BING ADS ID',
- $elements: [
- {
- tag_name: 'a',
- nth_child: 1,
- nth_of_type: 2,
- attr__class: 'btn btn-sm',
+ expect(personInitialAndUTMProperties(properties)).toMatchInlineSnapshot(`
+ Object {
+ "$app_build": 2,
+ "$app_name": "my app",
+ "$app_namespace": "com.posthog.myapp",
+ "$app_version": "1.2.3",
+ "$browser": "Chrome",
+ "$browser_version": "95",
+ "$current_url": "https://test.com",
+ "$elements": Array [
+ Object {
+ "attr__class": "btn btn-sm",
+ "nth_child": 1,
+ "nth_of_type": 2,
+ "tag_name": "a",
},
- { tag_name: 'div', nth_child: 1, nth_of_type: 2, $el_text: '💻' },
- ],
- $set: { utm_medium: 'twitter', gclid: 'GOOGLE ADS ID', msclkid: 'BING ADS ID' },
- $set_once: {
- $initial_browser: 'Chrome',
- $initial_current_url: 'https://test.com',
- $initial_os: 'Mac OS X',
- $initial_browser_version: '95',
- $initial_utm_medium: 'twitter',
- $initial_gclid: 'GOOGLE ADS ID',
- $initial_msclkid: 'BING ADS ID',
- $initial_referring_domain: 'https://google.com',
- $initial_referrer: 'https://google.com/?q=posthog',
- },
- })
+ Object {
+ "$el_text": "💻",
+ "nth_child": 1,
+ "nth_of_type": 2,
+ "tag_name": "div",
+ },
+ ],
+ "$os": "Mac OS X",
+ "$referrer": "https://google.com/?q=posthog",
+ "$referring_domain": "https://google.com",
+ "$set": Object {
+ "$app_build": 2,
+ "$app_name": "my app",
+ "$app_namespace": "com.posthog.myapp",
+ "$app_version": "1.2.3",
+ "$browser": "Chrome",
+ "$browser_version": "95",
+ "$current_url": "https://test.com",
+ "$os": "Mac OS X",
+ "$referrer": "https://google.com/?q=posthog",
+ "$referring_domain": "https://google.com",
+ "gclid": "GOOGLE ADS ID",
+ "msclkid": "BING ADS ID",
+ "utm_medium": "twitter",
+ },
+ "$set_once": Object {
+ "$initial_app_build": 2,
+ "$initial_app_name": "my app",
+ "$initial_app_namespace": "com.posthog.myapp",
+ "$initial_app_version": "1.2.3",
+ "$initial_browser": "Chrome",
+ "$initial_browser_version": "95",
+ "$initial_current_url": "https://test.com",
+ "$initial_gclid": "GOOGLE ADS ID",
+ "$initial_msclkid": "BING ADS ID",
+ "$initial_os": "Mac OS X",
+ "$initial_referrer": "https://google.com/?q=posthog",
+ "$initial_referring_domain": "https://google.com",
+ "$initial_utm_medium": "twitter",
+ },
+ "distinct_id": 2,
+ "gclid": "GOOGLE ADS ID",
+ "msclkid": "BING ADS ID",
+ "utm_medium": "twitter",
+ }
+ `)
})
it('initial current domain regression test', () => {
@@ -62,6 +95,7 @@ describe('personInitialAndUTMProperties()', () => {
expect(personInitialAndUTMProperties(properties)).toEqual({
$current_url: 'https://test.com',
$set_once: { $initial_current_url: 'https://test.com' },
+ $set: { $current_url: 'https://test.com' },
})
})
})
diff --git a/plugin-server/tests/utils/fetch.test.ts b/plugin-server/tests/utils/fetch.test.ts
new file mode 100644
index 0000000000000..d6100232a7192
--- /dev/null
+++ b/plugin-server/tests/utils/fetch.test.ts
@@ -0,0 +1,40 @@
+import { FetchError } from 'node-fetch'
+
+import { raiseIfUserProvidedUrlUnsafe } from '../../src/utils/fetch'
+
+test('raiseIfUserProvidedUrlUnsafe', async () => {
+ // Sync test cases with posthog/api/test/test_utils.py
+ await raiseIfUserProvidedUrlUnsafe('https://google.com?q=20') // Safe
+ await raiseIfUserProvidedUrlUnsafe('https://posthog.com') // Safe
+ await raiseIfUserProvidedUrlUnsafe('https://posthog.com/foo/bar') // Safe, with path
+ await raiseIfUserProvidedUrlUnsafe('https://posthog.com:443') // Safe, good port
+ await raiseIfUserProvidedUrlUnsafe('https://1.1.1.1') // Safe, public IP
+ await expect(raiseIfUserProvidedUrlUnsafe('')).rejects.toThrow(new FetchError('Invalid URL', 'posthog-host-guard'))
+ await expect(raiseIfUserProvidedUrlUnsafe('@@@')).rejects.toThrow(
+ new FetchError('Invalid URL', 'posthog-host-guard')
+ )
+ await expect(raiseIfUserProvidedUrlUnsafe('posthog.com')).rejects.toThrow(
+ new FetchError('Invalid URL', 'posthog-host-guard')
+ )
+ await expect(raiseIfUserProvidedUrlUnsafe('ftp://posthog.com')).rejects.toThrow(
+ new FetchError('Scheme must be either HTTP or HTTPS', 'posthog-host-guard')
+ )
+ await expect(raiseIfUserProvidedUrlUnsafe('http://localhost')).rejects.toThrow(
+ new FetchError('Internal hostname', 'posthog-host-guard')
+ )
+ await expect(raiseIfUserProvidedUrlUnsafe('http://192.168.0.5')).rejects.toThrow(
+ new FetchError('Internal hostname', 'posthog-host-guard')
+ )
+ await expect(raiseIfUserProvidedUrlUnsafe('http://0.0.0.0')).rejects.toThrow(
+ new FetchError('Internal hostname', 'posthog-host-guard')
+ )
+ await expect(raiseIfUserProvidedUrlUnsafe('http://10.0.0.24')).rejects.toThrow(
+ new FetchError('Internal hostname', 'posthog-host-guard')
+ )
+ await expect(raiseIfUserProvidedUrlUnsafe('http://172.20.0.21')).rejects.toThrow(
+ new FetchError('Internal hostname', 'posthog-host-guard')
+ )
+ await expect(raiseIfUserProvidedUrlUnsafe('http://fgtggggzzggggfd.com')).rejects.toThrow(
+ new FetchError('Invalid hostname', 'posthog-host-guard')
+ )
+})
diff --git a/plugin-server/tests/worker/ingestion/event-pipeline/event-pipeline-integration.test.ts b/plugin-server/tests/worker/ingestion/event-pipeline/event-pipeline-integration.test.ts
index 837079da765eb..71643e2668b48 100644
--- a/plugin-server/tests/worker/ingestion/event-pipeline/event-pipeline-integration.test.ts
+++ b/plugin-server/tests/worker/ingestion/event-pipeline/event-pipeline-integration.test.ts
@@ -105,6 +105,7 @@ describe('Event Pipeline integration test', () => {
$set: {
personProp: 'value',
anotherValue: 2,
+ $browser: 'Chrome',
},
$set_once: {
$initial_browser: 'Chrome',
@@ -118,6 +119,7 @@ describe('Event Pipeline integration test', () => {
expect(persons[0].properties).toEqual({
$creator_event_uuid: event.uuid,
$initial_browser: 'Chrome',
+ $browser: 'Chrome',
personProp: 'value',
anotherValue: 2,
})
@@ -174,7 +176,7 @@ describe('Event Pipeline integration test', () => {
user_id: commonUserId,
resource_id: 69,
event: 'action_performed',
- target: 'https://rest-hooks.example.com/',
+ target: 'https://example.com/',
created: timestamp,
updated: timestamp,
} as Hook)
@@ -198,7 +200,7 @@ describe('Event Pipeline integration test', () => {
hook: {
id: 'abc',
event: 'action_performed',
- target: 'https://rest-hooks.example.com/',
+ target: 'https://example.com/',
},
data: {
event: 'xyz',
@@ -222,7 +224,7 @@ describe('Event Pipeline integration test', () => {
// Using a more verbose way instead of toHaveBeenCalledWith because we need to parse request body
// and use expect.any for a few payload properties, which wouldn't be possible in a simpler way
- expect(jest.mocked(fetch).mock.calls[0][0]).toBe('https://rest-hooks.example.com/')
+ expect(jest.mocked(fetch).mock.calls[0][0]).toBe('https://example.com/')
const secondArg = jest.mocked(fetch).mock.calls[0][1]
expect(JSON.parse(secondArg!.body as unknown as string)).toStrictEqual(expectedPayload)
diff --git a/plugin-server/tests/worker/ingestion/event-pipeline/processPersonsStep.test.ts b/plugin-server/tests/worker/ingestion/event-pipeline/processPersonsStep.test.ts
index 71d495bcf9bce..d2ce3aa76e383 100644
--- a/plugin-server/tests/worker/ingestion/event-pipeline/processPersonsStep.test.ts
+++ b/plugin-server/tests/worker/ingestion/event-pipeline/processPersonsStep.test.ts
@@ -85,6 +85,7 @@ describe.each([[true], [false]])('processPersonsStep()', (poEEmbraceJoin) => {
$browser: 'Chrome',
$set: {
someProp: 'value',
+ $browser: 'Chrome',
},
$set_once: {
$initial_browser: 'Chrome',
@@ -95,7 +96,12 @@ describe.each([[true], [false]])('processPersonsStep()', (poEEmbraceJoin) => {
expect.objectContaining({
id: expect.any(Number),
uuid: expect.any(String),
- properties: { $initial_browser: 'Chrome', someProp: 'value', $creator_event_uuid: expect.any(String) },
+ properties: {
+ $initial_browser: 'Chrome',
+ someProp: 'value',
+ $creator_event_uuid: expect.any(String),
+ $browser: 'Chrome',
+ },
version: 0,
is_identified: false,
})
diff --git a/plugin-server/tests/worker/ingestion/hooks.test.ts b/plugin-server/tests/worker/ingestion/hooks.test.ts
index c319ba01c3bb9..19e1f0eb684e5 100644
--- a/plugin-server/tests/worker/ingestion/hooks.test.ts
+++ b/plugin-server/tests/worker/ingestion/hooks.test.ts
@@ -1,7 +1,8 @@
import { DateTime } from 'luxon'
-import * as fetch from 'node-fetch'
+import fetch, { FetchError } from 'node-fetch'
import { Action, PostIngestionEvent, Team } from '../../../src/types'
+import { isCloud } from '../../../src/utils/env-utils'
import { UUIDT } from '../../../src/utils/utils'
import {
determineWebhookType,
@@ -15,6 +16,8 @@ import {
} from '../../../src/worker/ingestion/hooks'
import { Hook } from './../../../src/types'
+jest.mock('../../../src/utils/env-utils')
+
describe('hooks', () => {
describe('determineWebhookType', () => {
test('Slack', () => {
@@ -471,29 +474,35 @@ describe('hooks', () => {
let hook: Hook
beforeEach(() => {
- hookCommander = new HookCommander({} as any, {} as any, {} as any)
+ jest.mocked(isCloud).mockReturnValue(false) // Disable private IP guard
hook = {
id: 'id',
- team_id: 2,
+ team_id: 1,
user_id: 1,
resource_id: 1,
event: 'foo',
- target: 'foo.bar',
+ target: 'https://example.com/',
created: new Date().toISOString(),
updated: new Date().toISOString(),
}
+ hookCommander = new HookCommander(
+ {} as any,
+ {} as any,
+ {} as any,
+ new Set([hook.team_id]) // Hostname guard enabled
+ )
})
test('person = undefined', async () => {
await hookCommander.postRestHook(hook, { event: 'foo' } as any)
- expect(fetch).toHaveBeenCalledWith('foo.bar', {
+ expect(fetch).toHaveBeenCalledWith('https://example.com/', {
body: JSON.stringify(
{
hook: {
id: 'id',
event: 'foo',
- target: 'foo.bar',
+ target: 'https://example.com/',
},
data: {
event: 'foo',
@@ -510,26 +519,28 @@ describe('hooks', () => {
})
test('person data from the event', async () => {
+ jest.mocked(isCloud).mockReturnValue(true) // Enable private IP guard, which example.com should pass
+
const now = new Date().toISOString()
const uuid = new UUIDT().toString()
await hookCommander.postRestHook(hook, {
event: 'foo',
- teamId: 1,
+ teamId: hook.team_id,
person_id: uuid,
person_properties: { foo: 'bar' },
person_created_at: DateTime.fromISO(now).toUTC(),
} as any)
- expect(fetch).toHaveBeenCalledWith('foo.bar', {
+ expect(fetch).toHaveBeenCalledWith('https://example.com/', {
body: JSON.stringify(
{
hook: {
id: 'id',
event: 'foo',
- target: 'foo.bar',
+ target: 'https://example.com/',
},
data: {
event: 'foo',
- teamId: 1,
+ teamId: hook.team_id,
person: {
uuid: uuid,
properties: { foo: 'bar' },
@@ -545,5 +556,19 @@ describe('hooks', () => {
timeout: 10000,
})
})
+
+ test('private IP hook allowed on self-hosted', async () => {
+ await hookCommander.postRestHook({ ...hook, target: 'http://127.0.0.1' }, { event: 'foo' } as any)
+
+ expect(fetch).toHaveBeenCalledWith('http://127.0.0.1', expect.anything())
+ })
+
+ test('private IP hook forbidden on Cloud', async () => {
+ jest.mocked(isCloud).mockReturnValue(true)
+
+ await expect(
+ hookCommander.postRestHook({ ...hook, target: 'http://127.0.0.1' }, { event: 'foo' } as any)
+ ).rejects.toThrow(new FetchError('Internal hostname', 'posthog-host-guard'))
+ })
})
})
diff --git a/plugin-server/tests/worker/plugins/mmdb.test.ts b/plugin-server/tests/worker/plugins/mmdb.test.ts
index 9bd3769032bd6..8179191a27640 100644
--- a/plugin-server/tests/worker/plugins/mmdb.test.ts
+++ b/plugin-server/tests/worker/plugins/mmdb.test.ts
@@ -1,7 +1,7 @@
import { ReaderModel } from '@maxmind/geoip2-node'
import { readFileSync } from 'fs'
import { DateTime } from 'luxon'
-import * as fetch from 'node-fetch'
+import fetch from 'node-fetch'
import { join } from 'path'
import { Hub, LogLevel } from '../../../src/types'
diff --git a/plugin-server/tests/worker/vm.extra-lazy.test.ts b/plugin-server/tests/worker/vm.extra-lazy.test.ts
index 6f971c2e38d36..e571b2f809b59 100644
--- a/plugin-server/tests/worker/vm.extra-lazy.test.ts
+++ b/plugin-server/tests/worker/vm.extra-lazy.test.ts
@@ -1,4 +1,4 @@
-import * as fetch from 'node-fetch'
+import fetch from 'node-fetch'
import { Hub, PluginTaskType } from '../../src/types'
import { createHub } from '../../src/utils/db/hub'
@@ -39,7 +39,7 @@ describe('VMs are extra lazy 💤', () => {
expect(lazyVm.ready).toEqual(true)
expect(lazyVm.setupPluginIfNeeded).not.toHaveBeenCalled()
- expect(fetch).toHaveBeenCalledWith('https://onevent.com/')
+ expect(fetch).toHaveBeenCalledWith('https://onevent.com/', undefined)
})
test('VM with jobs gets setup immediately', async () => {
@@ -64,7 +64,7 @@ describe('VMs are extra lazy 💤', () => {
expect(lazyVm.ready).toEqual(true)
expect(lazyVm.setupPluginIfNeeded).not.toHaveBeenCalled()
- expect(fetch).toHaveBeenCalledWith('https://onevent.com/')
+ expect(fetch).toHaveBeenCalledWith('https://onevent.com/', undefined)
})
test('VM without tasks delays setup until necessary', async () => {
@@ -91,7 +91,7 @@ describe('VMs are extra lazy 💤', () => {
await lazyVm.getOnEvent()
expect(lazyVm.ready).toEqual(true)
expect(lazyVm.setupPluginIfNeeded).toHaveBeenCalled()
- expect(fetch).toHaveBeenCalledWith('https://onevent.com/')
+ expect(fetch).toHaveBeenCalledWith('https://onevent.com/', undefined)
})
test('getting methods and tasks returns null if plugin is in errored state', async () => {
diff --git a/plugin-server/tests/worker/vm.test.ts b/plugin-server/tests/worker/vm.test.ts
index 8496a94a5a2c7..138c813d5c70b 100644
--- a/plugin-server/tests/worker/vm.test.ts
+++ b/plugin-server/tests/worker/vm.test.ts
@@ -1,5 +1,5 @@
import { PluginEvent, ProcessedPluginEvent } from '@posthog/plugin-scaffold'
-import * as fetch from 'node-fetch'
+import fetch from 'node-fetch'
import { KAFKA_EVENTS_PLUGIN_INGESTION, KAFKA_PLUGIN_LOG_ENTRIES } from '../../src/config/kafka-topics'
import { Hub, PluginLogEntrySource, PluginLogEntryType } from '../../src/types'
@@ -122,7 +122,7 @@ describe('vm tests', () => {
})
expect(fetch).not.toHaveBeenCalled()
await vm.methods.teardownPlugin!()
- expect(fetch).toHaveBeenCalledWith('https://google.com/results.json?query=hoho')
+ expect(fetch).toHaveBeenCalledWith('https://google.com/results.json?query=hoho', undefined)
})
test('processEvent', async () => {
@@ -376,7 +376,7 @@ describe('vm tests', () => {
event: 'export',
}
await vm.methods.onEvent!(event)
- expect(fetch).toHaveBeenCalledWith('https://google.com/results.json?query=export')
+ expect(fetch).toHaveBeenCalledWith('https://google.com/results.json?query=export', undefined)
})
test('export default', async () => {
@@ -395,7 +395,7 @@ describe('vm tests', () => {
event: 'default export',
}
await vm.methods.onEvent!(event)
- expect(fetch).toHaveBeenCalledWith('https://google.com/results.json?query=default export')
+ expect(fetch).toHaveBeenCalledWith('https://google.com/results.json?query=default export', undefined)
})
})
@@ -723,7 +723,7 @@ describe('vm tests', () => {
}
await vm.methods.processEvent!(event)
- expect(fetch).toHaveBeenCalledWith('https://google.com/results.json?query=fetched')
+ expect(fetch).toHaveBeenCalledWith('https://google.com/results.json?query=fetched', undefined)
expect(event.properties).toEqual({ count: 2, query: 'bla', results: [true, true] })
})
@@ -745,7 +745,7 @@ describe('vm tests', () => {
}
await vm.methods.processEvent!(event)
- expect(fetch).toHaveBeenCalledWith('https://google.com/results.json?query=fetched')
+ expect(fetch).toHaveBeenCalledWith('https://google.com/results.json?query=fetched', undefined)
expect(event.properties).toEqual({ count: 2, query: 'bla', results: [true, true] })
})
@@ -766,7 +766,7 @@ describe('vm tests', () => {
}
await vm.methods.processEvent!(event)
- expect(fetch).toHaveBeenCalledWith('https://google.com/results.json?query=fetched')
+ expect(fetch).toHaveBeenCalledWith('https://google.com/results.json?query=fetched', undefined)
expect(event.properties).toEqual({ count: 2, query: 'bla', results: [true, true] })
})
@@ -1051,7 +1051,7 @@ describe('vm tests', () => {
event: 'onEvent',
}
await vm.methods.onEvent!(event)
- expect(fetch).toHaveBeenCalledWith('https://google.com/results.json?query=onEvent')
+ expect(fetch).toHaveBeenCalledWith('https://google.com/results.json?query=onEvent', undefined)
})
describe('exportEvents', () => {
@@ -1085,7 +1085,7 @@ describe('vm tests', () => {
await vm.methods.onEvent!({ ...defaultEvent, event: 'otherEvent2' })
await vm.methods.onEvent!({ ...defaultEvent, event: 'otherEvent3' })
await delay(1010)
- expect(fetch).toHaveBeenCalledWith('https://export.com/results.json?query=otherEvent2&events=2')
+ expect(fetch).toHaveBeenCalledWith('https://export.com/results.json?query=otherEvent2&events=2', undefined)
expect(hub.appMetrics.queueMetric).toHaveBeenCalledWith({
teamId: pluginConfig39.team_id,
pluginConfigId: pluginConfig39.id,
@@ -1136,8 +1136,8 @@ describe('vm tests', () => {
await vm.methods.onEvent!(event)
await delay(1010)
expect(fetch).toHaveBeenCalledTimes(4)
- expect(fetch).toHaveBeenCalledWith('https://onevent.com/')
- expect(fetch).toHaveBeenCalledWith('https://export.com/results.json?query=exported&events=2')
+ expect(fetch).toHaveBeenCalledWith('https://onevent.com/', undefined)
+ expect(fetch).toHaveBeenCalledWith('https://export.com/results.json?query=exported&events=2', undefined)
})
test('buffers bytes with exportEventsBufferBytes', async () => {
@@ -1264,10 +1264,16 @@ describe('vm tests', () => {
indexJs
)
await vm.methods.onEvent!(defaultEvent)
- expect(fetch).not.toHaveBeenCalledWith('https://export.com/results.json?query=default event&events=1')
+ expect(fetch).not.toHaveBeenCalledWith(
+ 'https://export.com/results.json?query=default event&events=1',
+ undefined
+ )
await vm.methods.teardownPlugin!()
- expect(fetch).toHaveBeenCalledWith('https://export.com/results.json?query=default event&events=1')
+ expect(fetch).toHaveBeenCalledWith(
+ 'https://export.com/results.json?query=default event&events=1',
+ undefined
+ )
})
})
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index c78c38396aa16..301601dff1e49 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -1,4 +1,4 @@
-lockfileVersion: '6.1'
+lockfileVersion: '6.0'
settings:
autoInstallPeers: true
@@ -39,8 +39,8 @@ dependencies:
specifier: ^2.1.2
version: 2.1.2(react@16.14.0)
'@rrweb/types':
- specifier: ^2.0.0-alpha.9
- version: 2.0.0-alpha.9
+ specifier: ^2.0.0-alpha.11
+ version: 2.0.0-alpha.11
'@sentry/react':
specifier: 7.22.0
version: 7.22.0(react@16.14.0)
@@ -194,9 +194,12 @@ dependencies:
monaco-editor:
specifier: ^0.39.0
version: 0.39.0
+ papaparse:
+ specifier: ^5.4.1
+ version: 5.4.1
posthog-js:
- specifier: 1.78.2
- version: 1.78.2
+ specifier: 1.78.5
+ version: 1.78.5
posthog-js-lite:
specifier: 2.0.0-alpha5
version: 2.0.0-alpha5
@@ -282,8 +285,8 @@ dependencies:
specifier: ^1.5.1
version: 1.5.1
rrweb:
- specifier: ^2.0.0-alpha.9
- version: 2.0.0-alpha.9
+ specifier: ^2.0.0-alpha.11
+ version: 2.0.0-alpha.11
sass:
specifier: ^1.26.2
version: 1.56.0
@@ -432,6 +435,9 @@ devDependencies:
'@types/node':
specifier: ^18.11.9
version: 18.11.9
+ '@types/papaparse':
+ specifier: ^5.3.8
+ version: 5.3.8
'@types/pixelmatch':
specifier: ^5.2.4
version: 5.2.4
@@ -3928,10 +3934,10 @@ packages:
type-fest: 2.19.0
dev: false
- /@rrweb/types@2.0.0-alpha.9:
- resolution: {integrity: sha512-yS2KghLSmSSxo6H7tHrJ6u+nWJA9zCXaKFyc79rUSX8RHHSImRqocTqJ8jz794kCIWA90rvaQayRONdHO+vB0Q==}
+ /@rrweb/types@2.0.0-alpha.11:
+ resolution: {integrity: sha512-8ccocIkT5J/bfNRQY85qR/g6p5YQFpgFO2cMt4+Ex7w31Lq0yqZBRaoYEsawQKpLrn5KOHkdn2UTUrna7WMQuA==}
dependencies:
- rrweb-snapshot: 2.0.0-alpha.9
+ rrweb-snapshot: 2.0.0-alpha.11
dev: false
/@sentry/browser@7.22.0:
@@ -6212,6 +6218,12 @@ packages:
resolution: {integrity: sha512-sn7L+qQ6RLPdXRoiaE7bZ/Ek+o4uICma/lBFPyJEKDTPTBP1W8u0c4baj3EiS4DiqLs+Hk+KUGvMVJtAw3ePJg==}
dev: false
+ /@types/papaparse@5.3.8:
+ resolution: {integrity: sha512-ArKIEOOWULbhi53wkAiRy1ze4wvrTfhpAj7Yfzva+EkmX2sV8PpFB+xqzJfzXNzK4me95FJH9QZt5NXFVGzOoQ==}
+ dependencies:
+ '@types/node': 18.11.9
+ dev: true
+
/@types/parse-json@4.0.0:
resolution: {integrity: sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==}
dev: true
@@ -12993,7 +13005,7 @@ packages:
dependencies:
universalify: 2.0.0
optionalDependencies:
- graceful-fs: 4.2.10
+ graceful-fs: 4.2.11
/jsprim@2.0.2:
resolution: {integrity: sha512-gqXddjPqQ6G40VdnI6T6yObEC+pDNvyP95wdQhkWkg7crHH3km5qP1FsOXEkzEQwnz6gz5qGTn1c2Y52wP3OyQ==}
@@ -14265,6 +14277,10 @@ packages:
resolution: {integrity: sha512-NUcwaKxUxWrZLpDG+z/xZaCgQITkA/Dv4V/T6bw7VON6l1Xz/VnrBqrYjZQ12TamKHzITTfOEIYUj48y2KXImA==}
dev: true
+ /papaparse@5.4.1:
+ resolution: {integrity: sha512-HipMsgJkZu8br23pW15uvo6sib6wne/4woLZPlFf3rpDyMe9ywEXUsuD7+6K9PRkJlVT51j/sCOYDKGGS3ZJrw==}
+ dev: false
+
/param-case@3.0.4:
resolution: {integrity: sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==}
dependencies:
@@ -14908,8 +14924,8 @@ packages:
resolution: {integrity: sha512-tlkBdypJuvK/s00n4EiQjwYVfuuZv6vt8BF3g1ooIQa2Gz9Vz80p8q3qsPLZ0V5ErGRy6i3Q4fWC9TDzR7GNRQ==}
dev: false
- /posthog-js@1.78.2:
- resolution: {integrity: sha512-jDy0QR+Mt7c4efq4knUsDVx/dT9DKMRLPimR/aSNTPRlAdWDNYD6WFv3oFyUk5tzkOPcKVJItRmmS2ua3tesYA==}
+ /posthog-js@1.78.5:
+ resolution: {integrity: sha512-UUipML52LEyks7Pbx/3dpBJc2iPJrW+Ss6Y0BiIygn+QZoBjIe1WjE4Ep+Fnz7+cX1axex/ZiYholBnW7E4Aug==}
dependencies:
fflate: 0.4.8
dev: false
@@ -16607,27 +16623,27 @@ packages:
resolution: {integrity: sha512-85aZYCxweiD5J8yTEbw+E6A27zSnLPNDL0WfPdw3YYodq7WjnTKo0q4dtyQ2gz23iPT8Q9CUyJtAaUNcTxRf5Q==}
dev: false
- /rrdom@2.0.0-alpha.9:
- resolution: {integrity: sha512-jfaZ8tHi098P4GpPEtkOwnkucyKA5eGanAVHGPklzCqAeEq1Yx+9/y8AeOtF3yiobqKKkW8lLvFH2KrBH1CZlQ==}
+ /rrdom@2.0.0-alpha.11:
+ resolution: {integrity: sha512-U37m0t4jTz63wnVRcOQ5qFzSTrI5RdNgeXnHAha2Fmh9+1K+XuCx421a8D1wZk3WcDc2sFz/04FVdM0OD2caHg==}
dependencies:
- rrweb-snapshot: 2.0.0-alpha.9
+ rrweb-snapshot: 2.0.0-alpha.11
dev: false
- /rrweb-snapshot@2.0.0-alpha.9:
- resolution: {integrity: sha512-mHg1uUE2iUf0MXLE//4r5cMynkbduwmaOEis4gC7EuqkUAC1pYoLpcYYVt9lD6dgYIF6BmK6dgLLzMpD/tTyyA==}
+ /rrweb-snapshot@2.0.0-alpha.11:
+ resolution: {integrity: sha512-N0dzeJA2VhrlSOadkKwCVmV/DuNOwBH+Lhx89hAf9PQK4lCS8AP4AaylhqUdZOYHqwVjqsYel/uZ4hN79vuLhw==}
dev: false
- /rrweb@2.0.0-alpha.9:
- resolution: {integrity: sha512-8E2yiLY7IrFjDcVUZ7AcQtdBNFuTIsBrlCMpbyLua6X64dGRhOZ+IUDXLnAbNj5oymZgFtZu2UERG9rmV2VAng==}
+ /rrweb@2.0.0-alpha.11:
+ resolution: {integrity: sha512-vJ2gNvF+pUG9C2aaau7iSNqhWBSc4BwtUO4FpegOtDObuH4PIaxNJOlgHz82+WxKr9XPm93ER0LqmNpy0KYdKg==}
dependencies:
- '@rrweb/types': 2.0.0-alpha.9
+ '@rrweb/types': 2.0.0-alpha.11
'@types/css-font-loading-module': 0.0.7
'@xstate/fsm': 1.6.5
base64-arraybuffer: 1.0.2
fflate: 0.4.8
mitt: 3.0.0
- rrdom: 2.0.0-alpha.9
- rrweb-snapshot: 2.0.0-alpha.9
+ rrdom: 2.0.0-alpha.11
+ rrweb-snapshot: 2.0.0-alpha.11
dev: false
/rtl-css-js@1.16.0:
diff --git a/posthog/api/__init__.py b/posthog/api/__init__.py
index 263b0b5f154aa..dc84bf86b158f 100644
--- a/posthog/api/__init__.py
+++ b/posthog/api/__init__.py
@@ -4,7 +4,6 @@
from posthog.batch_exports import http as batch_exports
from posthog.settings import EE_AVAILABLE
from posthog.warehouse.api import saved_query, table, view_link
-
from . import (
activity_log,
annotation,
@@ -41,6 +40,7 @@
)
from .dashboards import dashboard, dashboard_templates
from .data_management import DataManagementViewSet
+from ..session_recordings.session_recording_api import SessionRecordingViewSet
@decorators.api_view(["GET", "HEAD", "POST", "PUT", "PATCH", "DELETE"])
@@ -212,7 +212,6 @@ def api_not_found(request):
from posthog.api.event import EventViewSet, LegacyEventViewSet # noqa: E402
from posthog.api.insight import InsightViewSet # noqa: E402
from posthog.api.person import LegacyPersonViewSet, PersonViewSet # noqa: E402
-from posthog.api.session_recording import SessionRecordingViewSet # noqa: E402
# Legacy endpoints CH (to be removed eventually)
router.register(r"cohort", LegacyCohortViewSet, basename="cohort")
diff --git a/posthog/api/feature_flag.py b/posthog/api/feature_flag.py
index bdd8ecf3ed555..f61543e14f5cb 100644
--- a/posthog/api/feature_flag.py
+++ b/posthog/api/feature_flag.py
@@ -1,9 +1,8 @@
import json
from typing import Any, Dict, List, Optional, cast
-from django.db.models import QuerySet
+from django.db.models import QuerySet, Q
from django.conf import settings
-from django.db.models.query_utils import Q
from rest_framework import authentication, exceptions, request, serializers, status, viewsets
from rest_framework.decorators import action
from rest_framework.permissions import SAFE_METHODS, BasePermission, IsAuthenticated
@@ -70,6 +69,7 @@ class FeatureFlagSerializer(TaggedItemSerializerMixin, serializers.HyperlinkedMo
rollout_percentage = serializers.SerializerMethodField()
experiment_set: serializers.PrimaryKeyRelatedField = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
+ surveys: serializers.SerializerMethodField = serializers.SerializerMethodField()
features: serializers.SerializerMethodField = serializers.SerializerMethodField()
usage_dashboard: serializers.PrimaryKeyRelatedField = serializers.PrimaryKeyRelatedField(read_only=True)
analytics_dashboards = serializers.PrimaryKeyRelatedField(
@@ -100,6 +100,7 @@ class Meta:
"rollout_percentage",
"ensure_experience_continuity",
"experiment_set",
+ "surveys",
"features",
"rollback_conditions",
"performed_rollback",
@@ -129,6 +130,12 @@ def get_features(self, feature_flag: FeatureFlag) -> Dict:
return MinimalEarlyAccessFeatureSerializer(feature_flag.features, many=True).data
+ def get_surveys(self, feature_flag: FeatureFlag) -> Dict:
+ from posthog.api.survey import SurveyAPISerializer
+
+ return SurveyAPISerializer(feature_flag.surveys_linked_flag, many=True).data # type: ignore
+ # ignoring type because mypy doesn't know about the surveys_linked_flag `related_name` relationship
+
def get_rollout_percentage(self, feature_flag: FeatureFlag) -> Optional[int]:
if self.get_is_simple_flag(feature_flag):
return feature_flag.conditions[0].get("rollout_percentage")
@@ -343,7 +350,9 @@ def get_queryset(self) -> QuerySet:
.prefetch_related("experiment_set")
.prefetch_related("features")
.prefetch_related("analytics_dashboards")
+ .prefetch_related("surveys_linked_flag")
)
+
survey_targeting_flags = Survey.objects.filter(team=self.team, targeting_flag__isnull=False).values_list(
"targeting_flag_id", flat=True
)
@@ -434,6 +443,7 @@ def my_flags(self, request: request.Request, **kwargs):
.prefetch_related("experiment_set")
.prefetch_related("features")
.prefetch_related("analytics_dashboards")
+ .prefetch_related("surveys_linked_flag")
.select_related("created_by")
.order_by("-created_at")
)
diff --git a/posthog/api/insight.py b/posthog/api/insight.py
index 28713fd083f5e..8fc0dfa282735 100644
--- a/posthog/api/insight.py
+++ b/posthog/api/insight.py
@@ -138,7 +138,7 @@ def parse(self, stream, media_type=None, parser_context=None):
try:
query = data.get("query", None)
if query:
- schema.Model.parse_obj(query)
+ schema.Model.model_validate(query)
except Exception as error:
raise ParseError(detail=str(error))
else:
diff --git a/posthog/api/instance_settings.py b/posthog/api/instance_settings.py
index 9c76eef4f3d46..ab8b0fd45e8e3 100644
--- a/posthog/api/instance_settings.py
+++ b/posthog/api/instance_settings.py
@@ -83,7 +83,7 @@ def update(self, instance: InstanceSettingHelper, validated_data: Dict[str, Any]
# TODO: Move to top-level imports once CH is moved out of `ee`
from posthog.client import sync_execute
- from posthog.models.session_recording_event.sql import UPDATE_RECORDINGS_TABLE_TTL_SQL
+ from posthog.session_recordings.sql.session_recording_event_sql import UPDATE_RECORDINGS_TABLE_TTL_SQL
sync_execute(UPDATE_RECORDINGS_TABLE_TTL_SQL(), {"weeks": new_value_parsed})
diff --git a/posthog/api/notebook.py b/posthog/api/notebook.py
index 7f3cfae9be957..5c25efe42815d 100644
--- a/posthog/api/notebook.py
+++ b/posthog/api/notebook.py
@@ -1,5 +1,5 @@
from typing import Dict, List, Optional, Any
-
+from django.db.models import Q
import structlog
from django.db import transaction
from django.db.models import QuerySet
@@ -74,6 +74,7 @@ class Meta:
"short_id",
"title",
"content",
+ "text_content",
"version",
"deleted",
"created_at",
@@ -250,8 +251,13 @@ def _filter_request(self, request: request.Request, queryset: QuerySet) -> Query
queryset = queryset.filter(
last_modified_at__lt=relative_date_parse(request.GET["date_to"], self.team.timezone_info)
)
- elif key == "s":
- queryset = queryset.filter(title__icontains=request.GET["s"])
+ elif key == "search":
+ queryset = queryset.filter(
+ # some notebooks have no text_content until they are next saved, so we need to check the title too
+ # TODO this can be removed once all/most notebooks have text_content
+ Q(title__search=request.GET["search"])
+ | Q(text_content__search=request.GET["search"])
+ )
elif key == "contains":
contains = request.GET["contains"]
match_pairs = contains.replace(",", " ").split(" ")
diff --git a/posthog/api/query.py b/posthog/api/query.py
index f6c9e871d0c6d..c3cbb48cc9529 100644
--- a/posthog/api/query.py
+++ b/posthog/api/query.py
@@ -25,6 +25,8 @@
from posthog.hogql.errors import HogQLException
from posthog.hogql.metadata import get_hogql_metadata
from posthog.hogql.query import execute_hogql_query
+
+from posthog.hogql_queries.lifecycle_query_runner import LifecycleQueryRunner
from posthog.models import Team
from posthog.models.event.events_query import run_events_query
from posthog.models.user import User
@@ -48,7 +50,7 @@ class QuerySchemaParser(JSONParser):
@staticmethod
def validate_query(data) -> Dict:
try:
- schema.Model.parse_obj(data)
+ schema.Model.model_validate(data)
# currently we have to return data not the parsed Model
# because pydantic doesn't know to discriminate on 'kind'
# if we can get this correctly typed we can return the parsed model
@@ -202,23 +204,26 @@ def process_query(team: Team, query_json: Dict, default_limit: Optional[int] = N
tag_queries(query=query_json)
if query_kind == "EventsQuery":
- events_query = EventsQuery.parse_obj(query_json)
- response = run_events_query(query=events_query, team=team, default_limit=default_limit)
- return _unwrap_pydantic_dict(response)
+ events_query = EventsQuery.model_validate(query_json)
+ events_response = run_events_query(query=events_query, team=team, default_limit=default_limit)
+ return _unwrap_pydantic_dict(events_response)
elif query_kind == "HogQLQuery":
- hogql_query = HogQLQuery.parse_obj(query_json)
- response = execute_hogql_query(
+ hogql_query = HogQLQuery.model_validate(query_json)
+ hogql_response = execute_hogql_query(
query_type="HogQLQuery",
query=hogql_query.query,
team=team,
filters=hogql_query.filters,
default_limit=default_limit,
)
- return _unwrap_pydantic_dict(response)
+ return _unwrap_pydantic_dict(hogql_response)
elif query_kind == "HogQLMetadata":
- metadata_query = HogQLMetadata.parse_obj(query_json)
- response = get_hogql_metadata(query=metadata_query, team=team)
- return _unwrap_pydantic_dict(response)
+ metadata_query = HogQLMetadata.model_validate(query_json)
+ metadata_response = get_hogql_metadata(query=metadata_query, team=team)
+ return _unwrap_pydantic_dict(metadata_response)
+ elif query_kind == "LifecycleQuery":
+ lifecycle_query_runner = LifecycleQueryRunner(query_json, team)
+ return _unwrap_pydantic_dict(lifecycle_query_runner.run())
elif query_kind == "DatabaseSchemaQuery":
database = create_hogql_database(team.pk)
return serialize_database(database)
diff --git a/posthog/api/sharing.py b/posthog/api/sharing.py
index 072e93d97e9b2..ccbc8b5f68794 100644
--- a/posthog/api/sharing.py
+++ b/posthog/api/sharing.py
@@ -15,15 +15,15 @@
from posthog.api.exports import ExportedAssetSerializer
from posthog.api.insight import InsightSerializer
from posthog.api.routing import StructuredViewSetMixin
-from posthog.api.session_recording import SessionRecordingSerializer
from posthog.models import SharingConfiguration, Team
from posthog.models.activity_logging.activity_log import log_activity, Detail, Change
from posthog.models.dashboard import Dashboard
from posthog.models.exported_asset import ExportedAsset, asset_for_token, get_content_response
from posthog.models.insight import Insight
-from posthog.models.session_recording import SessionRecording
+from posthog.models import SessionRecording
from posthog.models.user import User
from posthog.permissions import ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission
+from posthog.session_recordings.session_recording_api import SessionRecordingSerializer
from posthog.user_permissions import UserPermissions
from posthog.utils import render_template
diff --git a/posthog/api/survey.py b/posthog/api/survey.py
index 0e6135508edbe..b06e63487cd67 100644
--- a/posthog/api/survey.py
+++ b/posthog/api/survey.py
@@ -56,7 +56,8 @@ class Meta:
class SurveySerializerCreateUpdateOnly(SurveySerializer):
linked_flag_id = serializers.IntegerField(required=False, write_only=True, allow_null=True)
targeting_flag_id = serializers.IntegerField(required=False, write_only=True)
- targeting_flag_filters = serializers.JSONField(required=False, write_only=True)
+ targeting_flag_filters = serializers.JSONField(required=False, write_only=True, allow_null=True)
+ remove_targeting_flag = serializers.BooleanField(required=False, write_only=True, allow_null=True)
class Meta:
model = Survey
@@ -70,6 +71,7 @@ class Meta:
"targeting_flag_id",
"targeting_flag",
"targeting_flag_filters",
+ "remove_targeting_flag",
"questions",
"conditions",
"appearance",
@@ -82,7 +84,7 @@ class Meta:
read_only_fields = ["id", "linked_flag", "targeting_flag", "created_at"]
def validate(self, data):
- linked_flag_id = data.get("linked_flag_id", None)
+ linked_flag_id = data.get("linked_flag_id")
if linked_flag_id:
try:
FeatureFlag.objects.get(pk=linked_flag_id)
@@ -91,15 +93,41 @@ def validate(self, data):
if (
self.context["request"].method == "POST"
- and Survey.objects.filter(name=data.get("name", None), team_id=self.context["team_id"]).exists()
+ and Survey.objects.filter(name=data.get("name"), team_id=self.context["team_id"]).exists()
):
raise serializers.ValidationError("There is already a survey with this name.", code="unique")
+ existing_survey: Survey | None = self.instance
+
+ if (
+ existing_survey
+ and existing_survey.name != data.get("name")
+ and Survey.objects.filter(name=data.get("name"), team_id=self.context["team_id"])
+ .exclude(id=existing_survey.id)
+ .exists()
+ ):
+ raise serializers.ValidationError("There is already another survey with this name.", code="unique")
+
+ if data.get("targeting_flag_filters"):
+ groups = (data.get("targeting_flag_filters") or {}).get("groups") or []
+ full_rollout = any(
+ group.get("rollout_percentage") in [100, None] and len(group.get("properties", [])) == 0
+ for group in groups
+ )
+
+ if full_rollout:
+ raise serializers.ValidationError(
+ "Invalid operation: User targeting rolls out to everyone. If you want to roll out to everyone, delete this targeting",
+ code="invalid",
+ )
return data
def create(self, validated_data):
+ if "remove_targeting_flag" in validated_data:
+ validated_data.pop("remove_targeting_flag")
+
validated_data["team_id"] = self.context["team_id"]
- if validated_data.get("targeting_flag_filters", None):
+ if validated_data.get("targeting_flag_filters"):
targeting_feature_flag = self._create_new_targeting_flag(
validated_data["name"], validated_data["targeting_flag_filters"]
)
@@ -110,13 +138,21 @@ def create(self, validated_data):
return super().create(validated_data)
def update(self, instance: Survey, validated_data):
+
+ if validated_data.get("remove_targeting_flag"):
+ if instance.targeting_flag:
+ instance.targeting_flag.delete()
+ validated_data["targeting_flag_id"] = None
+ validated_data.pop("remove_targeting_flag")
+
# if the target flag filters come back with data, update the targeting feature flag if there is one, otherwise create a new one
- if validated_data.get("targeting_flag_filters", None):
+ if validated_data.get("targeting_flag_filters"):
+ new_filters = validated_data["targeting_flag_filters"]
if instance.targeting_flag:
existing_targeting_flag = instance.targeting_flag
serialized_data_filters = {
**existing_targeting_flag.filters,
- **validated_data["targeting_flag_filters"],
+ **new_filters,
}
existing_flag_serializer = FeatureFlagSerializer(
existing_targeting_flag,
@@ -127,9 +163,10 @@ def update(self, instance: Survey, validated_data):
existing_flag_serializer.is_valid(raise_exception=True)
existing_flag_serializer.save()
else:
- new_flag = self._create_new_targeting_flag(instance.name, validated_data["targeting_flag_filters"])
+ new_flag = self._create_new_targeting_flag(instance.name, new_filters)
validated_data["targeting_flag_id"] = new_flag.id
validated_data.pop("targeting_flag_filters")
+
return super().update(instance, validated_data)
def _create_new_targeting_flag(self, name, filters):
diff --git a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr
index af0efd4023fe7..245b0ceb08720 100644
--- a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr
+++ b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr
@@ -40,6 +40,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -51,6 +52,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -157,6 +159,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -168,6 +171,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -267,6 +271,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -278,6 +283,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -468,6 +474,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -479,6 +486,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -626,6 +634,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -637,6 +646,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -795,6 +805,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -806,6 +817,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -951,6 +963,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -962,6 +975,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -1181,6 +1195,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -1192,6 +1207,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -1230,6 +1246,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -1241,6 +1258,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -1376,6 +1394,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -1387,6 +1406,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -1478,6 +1498,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -1489,6 +1510,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -1527,6 +1549,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -1538,6 +1561,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -1671,6 +1695,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -1682,6 +1707,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -1789,6 +1815,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -1800,6 +1827,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -2040,6 +2068,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -2051,6 +2080,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -2272,6 +2302,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -2283,6 +2314,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -2390,6 +2422,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -2401,6 +2434,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -2501,6 +2535,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -2512,6 +2547,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -2612,6 +2648,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -2623,6 +2660,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -2703,6 +2741,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -2714,6 +2753,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -2845,6 +2885,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -2856,6 +2897,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -2933,6 +2975,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -2944,6 +2987,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -3048,6 +3092,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -3059,6 +3104,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -3163,6 +3209,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -3174,6 +3221,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -3289,6 +3337,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -3300,6 +3349,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -3600,6 +3650,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -3611,6 +3662,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -3750,6 +3802,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -3761,6 +3814,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -3874,6 +3928,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -3885,6 +3940,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -3951,6 +4007,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -3962,6 +4019,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -4104,6 +4162,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -4115,6 +4174,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -4153,6 +4213,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -4164,6 +4225,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -4268,6 +4330,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -4279,6 +4342,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -4409,6 +4473,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -4420,6 +4485,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -4825,6 +4891,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -4836,6 +4903,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -4956,6 +5024,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -4967,6 +5036,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -5033,6 +5103,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -5044,6 +5115,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -5148,6 +5220,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -5159,6 +5232,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -5224,6 +5298,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -5235,6 +5310,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -5273,6 +5349,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -5284,6 +5361,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -5388,6 +5466,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -5399,6 +5478,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -5520,6 +5600,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -5531,6 +5612,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -5674,6 +5756,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -5685,6 +5768,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -6072,6 +6156,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -6083,6 +6168,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -6211,6 +6297,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -6222,6 +6309,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -6383,6 +6471,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -6394,6 +6483,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -6541,6 +6631,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -6552,6 +6643,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -6671,6 +6763,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -6682,6 +6775,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -6752,6 +6846,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -6763,6 +6858,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -6908,6 +7004,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -6919,6 +7016,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -7538,6 +7636,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -7549,6 +7648,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -7780,6 +7880,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -7791,6 +7892,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -7933,6 +8035,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -7944,6 +8047,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -7982,6 +8086,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -7993,6 +8098,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -8097,6 +8203,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -8108,6 +8215,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -8238,6 +8346,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -8249,6 +8358,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -8353,6 +8463,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -8364,6 +8475,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -8480,6 +8592,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -8491,6 +8604,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -8612,6 +8726,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -8623,6 +8738,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -8912,6 +9028,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -8923,6 +9040,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -9058,6 +9176,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -9069,6 +9188,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -9153,6 +9273,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -9164,6 +9285,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -9274,6 +9396,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -9285,6 +9408,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -9392,6 +9516,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -9403,6 +9528,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -9513,6 +9639,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -9524,6 +9651,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -9685,6 +9813,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -9696,6 +9825,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -9834,6 +9964,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -9845,6 +9976,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -9929,6 +10061,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -9940,6 +10073,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -10081,6 +10215,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -10092,6 +10227,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -10248,6 +10384,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -10259,6 +10396,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -10350,6 +10488,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -10361,6 +10500,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -10502,6 +10642,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -10513,6 +10654,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -10631,6 +10773,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -10642,6 +10785,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -10831,6 +10975,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -10842,6 +10987,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
diff --git a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr
index 396f5103c7ec3..299074ec3d44b 100644
--- a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr
+++ b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr
@@ -40,6 +40,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -51,6 +52,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -76,6 +78,7 @@
"posthog_notebook"."team_id",
"posthog_notebook"."title",
"posthog_notebook"."content",
+ "posthog_notebook"."text_content",
"posthog_notebook"."deleted",
"posthog_notebook"."version",
"posthog_notebook"."created_at",
@@ -94,6 +97,7 @@
"posthog_notebook"."team_id",
"posthog_notebook"."title",
"posthog_notebook"."content",
+ "posthog_notebook"."text_content",
"posthog_notebook"."deleted",
"posthog_notebook"."version",
"posthog_notebook"."created_at",
@@ -120,6 +124,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -131,6 +136,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -169,6 +175,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -180,6 +187,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -277,6 +285,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -288,6 +297,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -479,6 +489,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -490,6 +501,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -555,6 +567,7 @@
"posthog_notebook"."team_id",
"posthog_notebook"."title",
"posthog_notebook"."content",
+ "posthog_notebook"."text_content",
"posthog_notebook"."deleted",
"posthog_notebook"."version",
"posthog_notebook"."created_at",
@@ -572,6 +585,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -583,6 +597,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -671,6 +686,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -682,6 +698,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
diff --git a/posthog/api/test/notebooks/test_notebook.py b/posthog/api/test/notebooks/test_notebook.py
index 3f49024d708e9..1b7f36ae54ce3 100644
--- a/posthog/api/test/notebooks/test_notebook.py
+++ b/posthog/api/test/notebooks/test_notebook.py
@@ -1,4 +1,4 @@
-from typing import List, Dict, Optional
+from typing import List, Dict
from unittest import mock
from freezegun import freeze_time
@@ -67,17 +67,20 @@ def test_cannot_list_deleted_notebook(self) -> None:
@parameterized.expand(
[
- ("without_content", None),
- ("with_content", {"some": "kind", "of": "tip", "tap": "content"}),
+ ("without_content", None, None),
+ ("with_content", {"some": "kind", "of": "tip", "tap": "content"}, "some kind of tip tap content"),
]
)
- def test_create_a_notebook(self, _, content: Optional[Dict]) -> None:
- response = self.client.post(f"/api/projects/{self.team.id}/notebooks", data={"content": content})
+ def test_create_a_notebook(self, _, content: Dict | None, text_content: str | None) -> None:
+ response = self.client.post(
+ f"/api/projects/{self.team.id}/notebooks", data={"content": content, "text_content": text_content}
+ )
assert response.status_code == status.HTTP_201_CREATED
assert response.json() == {
"id": response.json()["id"],
"short_id": response.json()["short_id"],
"content": content,
+ "text_content": text_content,
"title": None,
"version": 0,
"created_at": mock.ANY,
diff --git a/posthog/api/test/notebooks/test_notebook_filtering.py b/posthog/api/test/notebooks/test_notebook_filtering.py
index 4e9f9370c178d..5f634de548fc7 100644
--- a/posthog/api/test/notebooks/test_notebook_filtering.py
+++ b/posthog/api/test/notebooks/test_notebook_filtering.py
@@ -42,7 +42,7 @@
},
}
-BASIC_TEXT = lambda text: {"type": "paragraph", "content": [{"text": text, "type": "text"}]}
+BASIC_TEXT = lambda text: {"type": "paragraph", "content": [{"text": text, "type": "text"}], "text_content": text}
class TestNotebooksFiltering(APIBaseTest, QueryMatchingTest):
@@ -62,20 +62,22 @@ def _create_notebook_with_content(self, inner_content: List[Dict[str, Any]], tit
@parameterized.expand(
[
- ["some text", [0]],
- ["other text", [1]],
- ["text", [0, 1]],
+ ["i ride", [0]],
+ ["pony", [0]],
+ ["ponies", [0]],
+ ["my hobby", [1]],
+ ["around", [0, 1]],
["random", []],
]
)
def test_filters_based_on_title(self, search_text: str, expected_match_indexes: List[int]) -> None:
notebook_ids = [
- self._create_notebook_with_content([BASIC_TEXT("my important notes")], title="some text"),
- self._create_notebook_with_content([BASIC_TEXT("my important notes")], title="other text"),
+ self._create_notebook_with_content([BASIC_TEXT("my important notes")], title="i ride around on a pony"),
+ self._create_notebook_with_content([BASIC_TEXT("my important notes")], title="my hobby is to fish around"),
]
response = self.client.get(
- f"/api/projects/{self.team.id}/notebooks?s={search_text}",
+ f"/api/projects/{self.team.id}/notebooks?search={search_text}",
)
assert response.status_code == status.HTTP_200_OK
@@ -83,6 +85,32 @@ def test_filters_based_on_title(self, search_text: str, expected_match_indexes:
assert len(results) == len(expected_match_indexes)
assert sorted([r["id"] for r in results]) == sorted([notebook_ids[i] for i in expected_match_indexes])
+ @parameterized.expand(
+ [
+ ["pony", [0]],
+ ["pOnY", [0]],
+ ["ponies", [0]],
+ ["goat", [1]],
+ ["ride", [0, 1]],
+ ["neither", []],
+ ]
+ )
+ def test_filters_based_on_text_content(self, search_text: str, expected_match_indexes: List[int]) -> None:
+ notebook_ids = [
+ # will match both pony and ponies
+ self._create_notebook_with_content([BASIC_TEXT("you may ride a pony")], title="never matches"),
+ self._create_notebook_with_content([BASIC_TEXT("but may not ride a goat")], title="never matches"),
+ ]
+
+ response = self.client.get(
+ f"/api/projects/{self.team.id}/notebooks?search={search_text}",
+ )
+ assert response.status_code == status.HTTP_200_OK
+
+ results = response.json()["results"]
+ assert len(results) == len(expected_match_indexes)
+ assert sorted([r["id"] for r in results]) == sorted([notebook_ids[i] for i in expected_match_indexes])
+
def test_filters_based_on_params(self) -> None:
other_user = User.objects.create_and_join(self.organization, "other@posthog.com", "password")
notebook_one = Notebook.objects.create(team=self.team, created_by=self.user)
diff --git a/posthog/api/test/test_feature_flag.py b/posthog/api/test/test_feature_flag.py
index 9efebf97b878b..b243b46200764 100644
--- a/posthog/api/test/test_feature_flag.py
+++ b/posthog/api/test/test_feature_flag.py
@@ -939,7 +939,7 @@ def test_my_flags_is_not_nplus1(self) -> None:
format="json",
).json()
- with self.assertNumQueries(9):
+ with self.assertNumQueries(10):
response = self.client.get(f"/api/projects/{self.team.id}/feature_flags/my_flags")
self.assertEqual(response.status_code, status.HTTP_200_OK)
@@ -950,7 +950,7 @@ def test_my_flags_is_not_nplus1(self) -> None:
format="json",
).json()
- with self.assertNumQueries(9):
+ with self.assertNumQueries(10):
response = self.client.get(f"/api/projects/{self.team.id}/feature_flags/my_flags")
self.assertEqual(response.status_code, status.HTTP_200_OK)
@@ -961,7 +961,7 @@ def test_getting_flags_is_not_nplus1(self) -> None:
format="json",
).json()
- with self.assertNumQueries(FuzzyInt(10, 11)):
+ with self.assertNumQueries(FuzzyInt(11, 12)):
response = self.client.get(f"/api/projects/{self.team.id}/feature_flags")
self.assertEqual(response.status_code, status.HTTP_200_OK)
@@ -972,7 +972,7 @@ def test_getting_flags_is_not_nplus1(self) -> None:
format="json",
).json()
- with self.assertNumQueries(FuzzyInt(10, 11)):
+ with self.assertNumQueries(FuzzyInt(11, 12)):
response = self.client.get(f"/api/projects/{self.team.id}/feature_flags")
self.assertEqual(response.status_code, status.HTTP_200_OK)
diff --git a/posthog/api/test/test_query.py b/posthog/api/test/test_query.py
index 4e046a9268059..cde8de9c22196 100644
--- a/posthog/api/test/test_query.py
+++ b/posthog/api/test/test_query.py
@@ -407,7 +407,7 @@ def test_full_hogql_query(self):
with freeze_time("2020-01-10 12:14:00"):
query = HogQLQuery(query="select event, distinct_id, properties.key from events order by timestamp")
api_response = self.client.post(f"/api/projects/{self.team.id}/query/", {"query": query.dict()}).json()
- query.response = HogQLQueryResponse.parse_obj(api_response)
+ query.response = HogQLQueryResponse.model_validate(api_response)
self.assertEqual(query.response.results and len(query.response.results), 4)
self.assertEqual(
@@ -475,7 +475,7 @@ def test_invalid_query_kind(self):
assert api_response.status_code == 400
assert api_response.json()["code"] == "parse_error"
assert "validation errors for Model" in api_response.json()["detail"]
- assert "type=value_error.const; given=Tomato Soup" in api_response.json()["detail"]
+ assert "type=literal_error, input_value='Tomato Soup'" in api_response.json()["detail"]
@snapshot_clickhouse_queries
def test_full_hogql_query_view(self):
@@ -498,7 +498,6 @@ def test_full_hogql_query_view(self):
flush_persons_and_events()
with freeze_time("2020-01-10 12:14:00"):
-
self.client.post(
f"/api/projects/{self.team.id}/warehouse_saved_queries/",
{
@@ -511,7 +510,7 @@ def test_full_hogql_query_view(self):
)
query = HogQLQuery(query="select * from event_view")
api_response = self.client.post(f"/api/projects/{self.team.id}/query/", {"query": query.dict()}).json()
- query.response = HogQLQueryResponse.parse_obj(api_response)
+ query.response = HogQLQueryResponse.model_validate(api_response)
self.assertEqual(query.response.results and len(query.response.results), 4)
self.assertEqual(
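
Note: the `parse_obj` -> `model_validate` changes above, and the `literal_error` message assertion, track the Pydantic v1 to v2 migration. A minimal illustration of the renamed API, using a stand-in model rather than the real HogQLQueryResponse:

    from pydantic import BaseModel

    class QueryResponse(BaseModel):  # stand-in for HogQLQueryResponse
        results: list

    # Pydantic v2: model_validate replaces the v1 parse_obj classmethod
    resp = QueryResponse.model_validate({"results": [[1, 2, 3]]})
    assert resp.results == [[1, 2, 3]]
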
diff --git a/posthog/api/test/test_survey.py b/posthog/api/test/test_survey.py
index 45e13024c1a0b..820e4127edff4 100644
--- a/posthog/api/test/test_survey.py
+++ b/posthog/api/test/test_survey.py
@@ -77,6 +77,122 @@ def test_can_create_survey_with_linked_flag_and_targeting(self):
{"type": "open", "question": "What would you want to improve from notebooks?"}
]
+ def test_can_create_survey_with_targeting_with_remove_parameter(self):
+
+ response = self.client.post(
+ f"/api/projects/{self.team.id}/surveys/",
+ data={
+ "name": "Notebooks power users survey",
+ "type": "popover",
+ "questions": [{"type": "open", "question": "What would you want to improve from notebooks?"}],
+ "targeting_flag_filters": {
+ "groups": [
+ {
+ "variant": None,
+ "rollout_percentage": None,
+ "properties": [
+ {"key": "billing_plan", "value": ["cloud"], "operator": "exact", "type": "person"}
+ ],
+ }
+ ]
+ },
+ "remove_targeting_flag": False,
+ "conditions": {"url": "https://app.posthog.com/notebooks"},
+ },
+ format="json",
+ )
+
+ response_data = response.json()
+ assert response.status_code == status.HTTP_201_CREATED, response_data
+ assert FeatureFlag.objects.filter(id=response_data["targeting_flag"]["id"]).exists()
+ assert response_data["targeting_flag"]["filters"] == {
+ "groups": [
+ {
+ "variant": None,
+ "properties": [{"key": "billing_plan", "value": ["cloud"], "operator": "exact", "type": "person"}],
+ "rollout_percentage": None,
+ }
+ ]
+ }
+ assert response_data["conditions"] == {"url": "https://app.posthog.com/notebooks"}
+ assert response_data["questions"] == [
+ {"type": "open", "question": "What would you want to improve from notebooks?"}
+ ]
+
+ def test_used_in_survey_is_populated_correctly_for_feature_flag_list(self) -> None:
+ self.maxDiff = None
+
+ ff_key = "notebooks"
+ notebooks_flag = FeatureFlag.objects.create(team=self.team, key=ff_key, created_by=self.user)
+
+ response = self.client.post(
+ f"/api/projects/{self.team.id}/surveys/",
+ data={
+ "name": "Notebooks power users survey",
+ "type": "popover",
+ "questions": [{"type": "open", "question": "What would you want to improve from notebooks?"}],
+ "linked_flag_id": notebooks_flag.id,
+ "targeting_flag_filters": {
+ "groups": [
+ {
+ "variant": None,
+ "rollout_percentage": None,
+ "properties": [
+ {"key": "billing_plan", "value": ["cloud"], "operator": "exact", "type": "person"}
+ ],
+ }
+ ]
+ },
+ "conditions": {"url": "https://app.posthog.com/notebooks"},
+ },
+ format="json",
+ )
+
+ response_data = response.json()
+ assert response.status_code == status.HTTP_201_CREATED, response_data
+ assert response_data["linked_flag"]["id"] == notebooks_flag.id
+ assert FeatureFlag.objects.filter(id=response_data["targeting_flag"]["id"]).exists()
+
+ created_survey1 = response.json()["id"]
+
+ response = self.client.post(
+ f"/api/projects/{self.team.id}/surveys/",
+ data={
+ "name": "Notebooks random survey",
+ "type": "popover",
+ "questions": [{"type": "open", "question": "What would you want to improve from notebooks?"}],
+ "linked_flag_id": notebooks_flag.id,
+ "conditions": {"url": "https://app.posthog.com/notebooks"},
+ },
+ format="json",
+ )
+
+ response_data = response.json()
+ assert response.status_code == status.HTTP_201_CREATED, response_data
+ assert response_data["linked_flag"]["id"] == notebooks_flag.id
+ assert response_data["targeting_flag"] is None
+
+ created_survey2 = response.json()["id"]
+
+ # add another random feature flag
+ self.client.post(
+ f"/api/projects/{self.team.id}/feature_flags/",
+            data={"name": "flag", "key": "flag_0", "filters": {"groups": [{"rollout_percentage": 5}]}},
+ format="json",
+ ).json()
+
+ with self.assertNumQueries(12):
+ response = self.client.get(f"/api/projects/{self.team.id}/feature_flags")
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ result = response.json()
+
+ self.assertEqual(result["count"], 2)
+
+ self.assertEqual(
+ [(res["key"], [survey["id"] for survey in res["surveys"]]) for res in result["results"]],
+ [("flag_0", []), (ff_key, [created_survey1, created_survey2])],
+ )
+
def test_updating_survey_with_targeting_creates_or_updates_targeting_flag(self):
survey_with_targeting = self.client.post(
f"/api/projects/{self.team.id}/surveys/",
@@ -145,14 +261,241 @@ def test_updating_survey_with_targeting_creates_or_updates_targeting_flag(self):
updated_survey_updates_targeting_flag = self.client.patch(
f"/api/projects/{self.team.id}/surveys/{survey_with_targeting['id']}/",
data={
- "targeting_flag_filters": {"groups": [{"variant": None, "rollout_percentage": None, "properties": []}]},
+ "targeting_flag_filters": {"groups": [{"variant": None, "rollout_percentage": 20, "properties": []}]},
},
)
assert updated_survey_updates_targeting_flag.status_code == status.HTTP_200_OK
assert FeatureFlag.objects.filter(id=survey_with_targeting["targeting_flag"]["id"]).get().filters == {
- "groups": [{"variant": None, "properties": [], "rollout_percentage": None}]
+ "groups": [{"variant": None, "properties": [], "rollout_percentage": 20}]
}
+ def test_updating_survey_to_remove_targeting_doesnt_delete_targeting_flag(self):
+ survey_with_targeting = self.client.post(
+ f"/api/projects/{self.team.id}/surveys/",
+ data={
+ "name": "survey with targeting",
+ "type": "popover",
+ "targeting_flag_filters": {
+ "groups": [
+ {
+ "variant": None,
+ "rollout_percentage": None,
+ "properties": [
+ {"key": "billing_plan", "value": ["cloud"], "operator": "exact", "type": "person"}
+ ],
+ }
+ ]
+ },
+ "conditions": {"url": "https://app.posthog.com/notebooks"},
+ },
+ format="json",
+ ).json()
+
+ flagId = survey_with_targeting["targeting_flag"]["id"]
+ assert FeatureFlag.objects.filter(id=flagId).exists()
+
+ updated_survey_deletes_targeting_flag = self.client.patch(
+ f"/api/projects/{self.team.id}/surveys/{survey_with_targeting['id']}/",
+ data={
+ "name": "other",
+ # "targeting_flag_filters": None, # don't delete these
+ },
+ )
+
+ assert updated_survey_deletes_targeting_flag.status_code == status.HTTP_200_OK
+ assert updated_survey_deletes_targeting_flag.json()["name"] == "other"
+ assert updated_survey_deletes_targeting_flag.json()["targeting_flag"] is not None
+
+ assert FeatureFlag.objects.filter(id=flagId).exists()
+
+ def test_updating_survey_to_send_none_targeting_deletes_targeting_flag(self):
+ survey_with_targeting = self.client.post(
+ f"/api/projects/{self.team.id}/surveys/",
+ data={
+ "name": "survey with targeting",
+ "type": "popover",
+ "targeting_flag_filters": {
+ "groups": [
+ {
+ "variant": None,
+ "rollout_percentage": None,
+ "properties": [
+ {"key": "billing_plan", "value": ["cloud"], "operator": "exact", "type": "person"}
+ ],
+ }
+ ]
+ },
+ "conditions": {"url": "https://app.posthog.com/notebooks"},
+ },
+ format="json",
+ ).json()
+
+ flagId = survey_with_targeting["targeting_flag"]["id"]
+ assert FeatureFlag.objects.filter(id=flagId).exists()
+
+ updated_survey_deletes_targeting_flag = self.client.patch(
+ f"/api/projects/{self.team.id}/surveys/{survey_with_targeting['id']}/",
+ data={
+ "remove_targeting_flag": True, # delete targeting flag
+ },
+ )
+
+ assert updated_survey_deletes_targeting_flag.status_code == status.HTTP_200_OK
+ assert updated_survey_deletes_targeting_flag.json()["name"] == "survey with targeting"
+ assert updated_survey_deletes_targeting_flag.json()["targeting_flag"] is None
+
+ with self.assertRaises(FeatureFlag.DoesNotExist):
+ FeatureFlag.objects.get(id=flagId)
+
+ def test_updating_survey_other_props_doesnt_delete_targeting_flag(self):
+ survey_with_targeting = self.client.post(
+ f"/api/projects/{self.team.id}/surveys/",
+ data={
+ "name": "survey with targeting",
+ "type": "popover",
+ "targeting_flag_filters": {
+ "groups": [
+ {
+ "variant": None,
+ "rollout_percentage": None,
+ "properties": [
+ {"key": "billing_plan", "value": ["cloud"], "operator": "exact", "type": "person"}
+ ],
+ }
+ ]
+ },
+ "conditions": {"url": "https://app.posthog.com/notebooks"},
+ },
+ format="json",
+ ).json()
+
+ flagId = survey_with_targeting["targeting_flag"]["id"]
+ assert FeatureFlag.objects.filter(id=flagId).exists()
+
+ updated_survey_deletes_targeting_flag = self.client.patch(
+ f"/api/projects/{self.team.id}/surveys/{survey_with_targeting['id']}/",
+ data={"start_date": "2023-04-01T12:00:10"},
+ )
+
+ assert updated_survey_deletes_targeting_flag.status_code == status.HTTP_200_OK
+ assert updated_survey_deletes_targeting_flag.json()["name"] == "survey with targeting"
+ assert updated_survey_deletes_targeting_flag.json()["targeting_flag"] is not None
+
+ assert FeatureFlag.objects.filter(id=flagId).exists()
+
+ def test_survey_targeting_flag_validation(self):
+ survey_with_targeting = self.client.post(
+ f"/api/projects/{self.team.id}/surveys/",
+ data={
+ "name": "survey with targeting",
+ "type": "popover",
+ "targeting_flag_filters": {
+ "groups": [
+ {
+ "variant": None,
+ "rollout_percentage": None,
+ "properties": [
+ {"key": "billing_plan", "value": ["cloud"], "operator": "exact", "type": "person"}
+ ],
+ }
+ ]
+ },
+ "conditions": {"url": "https://app.posthog.com/notebooks"},
+ },
+ format="json",
+ ).json()
+
+ flagId = survey_with_targeting["targeting_flag"]["id"]
+ assert FeatureFlag.objects.filter(id=flagId).exists()
+
+ updated_survey_deletes_targeting_flag = self.client.patch(
+ f"/api/projects/{self.team.id}/surveys/{survey_with_targeting['id']}/",
+ data={
+ "targeting_flag_filters": {
+ "groups": [
+ {
+ "variant": None,
+ "rollout_percentage": None,
+ "properties": [],
+ }
+ ]
+ },
+ },
+ )
+
+ invalid_detail = "Invalid operation: User targeting rolls out to everyone. If you want to roll out to everyone, delete this targeting"
+
+ assert updated_survey_deletes_targeting_flag.status_code == status.HTTP_400_BAD_REQUEST
+ assert updated_survey_deletes_targeting_flag.json()["detail"] == invalid_detail
+
+ updated_survey_deletes_targeting_flag = self.client.patch(
+ f"/api/projects/{self.team.id}/surveys/{survey_with_targeting['id']}/",
+ data={
+ "targeting_flag_filters": {
+ "groups": [
+ {
+ "variant": None,
+ "rollout_percentage": 100,
+ "properties": [{"key": "value"}],
+ },
+ {
+ "variant": None,
+ "rollout_percentage": None,
+ "properties": [],
+ },
+ ]
+ },
+ },
+ )
+
+ assert updated_survey_deletes_targeting_flag.status_code == status.HTTP_400_BAD_REQUEST
+ assert updated_survey_deletes_targeting_flag.json()["detail"] == invalid_detail
+
+ updated_survey_deletes_targeting_flag = self.client.patch(
+ f"/api/projects/{self.team.id}/surveys/{survey_with_targeting['id']}/",
+ data={
+ "targeting_flag_filters": {
+ "groups": [
+ {
+ "variant": None,
+ "rollout_percentage": 100,
+ "properties": [{"key": "value"}],
+ },
+ {
+ "variant": None,
+ "rollout_percentage": 100,
+ "properties": [],
+ },
+ ]
+ },
+ },
+ )
+
+ assert updated_survey_deletes_targeting_flag.status_code == status.HTTP_400_BAD_REQUEST
+ assert updated_survey_deletes_targeting_flag.json()["detail"] == invalid_detail
+
+ updated_survey_deletes_targeting_flag = self.client.patch(
+ f"/api/projects/{self.team.id}/surveys/{survey_with_targeting['id']}/",
+ data={
+ "targeting_flag_filters": {
+ "groups": [
+ {
+ "variant": None,
+ "rollout_percentage": 100,
+ "properties": [{"key": "value", "type": "person", "value": "bleh"}],
+ },
+ {
+ "variant": None,
+ "rollout_percentage": 30,
+ "properties": [],
+ },
+ ]
+ },
+ },
+ )
+
+ assert updated_survey_deletes_targeting_flag.status_code == status.HTTP_200_OK
+
def test_deleting_survey_does_not_delete_linked_flag(self):
linked_flag = FeatureFlag.objects.create(team=self.team, key="early-access", created_by=self.user)
@@ -242,6 +585,49 @@ def test_can_list_surveys(self):
],
}
+ def test_updating_survey_name_validates(self):
+ survey_with_targeting = self.client.post(
+ f"/api/projects/{self.team.id}/surveys/",
+ data={
+ "name": "survey with targeting",
+ "type": "popover",
+ "targeting_flag_filters": {
+ "groups": [
+ {
+ "variant": None,
+ "rollout_percentage": None,
+ "properties": [
+ {"key": "billing_plan", "value": ["cloud"], "operator": "exact", "type": "person"}
+ ],
+ }
+ ]
+ },
+ "conditions": {"url": "https://app.posthog.com/notebooks"},
+ },
+ format="json",
+ ).json()
+
+ self.client.post(
+ f"/api/projects/{self.team.id}/surveys/",
+ data={
+ "name": "survey without targeting",
+ "type": "popover",
+ },
+ format="json",
+ ).json()
+
+ updated_survey_deletes_targeting_flag = self.client.patch(
+ f"/api/projects/{self.team.id}/surveys/{survey_with_targeting['id']}/",
+ data={
+ "name": "survey without targeting",
+ },
+ )
+
+ assert updated_survey_deletes_targeting_flag.status_code == status.HTTP_400_BAD_REQUEST
+ assert (
+ updated_survey_deletes_targeting_flag.json()["detail"] == "There is already another survey with this name."
+ )
+
class TestSurveysAPIList(BaseTest, QueryMatchingTest):
def setUp(self):
diff --git a/posthog/api/test/test_utils.py b/posthog/api/test/test_utils.py
index c34aa06dac9a7..84a3d7315c220 100644
--- a/posthog/api/test/test_utils.py
+++ b/posthog/api/test/test_utils.py
@@ -147,20 +147,35 @@ def test_safe_clickhouse_string_unicode_non_surrogates(self):
self.assertEqual(safe_clickhouse_string("💜 \u1f49c\ 💜"), "💜 \u1f49c\ 💜")
def test_raise_if_user_provided_url_unsafe(self):
+ # Sync test cases with plugin-server/src/utils/fetch.test.ts
raise_if_user_provided_url_unsafe("https://google.com?q=20") # Safe
raise_if_user_provided_url_unsafe("https://posthog.com") # Safe
raise_if_user_provided_url_unsafe("https://posthog.com/foo/bar") # Safe, with path
raise_if_user_provided_url_unsafe("https://posthog.com:443") # Safe, good port
raise_if_user_provided_url_unsafe("https://1.1.1.1") # Safe, public IP
- self.assertRaises(ValueError, lambda: raise_if_user_provided_url_unsafe("https://posthog.com:80")) # Bad port
- self.assertRaises(ValueError, lambda: raise_if_user_provided_url_unsafe("ftp://posthog.com")) # Bad scheme
- self.assertRaises(ValueError, lambda: raise_if_user_provided_url_unsafe("")) # Empty
- self.assertRaises(ValueError, lambda: raise_if_user_provided_url_unsafe("posthog.com")) # No scheme
- self.assertRaises(ValueError, lambda: raise_if_user_provided_url_unsafe("http://localhost")) # Internal
- self.assertRaises(ValueError, lambda: raise_if_user_provided_url_unsafe("http://192.168.0.5")) # Internal
- self.assertRaises(ValueError, lambda: raise_if_user_provided_url_unsafe("http://0.0.0.0")) # Internal
- self.assertRaises(ValueError, lambda: raise_if_user_provided_url_unsafe("http://10.0.0.24")) # Internal
- self.assertRaises(ValueError, lambda: raise_if_user_provided_url_unsafe("http://172.20.0.21")) # Internal
- self.assertRaises(
- ValueError, lambda: raise_if_user_provided_url_unsafe("http://fgtggggzzggggfd.com")
+ self.assertRaisesMessage(ValueError, "No hostname", lambda: raise_if_user_provided_url_unsafe(""))
+ self.assertRaisesMessage(ValueError, "No hostname", lambda: raise_if_user_provided_url_unsafe("@@@"))
+ self.assertRaisesMessage(ValueError, "No hostname", lambda: raise_if_user_provided_url_unsafe("posthog.com"))
+ self.assertRaisesMessage(
+ ValueError,
+ "Scheme must be either HTTP or HTTPS",
+ lambda: raise_if_user_provided_url_unsafe("ftp://posthog.com"),
+ )
+ self.assertRaisesMessage(
+ ValueError, "Internal hostname", lambda: raise_if_user_provided_url_unsafe("http://localhost")
+ )
+ self.assertRaisesMessage(
+ ValueError, "Internal hostname", lambda: raise_if_user_provided_url_unsafe("http://192.168.0.5")
+ )
+ self.assertRaisesMessage(
+ ValueError, "Internal hostname", lambda: raise_if_user_provided_url_unsafe("http://0.0.0.0")
+ )
+ self.assertRaisesMessage(
+ ValueError, "Internal hostname", lambda: raise_if_user_provided_url_unsafe("http://10.0.0.24")
+ )
+ self.assertRaisesMessage(
+ ValueError, "Internal hostname", lambda: raise_if_user_provided_url_unsafe("http://172.20.0.21")
+ )
+ self.assertRaisesMessage(
+ ValueError, "Invalid hostname", lambda: raise_if_user_provided_url_unsafe("http://fgtggggzzggggfd.com")
) # Non-existent
diff --git a/posthog/api/user.py b/posthog/api/user.py
index 0562943760244..4fbb85e8ca1a0 100644
--- a/posthog/api/user.py
+++ b/posthog/api/user.py
@@ -33,6 +33,7 @@
from posthog.api.shared import OrganizationBasicSerializer, TeamBasicSerializer
from posthog.api.utils import raise_if_user_provided_url_unsafe
from posthog.auth import authenticate_secondarily
+from posthog.cloud_utils import is_cloud
from posthog.email import is_email_available
from posthog.event_usage import report_user_logged_in, report_user_updated, report_user_verified_email
from posthog.models import Team, User, UserScenePersonalisation, Dashboard
@@ -450,7 +451,8 @@ def test_slack_webhook(request):
return JsonResponse({"error": "no webhook URL"})
message = {"text": "_Greetings_ from PostHog!"}
try:
- raise_if_user_provided_url_unsafe(webhook)
+ if is_cloud(): # Protect against SSRF
+ raise_if_user_provided_url_unsafe(webhook)
response = requests.post(webhook, verify=False, json=message)
if response.ok:
diff --git a/posthog/api/utils.py b/posthog/api/utils.py
index 2a9991ea37f4e..298908f1ccbe1 100644
--- a/posthog/api/utils.py
+++ b/posthog/api/utils.py
@@ -302,23 +302,20 @@ def parse_bool(value: Union[str, List[str]]) -> bool:
def raise_if_user_provided_url_unsafe(url: str):
- """Raise if the provided URL seems unsafe, otherwise do nothing."""
- parsed_url: urllib.parse.ParseResult = urllib.parse.urlparse(url)
+ """Raise if the provided URL seems unsafe, otherwise do nothing.
+
+ Equivalent of plugin server raiseIfUserProvidedUrlUnsafe.
+ """
+ parsed_url: urllib.parse.ParseResult = urllib.parse.urlparse(url) # urlparse never raises errors
if not parsed_url.hostname:
raise ValueError("No hostname")
- if parsed_url.scheme == "http":
- port = 80
- elif parsed_url.scheme == "https":
- port = 443
- else:
+ if parsed_url.scheme not in ("http", "https"):
raise ValueError("Scheme must be either HTTP or HTTPS")
- if parsed_url.port is not None and parsed_url.port != port:
- raise ValueError("Port does not match scheme")
# Disallow if hostname resolves to a private (internal) IP address
try:
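+        # Resolve without a port: only whether the resolved addresses are private matters for this check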
- addrinfo = socket.getaddrinfo(parsed_url.hostname, port)
+ addrinfo = socket.getaddrinfo(parsed_url.hostname, None)
except socket.gaierror:
raise ValueError("Invalid hostname")
for _, _, _, _, sockaddr in addrinfo:
if ip_address(sockaddr[0]).is_private: # Prevent addressing internal services
- raise ValueError("Invalid hostname")
+ raise ValueError("Internal hostname")
diff --git a/posthog/batch_exports/service.py b/posthog/batch_exports/service.py
index 5aa0fa7d18e22..b5eb182e68a70 100644
--- a/posthog/batch_exports/service.py
+++ b/posthog/batch_exports/service.py
@@ -52,6 +52,8 @@ class S3BatchExportInputs:
data_interval_end: str | None = None
compression: str | None = None
exclude_events: list[str] | None = None
+ encryption: str | None = None
+ kms_key_id: str | None = None
@dataclass
diff --git a/posthog/celery.py b/posthog/celery.py
index da4e2342a25cf..7ed47f2503639 100644
--- a/posthog/celery.py
+++ b/posthog/celery.py
@@ -6,14 +6,14 @@
from celery import Celery
from celery.schedules import crontab
-from celery.signals import setup_logging, task_postrun, task_prerun, worker_process_init
+from celery.signals import setup_logging, task_postrun, task_prerun, worker_process_init, task_success, task_failure
from django.conf import settings
from django.db import connection
from django.dispatch import receiver
from django.utils import timezone
from django_structlog.celery import signals
from django_structlog.celery.steps import DjangoStructLogInitStep
-from prometheus_client import Gauge
+from prometheus_client import Gauge, Counter
from posthog.cloud_utils import is_cloud
from posthog.metrics import pushed_metrics_registry
@@ -25,6 +25,24 @@
app = Celery("posthog")
+CELERY_TASK_PRE_RUN_COUNTER = Counter(
+ "posthog_celery_task_pre_run",
+ "task prerun signal is dispatched before a task is executed.",
+ labelnames=["task_name"],
+)
+
+CELERY_TASK_SUCCESS_COUNTER = Counter(
+ "posthog_celery_task_success",
+ "task success signal is dispatched when a task succeeds.",
+ labelnames=["task_name"],
+)
+
+CELERY_TASK_FAILURE_COUNTER = Counter(
+ "posthog_celery_task_failure",
+ "task failure signal is dispatched when a task succeeds.",
+ labelnames=["task_name"],
+)
+
# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
@@ -63,8 +81,10 @@ def receiver_bind_extra_request_metadata(sender, signal, task=None, logger=None)
@worker_process_init.connect
def on_worker_start(**kwargs) -> None:
from posthog.settings import sentry_init
+ from prometheus_client import start_http_server
sentry_init()
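+    # Expose this worker's Prometheus metrics (including the Celery task counters above) on port 8001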
+ start_http_server(8001)
@app.on_after_configure.connect
@@ -209,6 +229,18 @@ def pre_run_signal_handler(task_id, task, **kwargs):
tag_queries(kind="celery", id=task.name)
set_default_clickhouse_workload_type(Workload.OFFLINE)
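+    # Count task starts per task name so they can be compared against the success and failure counters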
+ CELERY_TASK_PRE_RUN_COUNTER.labels(task_name=task.name).inc()
+
+
+@task_success.connect
+def success_signal_handler(sender, **kwargs):
+ CELERY_TASK_SUCCESS_COUNTER.labels(task_name=sender.name).inc()
+
+
+@task_failure.connect
+def failure_signal_handler(sender, **kwargs):
+ CELERY_TASK_FAILURE_COUNTER.labels(task_name=sender.name).inc()
+
@task_postrun.connect
def teardown_instrumentation(task_id, task, **kwargs):
@@ -866,7 +898,7 @@ def check_flags_to_rollback():
@app.task(ignore_result=True)
def ee_persist_single_recording(id: str, team_id: int):
try:
- from ee.tasks.session_recording.persistence import persist_single_recording
+ from ee.session_recordings.persistence_tasks import persist_single_recording
persist_single_recording(id, team_id)
except ImportError:
@@ -876,7 +908,7 @@ def ee_persist_single_recording(id: str, team_id: int):
@app.task(ignore_result=True)
def ee_persist_finished_recordings():
try:
- from ee.tasks.session_recording.persistence import persist_finished_recordings
+ from ee.session_recordings.persistence_tasks import persist_finished_recordings
except ImportError:
pass
else:
diff --git a/posthog/clickhouse/migrations/0006_session_recording_events.py b/posthog/clickhouse/migrations/0006_session_recording_events.py
index 33a1765165741..5f9f1a8212261 100644
--- a/posthog/clickhouse/migrations/0006_session_recording_events.py
+++ b/posthog/clickhouse/migrations/0006_session_recording_events.py
@@ -1,5 +1,5 @@
from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions
-from posthog.models.session_recording_event.sql import (
+from posthog.session_recordings.sql.session_recording_event_sql import (
DISTRIBUTED_SESSION_RECORDING_EVENTS_TABLE_SQL,
KAFKA_SESSION_RECORDING_EVENTS_TABLE_SQL,
SESSION_RECORDING_EVENTS_TABLE_MV_SQL,
diff --git a/posthog/clickhouse/migrations/0020_session_recording_events_window_id.py b/posthog/clickhouse/migrations/0020_session_recording_events_window_id.py
index d8cffe6140833..6c5cf0c46649f 100644
--- a/posthog/clickhouse/migrations/0020_session_recording_events_window_id.py
+++ b/posthog/clickhouse/migrations/0020_session_recording_events_window_id.py
@@ -1,5 +1,5 @@
from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions
-from posthog.models.session_recording_event.sql import (
+from posthog.session_recordings.sql.session_recording_event_sql import (
KAFKA_SESSION_RECORDING_EVENTS_TABLE_SQL,
SESSION_RECORDING_EVENTS_TABLE_MV_SQL,
)
diff --git a/posthog/clickhouse/migrations/0036_session_recording_events_materialized_columns.py b/posthog/clickhouse/migrations/0036_session_recording_events_materialized_columns.py
index 43a611ce4e2b1..be819a0111a01 100644
--- a/posthog/clickhouse/migrations/0036_session_recording_events_materialized_columns.py
+++ b/posthog/clickhouse/migrations/0036_session_recording_events_materialized_columns.py
@@ -1,7 +1,7 @@
from infi.clickhouse_orm import migrations
from posthog.client import sync_execute
-from posthog.models.session_recording_event.sql import MATERIALIZED_COLUMNS
+from posthog.session_recordings.sql.session_recording_event_sql import MATERIALIZED_COLUMNS
from posthog.settings import CLICKHOUSE_CLUSTER
diff --git a/posthog/clickhouse/migrations/0043_session_replay_events.py b/posthog/clickhouse/migrations/0043_session_replay_events.py
index 1fde598e3ea6c..32658df0e50e0 100644
--- a/posthog/clickhouse/migrations/0043_session_replay_events.py
+++ b/posthog/clickhouse/migrations/0043_session_replay_events.py
@@ -1,5 +1,5 @@
from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions
-from posthog.models.session_replay_event.sql import (
+from posthog.session_recordings.sql.session_replay_event_sql import (
SESSION_REPLAY_EVENTS_TABLE_MV_SQL,
KAFKA_SESSION_REPLAY_EVENTS_TABLE_SQL,
SESSION_REPLAY_EVENTS_TABLE_SQL,
diff --git a/posthog/clickhouse/migrations/0044_session_replay_events_console_counts.py b/posthog/clickhouse/migrations/0044_session_replay_events_console_counts.py
index b3d3b02fc8510..385165096a353 100644
--- a/posthog/clickhouse/migrations/0044_session_replay_events_console_counts.py
+++ b/posthog/clickhouse/migrations/0044_session_replay_events_console_counts.py
@@ -1,12 +1,12 @@
from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions
-from posthog.models.session_replay_event.migrations_sql import (
+from posthog.session_recordings.sql.session_replay_event_migrations_sql import (
DROP_SESSION_REPLAY_EVENTS_TABLE_MV_SQL,
DROP_KAFKA_SESSION_REPLAY_EVENTS_TABLE_SQL,
ADD_CONSOLE_COUNTS_WRITABLE_SESSION_REPLAY_EVENTS_TABLE_SQL,
ADD_CONSOLE_COUNTS_DISTRIBUTED_SESSION_REPLAY_EVENTS_TABLE_SQL,
ADD_CONSOLE_COUNTS_SESSION_REPLAY_EVENTS_TABLE_SQL,
)
-from posthog.models.session_replay_event.sql import (
+from posthog.session_recordings.sql.session_replay_event_sql import (
SESSION_REPLAY_EVENTS_TABLE_MV_SQL,
KAFKA_SESSION_REPLAY_EVENTS_TABLE_SQL,
)
diff --git a/posthog/clickhouse/migrations/0045_session_replay_events_size.py b/posthog/clickhouse/migrations/0045_session_replay_events_size.py
index 7f42278252b99..f09862de0bfc1 100644
--- a/posthog/clickhouse/migrations/0045_session_replay_events_size.py
+++ b/posthog/clickhouse/migrations/0045_session_replay_events_size.py
@@ -1,12 +1,12 @@
from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions
-from posthog.models.session_replay_event.migrations_sql import (
+from posthog.session_recordings.sql.session_replay_event_migrations_sql import (
DROP_SESSION_REPLAY_EVENTS_TABLE_MV_SQL,
DROP_KAFKA_SESSION_REPLAY_EVENTS_TABLE_SQL,
ADD_SIZE_WRITABLE_SESSION_REPLAY_EVENTS_TABLE_SQL,
ADD_SIZE_DISTRIBUTED_SESSION_REPLAY_EVENTS_TABLE_SQL,
ADD_SIZE_SESSION_REPLAY_EVENTS_TABLE_SQL,
)
-from posthog.models.session_replay_event.sql import (
+from posthog.session_recordings.sql.session_replay_event_sql import (
SESSION_REPLAY_EVENTS_TABLE_MV_SQL,
KAFKA_SESSION_REPLAY_EVENTS_TABLE_SQL,
)
diff --git a/posthog/clickhouse/migrations/0048_session_replay_events_count.py b/posthog/clickhouse/migrations/0048_session_replay_events_count.py
new file mode 100644
index 0000000000000..2756f49ce2d46
--- /dev/null
+++ b/posthog/clickhouse/migrations/0048_session_replay_events_count.py
@@ -0,0 +1,26 @@
+from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions
+from posthog.session_recordings.sql.session_replay_event_migrations_sql import (
+ DROP_SESSION_REPLAY_EVENTS_TABLE_MV_SQL,
+ DROP_KAFKA_SESSION_REPLAY_EVENTS_TABLE_SQL,
+ ADD_EVENT_COUNT_WRITABLE_SESSION_REPLAY_EVENTS_TABLE_SQL,
+ ADD_EVENT_COUNT_DISTRIBUTED_SESSION_REPLAY_EVENTS_TABLE_SQL,
+ ADD_EVENT_COUNT_SESSION_REPLAY_EVENTS_TABLE_SQL,
+)
+from posthog.session_recordings.sql.session_replay_event_sql import (
+ SESSION_REPLAY_EVENTS_TABLE_MV_SQL,
+ KAFKA_SESSION_REPLAY_EVENTS_TABLE_SQL,
+)
+
+operations = [
+    # we have to drop the materialized view first so that we're no longer pulling from kafka
+ # then we drop the kafka table
+ run_sql_with_exceptions(DROP_SESSION_REPLAY_EVENTS_TABLE_MV_SQL()),
+ run_sql_with_exceptions(DROP_KAFKA_SESSION_REPLAY_EVENTS_TABLE_SQL()),
+ # now we can alter the target tables
+ run_sql_with_exceptions(ADD_EVENT_COUNT_WRITABLE_SESSION_REPLAY_EVENTS_TABLE_SQL()),
+ run_sql_with_exceptions(ADD_EVENT_COUNT_DISTRIBUTED_SESSION_REPLAY_EVENTS_TABLE_SQL()),
+ run_sql_with_exceptions(ADD_EVENT_COUNT_SESSION_REPLAY_EVENTS_TABLE_SQL()),
+ # and then recreate the materialized views and kafka tables
+ run_sql_with_exceptions(KAFKA_SESSION_REPLAY_EVENTS_TABLE_SQL()),
+ run_sql_with_exceptions(SESSION_REPLAY_EVENTS_TABLE_MV_SQL()),
+]
diff --git a/posthog/clickhouse/schema.py b/posthog/clickhouse/schema.py
index aa31fb4343a63..b9e291092d0f1 100644
--- a/posthog/clickhouse/schema.py
+++ b/posthog/clickhouse/schema.py
@@ -27,8 +27,8 @@
PERSON_OVERRIDES_CREATE_MATERIALIZED_VIEW_SQL,
PERSON_OVERRIDES_CREATE_TABLE_SQL,
)
-from posthog.models.session_recording_event.sql import *
-from posthog.models.session_replay_event.sql import (
+from posthog.session_recordings.sql.session_recording_event_sql import *
+from posthog.session_recordings.sql.session_replay_event_sql import (
KAFKA_SESSION_REPLAY_EVENTS_TABLE_SQL,
DISTRIBUTED_SESSION_REPLAY_EVENTS_TABLE_SQL,
SESSION_REPLAY_EVENTS_TABLE_SQL,
diff --git a/posthog/clickhouse/system_status.py b/posthog/clickhouse/system_status.py
index 417525330a96c..b8cb1a6575970 100644
--- a/posthog/clickhouse/system_status.py
+++ b/posthog/clickhouse/system_status.py
@@ -11,7 +11,7 @@
from posthog.client import query_with_columns, sync_execute
from posthog.cloud_utils import is_cloud
from posthog.models.event.util import get_event_count, get_event_count_for_last_month, get_event_count_month_to_date
-from posthog.models.session_recording_event.util import (
+from posthog.session_recordings.models.system_status_queries import (
get_recording_count_month_to_date,
get_recording_events_count_month_to_date,
)
diff --git a/posthog/clickhouse/test/__snapshots__/test_schema.ambr b/posthog/clickhouse/test/__snapshots__/test_schema.ambr
index 36ab529259c77..ac21b1ac5989f 100644
--- a/posthog/clickhouse/test/__snapshots__/test_schema.ambr
+++ b/posthog/clickhouse/test/__snapshots__/test_schema.ambr
@@ -336,7 +336,9 @@
console_log_count Int64,
console_warn_count Int64,
console_error_count Int64,
- size Int64
+ size Int64,
+ event_count Int64,
+ message_count Int64
) ENGINE = Kafka('test.kafka.broker:9092', 'clickhouse_session_replay_events_test', 'group1', 'JSONEachRow')
'
@@ -922,7 +924,9 @@
console_log_count Int64,
console_warn_count Int64,
console_error_count Int64,
- size Int64
+ size Int64,
+ event_count Int64,
+ message_count Int64
) ENGINE = Kafka('kafka:9092', 'clickhouse_session_replay_events_test', 'group1', 'JSONEachRow')
'
@@ -1344,7 +1348,15 @@
console_warn_count SimpleAggregateFunction(sum, Int64),
console_error_count SimpleAggregateFunction(sum, Int64),
-- this column allows us to estimate the amount of data that is being ingested
- size SimpleAggregateFunction(sum, Int64)
+ size SimpleAggregateFunction(sum, Int64),
+ -- this allows us to count the number of messages received in a session
+ -- often very useful in incidents or debugging
+ message_count SimpleAggregateFunction(sum, Int64),
+ -- this allows us to count the number of snapshot events received in a session
+ -- often very useful in incidents or debugging
+ -- because we batch events we expect message_count to be lower than event_count
+ event_count SimpleAggregateFunction(sum, Int64),
+ _timestamp SimpleAggregateFunction(max, DateTime)
) ENGINE = Distributed('posthog', 'posthog_test', 'sharded_session_replay_events', sipHash64(distinct_id))
'
@@ -1377,7 +1389,11 @@
sum(console_log_count) as console_log_count,
sum(console_warn_count) as console_warn_count,
sum(console_error_count) as console_error_count,
- sum(size) as size
+ sum(size) as size,
+ -- we can count the number of kafka messages instead of sending it explicitly
+ sum(message_count) as message_count,
+ sum(event_count) as event_count,
+ max(_timestamp) as _timestamp
FROM posthog_test.kafka_session_replay_events
group by session_id, team_id
@@ -1608,7 +1624,15 @@
console_warn_count SimpleAggregateFunction(sum, Int64),
console_error_count SimpleAggregateFunction(sum, Int64),
-- this column allows us to estimate the amount of data that is being ingested
- size SimpleAggregateFunction(sum, Int64)
+ size SimpleAggregateFunction(sum, Int64),
+ -- this allows us to count the number of messages received in a session
+ -- often very useful in incidents or debugging
+ message_count SimpleAggregateFunction(sum, Int64),
+ -- this allows us to count the number of snapshot events received in a session
+ -- often very useful in incidents or debugging
+ -- because we batch events we expect message_count to be lower than event_count
+ event_count SimpleAggregateFunction(sum, Int64),
+ _timestamp SimpleAggregateFunction(max, DateTime)
) ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/77f1df52-4b43-11e9-910f-b8ca3a9b9f3e_{shard}/posthog.session_replay_events', '{replica}')
PARTITION BY toYYYYMM(min_first_timestamp)
@@ -2226,7 +2250,15 @@
console_warn_count SimpleAggregateFunction(sum, Int64),
console_error_count SimpleAggregateFunction(sum, Int64),
-- this column allows us to estimate the amount of data that is being ingested
- size SimpleAggregateFunction(sum, Int64)
+ size SimpleAggregateFunction(sum, Int64),
+ -- this allows us to count the number of messages received in a session
+ -- often very useful in incidents or debugging
+ message_count SimpleAggregateFunction(sum, Int64),
+ -- this allows us to count the number of snapshot events received in a session
+ -- often very useful in incidents or debugging
+ -- because we batch events we expect message_count to be lower than event_count
+ event_count SimpleAggregateFunction(sum, Int64),
+ _timestamp SimpleAggregateFunction(max, DateTime)
) ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/77f1df52-4b43-11e9-910f-b8ca3a9b9f3e_{shard}/posthog.session_replay_events', '{replica}')
PARTITION BY toYYYYMM(min_first_timestamp)
diff --git a/posthog/conftest.py b/posthog/conftest.py
index 0b9d3fd85c99c..06e7e256aed79 100644
--- a/posthog/conftest.py
+++ b/posthog/conftest.py
@@ -40,7 +40,7 @@ def reset_clickhouse_tables():
TRUNCATE_PERSON_STATIC_COHORT_TABLE_SQL,
TRUNCATE_PERSON_TABLE_SQL,
)
- from posthog.models.session_recording_event.sql import TRUNCATE_SESSION_RECORDING_EVENTS_TABLE_SQL
+ from posthog.session_recordings.sql.session_recording_event_sql import TRUNCATE_SESSION_RECORDING_EVENTS_TABLE_SQL
# REMEMBER TO ADD ANY NEW CLICKHOUSE TABLES TO THIS ARRAY!
TABLES_TO_CREATE_DROP = [
diff --git a/posthog/errors.py b/posthog/errors.py
index 5cd3342f7a3fa..b2d34ed858448 100644
--- a/posthog/errors.py
+++ b/posthog/errors.py
@@ -206,7 +206,7 @@ def look_up_error_code_meta(error: ServerException) -> ErrorCodeMeta:
131: ErrorCodeMeta("TOO_LARGE_STRING_SIZE"),
133: ErrorCodeMeta("AGGREGATE_FUNCTION_DOESNT_ALLOW_PARAMETERS"),
134: ErrorCodeMeta("PARAMETERS_TO_AGGREGATE_FUNCTIONS_MUST_BE_LITERALS"),
- 135: ErrorCodeMeta("ZERO_ARRAY_OR_TUPLE_INDEX"),
+ 135: ErrorCodeMeta("ZERO_ARRAY_OR_TUPLE_INDEX", user_safe=True),
137: ErrorCodeMeta("UNKNOWN_ELEMENT_IN_CONFIG"),
138: ErrorCodeMeta("EXCESSIVE_ELEMENT_IN_CONFIG"),
139: ErrorCodeMeta("NO_ELEMENTS_IN_CONFIG"),
diff --git a/posthog/helpers/tests/test_session_recording_helpers.py b/posthog/helpers/tests/test_session_recording_helpers.py
index 5561d0131f287..ee6a3c6ccada2 100644
--- a/posthog/helpers/tests/test_session_recording_helpers.py
+++ b/posthog/helpers/tests/test_session_recording_helpers.py
@@ -42,7 +42,7 @@ def mock_capture_flow(events: List[dict], max_size_bytes=512 * 1024) -> Tuple[Li
)
new_replay_events = preprocess_replay_events_for_blob_ingestion(replay_events, max_size_bytes=max_size_bytes)
- return (legacy_replay_events + other_events, new_replay_events + other_events)
+ return legacy_replay_events + other_events, new_replay_events + other_events
def test_preprocess_with_no_recordings():
@@ -320,7 +320,7 @@ def test_decompress_data_returning_only_activity_info(chunked_and_compressed_sna
def test_get_events_summary_from_snapshot_data():
timestamp = round(datetime.now().timestamp() * 1000)
- snapshot_events = [
+ snapshot_events: List[SnapshotData | None] = [
# ignore malformed events
{"type": 2, "foo": "bar"},
# ignore other props
diff --git a/posthog/hogql/constants.py b/posthog/hogql/constants.py
index a5c7d35f13675..fc01500f79e83 100644
--- a/posthog/hogql/constants.py
+++ b/posthog/hogql/constants.py
@@ -1,7 +1,7 @@
from datetime import date, datetime
from typing import Optional, Literal, TypeAlias, Tuple, List
from uuid import UUID
-from pydantic import BaseModel, Extra
+from pydantic import ConfigDict, BaseModel
ConstantDataType: TypeAlias = Literal[
"int", "float", "str", "bool", "array", "tuple", "date", "datetime", "uuid", "unknown"
@@ -24,8 +24,7 @@
# Settings applied on top of all HogQL queries.
class HogQLSettings(BaseModel):
- class Config:
- extra = Extra.forbid
+ model_config = ConfigDict(extra="forbid")
readonly: Optional[int] = 2
max_execution_time: Optional[int] = 60
diff --git a/posthog/hogql/database/database.py b/posthog/hogql/database/database.py
index 4f5cea81d1425..5cddd747c3429 100644
--- a/posthog/hogql/database/database.py
+++ b/posthog/hogql/database/database.py
@@ -1,6 +1,6 @@
-from typing import Any, Dict, List, Literal, Optional, TypedDict
+from typing import Any, ClassVar, Dict, List, Literal, Optional, TypedDict
from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
-from pydantic import BaseModel, Extra
+from pydantic import ConfigDict, BaseModel
from posthog.hogql.database.models import (
FieldTraverser,
@@ -33,8 +33,7 @@
class Database(BaseModel):
- class Config:
- extra = Extra.allow
+ model_config = ConfigDict(extra="allow")
# Users can query from the tables below
events: EventsTable = EventsTable()
@@ -58,7 +57,7 @@ class Config:
numbers: NumbersTable = NumbersTable()
# clunky: keep table names in sync with above
- _table_names: List[str] = [
+ _table_names: ClassVar[List[str]] = [
"events",
"groups",
"person",
@@ -182,7 +181,7 @@ class SerializedField(_SerializedFieldBase, total=False):
def serialize_database(database: Database) -> Dict[str, List[SerializedField]]:
tables: Dict[str, List[SerializedField]] = {}
- for table_key in database.__fields__.keys():
+ for table_key in database.model_fields.keys():
field_input: Dict[str, Any] = {}
table = getattr(database, table_key, None)
if isinstance(table, FunctionCallTable):
diff --git a/posthog/hogql/database/models.py b/posthog/hogql/database/models.py
index dc5b74724b96f..e5283eb68142e 100644
--- a/posthog/hogql/database/models.py
+++ b/posthog/hogql/database/models.py
@@ -1,5 +1,5 @@
from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING
-from pydantic import BaseModel, Extra
+from pydantic import ConfigDict, BaseModel
from posthog.hogql.errors import HogQLException, NotImplementedException
@@ -16,8 +16,7 @@ class DatabaseField(FieldOrTable):
Base class for a field in a database table.
"""
- class Config:
- extra = Extra.forbid
+ model_config = ConfigDict(extra="forbid")
name: str
array: Optional[bool] = None
@@ -57,17 +56,14 @@ class BooleanDatabaseField(DatabaseField):
class FieldTraverser(FieldOrTable):
- class Config:
- extra = Extra.forbid
+ model_config = ConfigDict(extra="forbid")
chain: List[str]
class Table(FieldOrTable):
fields: Dict[str, FieldOrTable]
-
- class Config:
- extra = Extra.forbid
+ model_config = ConfigDict(extra="forbid")
def has_field(self, name: str) -> bool:
return name in self.fields
@@ -102,8 +98,7 @@ def get_asterisk(self):
class LazyJoin(FieldOrTable):
- class Config:
- extra = Extra.forbid
+ model_config = ConfigDict(extra="forbid")
join_function: Callable[[str, str, Dict[str, Any]], Any]
join_table: Table
@@ -115,8 +110,7 @@ class LazyTable(Table):
A table that is replaced with a subquery returned from `lazy_select(requested_fields: Dict[name, chain])`
"""
- class Config:
- extra = Extra.forbid
+ model_config = ConfigDict(extra="forbid")
def lazy_select(self, requested_fields: Dict[str, List[str]]) -> Any:
raise NotImplementedException("LazyTable.lazy_select not overridden")
@@ -127,8 +121,7 @@ class VirtualTable(Table):
A nested table that reuses the parent for storage. E.g. events.person.* fields with PoE enabled.
"""
- class Config:
- extra = Extra.forbid
+ model_config = ConfigDict(extra="forbid")
class FunctionCallTable(Table):
diff --git a/posthog/hogql/database/schema/numbers.py b/posthog/hogql/database/schema/numbers.py
index 6db50440cda9e..01c09ac66d797 100644
--- a/posthog/hogql/database/schema/numbers.py
+++ b/posthog/hogql/database/schema/numbers.py
@@ -1,4 +1,4 @@
-from typing import Dict
+from typing import Dict, Optional
from posthog.hogql.database.models import (
IntegerDatabaseField,
@@ -14,9 +14,9 @@
class NumbersTable(FunctionCallTable):
fields: Dict[str, FieldOrTable] = NUMBERS_TABLE_FIELDS
- name = "numbers"
- min_args = 1
- max_args = 2
+ name: str = "numbers"
+ min_args: Optional[int] = 1
+ max_args: Optional[int] = 2
def to_printed_clickhouse(self, context):
return "numbers"
diff --git a/posthog/hogql/database/schema/session_replay_events.py b/posthog/hogql/database/schema/session_replay_events.py
index c4f1980df5491..b8d79e86d9780 100644
--- a/posthog/hogql/database/schema/session_replay_events.py
+++ b/posthog/hogql/database/schema/session_replay_events.py
@@ -31,6 +31,8 @@
"console_warn_count": IntegerDatabaseField(name="console_warn_count"),
"console_error_count": IntegerDatabaseField(name="console_error_count"),
"size": IntegerDatabaseField(name="size"),
+ "event_count": IntegerDatabaseField(name="event_count"),
+ "message_count": IntegerDatabaseField(name="message_count"),
"pdi": LazyJoin(
from_field="distinct_id",
join_table=PersonDistinctIdsTable(),
@@ -77,6 +79,8 @@ def select_from_session_replay_events_table(requested_fields: Dict[str, List[str
"console_error_count": ast.Call(name="sum", args=[ast.Field(chain=[table_name, "console_error_count"])]),
"distinct_id": ast.Call(name="any", args=[ast.Field(chain=[table_name, "distinct_id"])]),
"size": ast.Call(name="sum", args=[ast.Field(chain=[table_name, "size"])]),
+ "event_count": ast.Call(name="sum", args=[ast.Field(chain=[table_name, "event_count"])]),
+ "message_count": ast.Call(name="sum", args=[ast.Field(chain=[table_name, "message_count"])]),
}
select_fields: List[ast.Expr] = []
diff --git a/posthog/hogql/database/test/__snapshots__/test_database.ambr b/posthog/hogql/database/test/__snapshots__/test_database.ambr
index 166391d344856..9e1413d84a0bf 100644
--- a/posthog/hogql/database/test/__snapshots__/test_database.ambr
+++ b/posthog/hogql/database/test/__snapshots__/test_database.ambr
@@ -276,6 +276,14 @@
"key": "size",
"type": "integer"
},
+ {
+ "key": "event_count",
+ "type": "integer"
+ },
+ {
+ "key": "message_count",
+ "type": "integer"
+ },
{
"key": "pdi",
"type": "lazy_table",
@@ -405,6 +413,14 @@
"key": "size",
"type": "integer"
},
+ {
+ "key": "event_count",
+ "type": "integer"
+ },
+ {
+ "key": "message_count",
+ "type": "integer"
+ },
{
"key": "pdi",
"type": "lazy_table",
@@ -849,6 +865,14 @@
"key": "size",
"type": "integer"
},
+ {
+ "key": "event_count",
+ "type": "integer"
+ },
+ {
+ "key": "message_count",
+ "type": "integer"
+ },
{
"key": "pdi",
"type": "lazy_table",
@@ -978,6 +1002,14 @@
"key": "size",
"type": "integer"
},
+ {
+ "key": "event_count",
+ "type": "integer"
+ },
+ {
+ "key": "message_count",
+ "type": "integer"
+ },
{
"key": "pdi",
"type": "lazy_table",
diff --git a/posthog/hogql/placeholders.py b/posthog/hogql/placeholders.py
index 670b98cfd45e5..bd63ce32754c0 100644
--- a/posthog/hogql/placeholders.py
+++ b/posthog/hogql/placeholders.py
@@ -32,7 +32,7 @@ def __init__(self, placeholders: Optional[Dict[str, ast.Expr]]):
def visit_placeholder(self, node):
if not self.placeholders:
raise HogQLException(f"Placeholders, such as {{{node.field}}}, are not supported in this context")
- if node.field in self.placeholders:
+ if node.field in self.placeholders and self.placeholders[node.field] is not None:
new_node = self.placeholders[node.field]
new_node.start = node.start
new_node.end = node.end
diff --git a/posthog/hogql/property.py b/posthog/hogql/property.py
index 3caa10d51f8f6..81efafc225a1f 100644
--- a/posthog/hogql/property.py
+++ b/posthog/hogql/property.py
@@ -15,7 +15,7 @@
from posthog.models.property import PropertyGroup
from posthog.models.property.util import build_selector_regex
from posthog.models.property_definition import PropertyType
-from posthog.schema import PropertyOperator
+from posthog.schema import PropertyOperator, PropertyGroupFilter, PropertyGroupFilterValue, FilterLogicalOperator
def has_aggregation(expr: AST) -> bool:
@@ -59,16 +59,30 @@ def property_to_expr(property: Union[BaseModel, PropertyGroup, Property, dict, l
return ast.And(exprs=properties)
elif isinstance(property, Property):
pass
- elif isinstance(property, PropertyGroup):
- if property.type != PropertyOperatorType.AND and property.type != PropertyOperatorType.OR:
+ elif (
+ isinstance(property, PropertyGroup)
+ or isinstance(property, PropertyGroupFilter)
+ or isinstance(property, PropertyGroupFilterValue)
+ ):
+ if (
+ isinstance(property, PropertyGroup)
+ and property.type != PropertyOperatorType.AND
+ and property.type != PropertyOperatorType.OR
+ ):
raise NotImplementedException(f'PropertyGroup of unknown type "{property.type}"')
+ if (
+ (isinstance(property, PropertyGroupFilter) or isinstance(property, PropertyGroupFilterValue))
+ and property.type != FilterLogicalOperator.AND
+ and property.type != FilterLogicalOperator.OR
+ ):
+ raise NotImplementedException(f'PropertyGroupFilter of unknown type "{property.type}"')
if len(property.values) == 0:
return ast.Constant(value=True)
if len(property.values) == 1:
return property_to_expr(property.values[0], team)
- if property.type == PropertyOperatorType.AND:
+ if property.type == PropertyOperatorType.AND or property.type == FilterLogicalOperator.AND:
return ast.And(exprs=[property_to_expr(p, team) for p in property.values])
else:
return ast.Or(exprs=[property_to_expr(p, team) for p in property.values])
diff --git a/posthog/hogql/test/test_metadata.py b/posthog/hogql/test/test_metadata.py
index 584dbfab67d0d..54dbcb17a9eb6 100644
--- a/posthog/hogql/test/test_metadata.py
+++ b/posthog/hogql/test/test_metadata.py
@@ -8,10 +8,12 @@ class TestMetadata(ClickhouseTestMixin, APIBaseTest):
maxDiff = None
def _expr(self, query: str) -> HogQLMetadataResponse:
- return get_hogql_metadata(query=HogQLMetadata(expr=query), team=self.team)
+ return get_hogql_metadata(query=HogQLMetadata(kind="HogQLMetadata", expr=query, response=None), team=self.team)
def _select(self, query: str) -> HogQLMetadataResponse:
- return get_hogql_metadata(query=HogQLMetadata(select=query), team=self.team)
+ return get_hogql_metadata(
+ query=HogQLMetadata(kind="HogQLMetadata", select=query, response=None), team=self.team
+ )
def test_metadata_valid_expr_select(self):
metadata = self._expr("select 1")
diff --git a/posthog/hogql/test/test_query.py b/posthog/hogql/test/test_query.py
index ed84eeaf4af6d..d45a1edcb9672 100644
--- a/posthog/hogql/test/test_query.py
+++ b/posthog/hogql/test/test_query.py
@@ -13,7 +13,7 @@
from posthog.models import Cohort
from posthog.models.cohort.util import recalculate_cohortpeople
from posthog.models.utils import UUIDT
-from posthog.queries.session_recordings.test.session_replay_sql import produce_replay_summary
+from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary
from posthog.schema import HogQLFilters, EventPropertyFilter, DateRange, QueryTiming
from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, _create_person, flush_persons_and_events
from posthog.warehouse.models import DataWarehouseSavedQuery, DataWarehouseViewLink
@@ -1446,7 +1446,7 @@ def test_hogql_query_filters(self):
)
query = "SELECT event, distinct_id from events WHERE distinct_id={distinct_id} and {filters}"
filters = HogQLFilters(
- properties=[EventPropertyFilter(key="index", operator="exact", value=4, type="event")]
+ properties=[EventPropertyFilter(key="index", operator="exact", value="4", type="event")]
)
placeholders = {"distinct_id": ast.Constant(value=random_uuid)}
response = execute_hogql_query(query, team=self.team, filters=filters, placeholders=placeholders)
diff --git a/posthog/hogql_queries/lifecycle_hogql_query.py b/posthog/hogql_queries/lifecycle_hogql_query.py
deleted file mode 100644
index 2df71a976d1a9..0000000000000
--- a/posthog/hogql_queries/lifecycle_hogql_query.py
+++ /dev/null
@@ -1,176 +0,0 @@
-from typing import Dict, Any
-
-from django.utils.timezone import datetime
-
-from posthog.hogql import ast
-from posthog.hogql.parser import parse_expr, parse_select
-from posthog.hogql.query import execute_hogql_query
-from posthog.hogql_queries.query_date_range import QueryDateRange
-from posthog.models import Team
-from posthog.schema import LifecycleQuery
-
-
-def create_time_filter(date_range: QueryDateRange) -> ast.Expr:
- # don't need timezone here, as HogQL will use the project timezone automatically
- # :TRICKY: We fetch all data even for the period before the graph starts up until the end of the last period
- time_filter = parse_expr(
- """
- (timestamp >= dateTrunc({interval}, {date_from}) - {one_interval_period})
- AND
- (timestamp < dateTrunc({interval}, {date_to}) + {one_interval_period})
- """,
- placeholders={
- "date_from": date_range.date_from_as_hogql,
- "date_to": date_range.date_to_as_hogql,
- "one_interval_period": date_range.one_interval_period_as_hogql,
- "interval": date_range.interval_period_string_as_hogql,
- },
- )
-
- return time_filter
-
-
-def create_events_query(date_range: QueryDateRange, event_filter: ast.Expr):
- if not event_filter:
- event_filter = ast.Constant(value=True)
-
- placeholders = {
- "event_filter": event_filter,
- "interval": date_range.interval_period_string_as_hogql,
- "one_interval_period": date_range.one_interval_period_as_hogql,
- }
-
- events_query = parse_select(
- """
- SELECT
- events.person.id as person_id,
- min(events.person.created_at) AS created_at,
- arraySort(groupUniqArray(dateTrunc({interval}, events.timestamp))) AS all_activity,
- arrayPopBack(arrayPushFront(all_activity, dateTrunc({interval}, created_at))) as previous_activity,
- arrayPopFront(arrayPushBack(all_activity, dateTrunc({interval}, toDateTime('1970-01-01 00:00:00')))) as following_activity,
- arrayMap((previous, current, index) -> (previous = current ? 'new' : ((current - {one_interval_period}) = previous AND index != 1) ? 'returning' : 'resurrecting'), previous_activity, all_activity, arrayEnumerate(all_activity)) as initial_status,
- arrayMap((current, next) -> (current + {one_interval_period} = next ? '' : 'dormant'), all_activity, following_activity) as dormant_status,
- arrayMap(x -> x + {one_interval_period}, arrayFilter((current, is_dormant) -> is_dormant = 'dormant', all_activity, dormant_status)) as dormant_periods,
- arrayMap(x -> 'dormant', dormant_periods) as dormant_label,
- arrayConcat(arrayZip(all_activity, initial_status), arrayZip(dormant_periods, dormant_label)) as temp_concat,
- arrayJoin(temp_concat) as period_status_pairs,
- period_status_pairs.1 as start_of_period,
- period_status_pairs.2 as status
- FROM events
- WHERE {event_filter}
- GROUP BY person_id
- """,
- placeholders=placeholders,
- )
- return events_query
-
-
-def run_lifecycle_query(
- team: Team,
- query: LifecycleQuery,
-) -> Dict[str, Any]:
- now_dt = datetime.now()
-
- query_date_range = QueryDateRange(date_range=query.dateRange, team=team, interval=query.interval, now=now_dt)
-
- interval = query_date_range.interval.name
- one_interval_period = query_date_range.one_interval_period_as_hogql
- number_interval_period = query_date_range.interval_periods_as_hogql("number")
-
- time_filter = create_time_filter(query_date_range)
- event_filter = time_filter # TODO: add all other filters
-
- placeholders = {
- "interval": ast.Constant(value=interval),
- "one_interval_period": one_interval_period,
- "number_interval_period": number_interval_period,
- "event_filter": event_filter,
- "date_from": query_date_range.date_from_as_hogql,
- "date_to": query_date_range.date_to_as_hogql,
- }
-
- events_query = create_events_query(date_range=query_date_range, event_filter=event_filter)
-
- periods = parse_select(
- """
- SELECT (
- dateTrunc({interval}, {date_to}) - {number_interval_period}
- ) AS start_of_period
- FROM numbers(
- dateDiff(
- {interval},
- dateTrunc({interval}, {date_from}),
- dateTrunc({interval}, {date_to} + {one_interval_period})
- )
- )
- """,
- placeholders=placeholders,
- )
-
- lifecycle_sql = parse_select(
- """
- SELECT groupArray(start_of_period) AS date,
- groupArray(counts) AS total,
- status
- FROM (
- SELECT
- status = 'dormant' ? negate(sum(counts)) : negate(negate(sum(counts))) as counts,
- start_of_period,
- status
- FROM (
- SELECT
- periods.start_of_period as start_of_period,
- 0 AS counts,
- status
- FROM {periods} as periods
- CROSS JOIN (
- SELECT status
- FROM (SELECT 1)
- ARRAY JOIN ['new', 'returning', 'resurrecting', 'dormant'] as status
- ) as sec
- ORDER BY status, start_of_period
- UNION ALL
- SELECT
- start_of_period, count(DISTINCT person_id) AS counts, status
- FROM {events_query}
- GROUP BY start_of_period, status
- )
- WHERE start_of_period <= dateTrunc({interval}, {date_to})
- AND start_of_period >= dateTrunc({interval}, {date_from})
- GROUP BY start_of_period, status
- ORDER BY start_of_period ASC
- )
- GROUP BY status
- """,
- {**placeholders, "periods": periods, "events_query": events_query},
- )
-
- response = execute_hogql_query(
- team=team,
- query=lifecycle_sql,
- query_type="LifecycleQuery",
- )
-
- # ensure that the items are in a deterministic order
- order = {"new": 1, "returning": 2, "resurrecting": 3, "dormant": 4}
- results = sorted(response.results, key=lambda result: order.get(result[2], result[2]))
-
- res = []
- for val in results:
- counts = val[1]
- labels = [item.strftime("%-d-%b-%Y{}".format(" %H:%M" if interval == "hour" else "")) for item in val[0]]
- days = [item.strftime("%Y-%m-%d{}".format(" %H:%M:%S" if interval == "hour" else "")) for item in val[0]]
-
- label = "{} - {}".format("", val[2]) # entity.name
- additional_values = {"label": label, "status": val[2]}
- res.append(
- {
- "data": [float(c) for c in counts],
- "count": float(sum(counts)),
- "labels": labels,
- "days": days,
- **additional_values,
- }
- )
-
- return {"result": res}
diff --git a/posthog/hogql_queries/lifecycle_query_runner.py b/posthog/hogql_queries/lifecycle_query_runner.py
new file mode 100644
index 0000000000000..ea43b3cd69aa0
--- /dev/null
+++ b/posthog/hogql_queries/lifecycle_query_runner.py
@@ -0,0 +1,252 @@
+from typing import Optional, Any, Dict, List
+
+from django.utils.timezone import datetime
+
+from posthog.hogql import ast
+from posthog.hogql.parser import parse_expr, parse_select
+from posthog.hogql.property import property_to_expr, action_to_expr
+from posthog.hogql.query import execute_hogql_query
+from posthog.hogql.timings import HogQLTimings
+from posthog.hogql_queries.query_runner import QueryRunner
+from posthog.models import Team, Action
+from posthog.hogql_queries.utils.query_date_range import QueryDateRange
+from posthog.models.filters.mixins.utils import cached_property
+from posthog.schema import LifecycleQuery, ActionsNode, EventsNode, LifecycleQueryResponse
+
+
+class LifecycleQueryRunner(QueryRunner):
+ query: LifecycleQuery
+
+ def __init__(self, query: LifecycleQuery | Dict[str, Any], team: Team, timings: Optional[HogQLTimings] = None):
+ super().__init__(team, timings)
+ if isinstance(query, LifecycleQuery):
+ self.query = query
+ else:
+ self.query = LifecycleQuery.model_validate(query)
+
+ def to_query(self) -> ast.SelectQuery:
+ placeholders = {
+ **self.query_date_range.to_placeholders(),
+ "events_query": self.events_query,
+ "periods_query": self.periods_query,
+ }
+ with self.timings.measure("lifecycle_query"):
+ lifecycle_query = parse_select(
+ """
+ SELECT groupArray(start_of_period) AS date,
+ groupArray(counts) AS total,
+ status
+ FROM (
+ SELECT
+ status = 'dormant' ? negate(sum(counts)) : negate(negate(sum(counts))) as counts,
+ start_of_period,
+ status
+ FROM (
+ SELECT
+ periods.start_of_period as start_of_period,
+ 0 AS counts,
+ status
+ FROM {periods_query} as periods
+ CROSS JOIN (
+ SELECT status
+ FROM (SELECT 1)
+ ARRAY JOIN ['new', 'returning', 'resurrecting', 'dormant'] as status
+ ) as sec
+ ORDER BY status, start_of_period
+ UNION ALL
+ SELECT
+ start_of_period, count(DISTINCT person_id) AS counts, status
+ FROM {events_query}
+ GROUP BY start_of_period, status
+ )
+ WHERE start_of_period <= dateTrunc({interval}, {date_to})
+ AND start_of_period >= dateTrunc({interval}, {date_from})
+ GROUP BY start_of_period, status
+ ORDER BY start_of_period ASC
+ )
+ GROUP BY status
+ """,
+ placeholders,
+ timings=self.timings,
+ )
+ return lifecycle_query
+
+    def to_persons_query(self) -> ast.SelectQuery:
+ # TODO: add support for selecting and filtering by breakdowns
+ with self.timings.measure("persons_query"):
+ return parse_select(
+ """
+ SELECT
+ person_id, start_of_period as breakdown_1, status as breakdown_2
+ FROM
+ {events_query}
+ """,
+ placeholders={"events_query": self.events_query},
+ )
+
+ def run(self) -> LifecycleQueryResponse:
+ response = execute_hogql_query(
+ query_type="LifecycleQuery",
+ query=self.to_query(),
+ team=self.team,
+ timings=self.timings,
+ )
+
+ # TODO: can we move the data conversion part into the query as well? It would make it easier to swap
+ # e.g. the LifecycleQuery with HogQLQuery, while keeping the chart logic the same.
+
+ # ensure that the items are in a deterministic order
+ order = {"new": 1, "returning": 2, "resurrecting": 3, "dormant": 4}
+ results = sorted(response.results, key=lambda result: order.get(result[2], 5))
+
+ res = []
+ for val in results:
+ counts = val[1]
+ labels = [
+ item.strftime("%-d-%b-%Y{}".format(" %H:%M" if self.query_date_range.interval_name == "hour" else ""))
+ for item in val[0]
+ ]
+ days = [
+ item.strftime("%Y-%m-%d{}".format(" %H:%M:%S" if self.query_date_range.interval_name == "hour" else ""))
+ for item in val[0]
+ ]
+
+ label = "{} - {}".format("", val[2]) # entity.name
+ additional_values = {"label": label, "status": val[2]}
+ res.append(
+ {
+ "data": [float(c) for c in counts],
+ "count": float(sum(counts)),
+ "labels": labels,
+ "days": days,
+ **additional_values,
+ }
+ )
+
+ return LifecycleQueryResponse(result=res, timings=response.timings)
+
+ @cached_property
+ def query_date_range(self):
+ return QueryDateRange(
+ date_range=self.query.dateRange, team=self.team, interval=self.query.interval, now=datetime.now()
+ )
+
+ @cached_property
+ def event_filter(self) -> ast.Expr:
+ event_filters: List[ast.Expr] = []
+ with self.timings.measure("date_range"):
+ event_filters.append(
+ parse_expr(
+ "timestamp >= dateTrunc({interval}, {date_from}) - {one_interval}",
+ {
+ "interval": self.query_date_range.interval_period_string_as_hogql_constant(),
+ "one_interval": self.query_date_range.one_interval_period(),
+ "date_from": self.query_date_range.date_from_as_hogql(),
+ },
+ timings=self.timings,
+ )
+ )
+ event_filters.append(
+ parse_expr(
+ "timestamp < dateTrunc({interval}, {date_to}) + {one_interval}",
+ {
+ "interval": self.query_date_range.interval_period_string_as_hogql_constant(),
+ "one_interval": self.query_date_range.one_interval_period(),
+ "date_to": self.query_date_range.date_to_as_hogql(),
+ },
+ timings=self.timings,
+ )
+ )
+ with self.timings.measure("properties"):
+ if self.query.properties is not None and self.query.properties != []:
+ event_filters.append(property_to_expr(self.query.properties, self.team))
+ with self.timings.measure("series_filters"):
+ for serie in self.query.series or []:
+ if isinstance(serie, ActionsNode):
+ action = Action.objects.get(pk=int(serie.id), team=self.team)
+ event_filters.append(action_to_expr(action))
+ elif isinstance(serie, EventsNode):
+ if serie.event is not None:
+ event_filters.append(
+ ast.CompareOperation(
+ op=ast.CompareOperationOp.Eq,
+ left=ast.Field(chain=["event"]),
+ right=ast.Constant(value=str(serie.event)),
+ )
+ )
+ else:
+ raise ValueError(f"Invalid serie kind: {serie.kind}")
+ if serie.properties is not None and serie.properties != []:
+ event_filters.append(property_to_expr(serie.properties, self.team))
+ with self.timings.measure("test_account_filters"):
+ if (
+ self.query.filterTestAccounts
+ and isinstance(self.team.test_account_filters, list)
+ and len(self.team.test_account_filters) > 0
+ ):
+ for property in self.team.test_account_filters:
+ event_filters.append(property_to_expr(property, self.team))
+
+ if len(event_filters) == 0:
+ return ast.Constant(value=True)
+ elif len(event_filters) == 1:
+ return event_filters[0]
+ else:
+ return ast.And(exprs=event_filters)
+
+ @cached_property
+ def events_query(self):
+ with self.timings.measure("events_query"):
+ events_query = parse_select(
+ """
+ SELECT
+ events.person.id as person_id,
+ min(events.person.created_at) AS created_at,
+ arraySort(groupUniqArray(dateTrunc({interval}, events.timestamp))) AS all_activity,
+ arrayPopBack(arrayPushFront(all_activity, dateTrunc({interval}, created_at))) as previous_activity,
+ arrayPopFront(arrayPushBack(all_activity, dateTrunc({interval}, toDateTime('1970-01-01 00:00:00')))) as following_activity,
+ arrayMap((previous, current, index) -> (previous = current ? 'new' : ((current - {one_interval_period}) = previous AND index != 1) ? 'returning' : 'resurrecting'), previous_activity, all_activity, arrayEnumerate(all_activity)) as initial_status,
+ arrayMap((current, next) -> (current + {one_interval_period} = next ? '' : 'dormant'), all_activity, following_activity) as dormant_status,
+ arrayMap(x -> x + {one_interval_period}, arrayFilter((current, is_dormant) -> is_dormant = 'dormant', all_activity, dormant_status)) as dormant_periods,
+ arrayMap(x -> 'dormant', dormant_periods) as dormant_label,
+ arrayConcat(arrayZip(all_activity, initial_status), arrayZip(dormant_periods, dormant_label)) as temp_concat,
+ arrayJoin(temp_concat) as period_status_pairs,
+ period_status_pairs.1 as start_of_period,
+ period_status_pairs.2 as status
+ FROM events
+ WHERE {event_filter}
+ GROUP BY person_id
+ """,
+ placeholders={
+ **self.query_date_range.to_placeholders(),
+ "event_filter": self.event_filter,
+ },
+ timings=self.timings,
+ )
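+ # optionally sample the events table: attaches a SAMPLE <factor> clause to the FROM events part of the query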
+ sampling_factor = self.query.samplingFactor
+ if sampling_factor is not None and isinstance(sampling_factor, float):
+ sample_expr = ast.SampleExpr(sample_value=ast.RatioExpr(left=ast.Constant(value=sampling_factor)))
+ events_query.select_from.sample = sample_expr
+
+ return events_query
+
+ @cached_property
+ def periods_query(self):
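+ # generates one start_of_period row per interval between dateTrunc(date_from) and dateTrunc(date_to),
+ # counting backwards from date_to via numbers() (numbers(N) yields 0..N-1)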
+ with self.timings.measure("periods_query"):
+ periods_query = parse_select(
+ """
+ SELECT (
+ dateTrunc({interval}, {date_to}) - {number_interval_period}
+ ) AS start_of_period
+ FROM numbers(
+ dateDiff(
+ {interval},
+ dateTrunc({interval}, {date_from}),
+ dateTrunc({interval}, {date_to} + {one_interval_period})
+ )
+ )
+ """,
+ placeholders=self.query_date_range.to_placeholders(),
+ timings=self.timings,
+ )
+ return periods_query
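A minimal usage sketch of the LifecycleQueryRunner added above, mirroring how the updated tests below construct and run it (LifecycleQuery, EventsNode, DateRange and IntervalType come from posthog.schema; `team` is assumed to be an existing Team instance):

    from posthog.hogql_queries.lifecycle_query_runner import LifecycleQueryRunner
    from posthog.schema import DateRange, EventsNode, IntervalType, LifecycleQuery

    # `team` is assumed to exist already; the runner derives the timezone and date bounds from it
    query = LifecycleQuery(
        dateRange=DateRange(date_from="-7d"),
        interval=IntervalType.day,
        series=[EventsNode(event="$pageview")],
    )
    response = LifecycleQueryRunner(team=team, query=query).run()  # -> LifecycleQueryResponse
    for series in response.result:
        print(series["status"], series["count"], series["data"])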
diff --git a/posthog/hogql_queries/query_date_range.py b/posthog/hogql_queries/query_date_range.py
deleted file mode 100644
index 4d76b222deb2b..0000000000000
--- a/posthog/hogql_queries/query_date_range.py
+++ /dev/null
@@ -1,114 +0,0 @@
-from datetime import datetime
-from functools import cached_property, lru_cache
-from typing import Optional
-
-import pytz
-from dateutil.relativedelta import relativedelta
-
-from posthog.hogql.parser import parse_expr, ast
-from posthog.models.team import Team
-from posthog.queries.util import get_earliest_timestamp
-from posthog.schema import DateRange, IntervalType
-from posthog.utils import DEFAULT_DATE_FROM_DAYS, relative_date_parse, relative_date_parse_with_delta_mapping
-
-
-# Originally similar to posthog/queries/query_date_range.py but rewritten to be used in HogQL queries
-class QueryDateRange:
- """Translation of the raw `date_from` and `date_to` filter values to datetimes."""
-
- _team: Team
- _date_range: Optional[DateRange]
- _interval: Optional[IntervalType]
- _now_non_timezone: datetime
-
- def __init__(
- self, date_range: Optional[DateRange], team: Team, interval: Optional[IntervalType], now: datetime
- ) -> None:
- self._team = team
- self._date_range = date_range
- self._interval = interval
- self._now_non_timezone = now
-
- @cached_property
- def date_to(self) -> datetime:
- date_to = self._now
- delta_mapping = None
-
- if self._date_range and self._date_range.date_to:
- date_to, delta_mapping = relative_date_parse_with_delta_mapping(
- self._date_range.date_to, self._team.timezone_info, always_truncate=True, now=self._now
- )
-
- is_relative = not self._date_range or not self._date_range.date_to or delta_mapping is not None
- if not self.is_hourly():
- date_to = date_to.replace(hour=23, minute=59, second=59, microsecond=999999)
- elif is_relative:
- date_to = date_to.replace(minute=59, second=59, microsecond=999999)
-
- return date_to
-
- def get_earliest_timestamp(self):
- return get_earliest_timestamp(self._team.pk)
-
- @cached_property
- def date_from(self) -> datetime:
- date_from: datetime
- if self._date_range and self._date_range.date_from == "all":
- date_from = self.get_earliest_timestamp()
- elif self._date_range and isinstance(self._date_range.date_from, str):
- date_from = relative_date_parse(self._date_range.date_from, self._team.timezone_info, now=self._now)
- else:
- date_from = self._now.replace(hour=0, minute=0, second=0, microsecond=0) - relativedelta(
- days=DEFAULT_DATE_FROM_DAYS
- )
-
- if not self.is_hourly():
- date_from = date_from.replace(hour=0, minute=0, second=0, microsecond=0)
-
- return date_from
-
- @cached_property
- def _now(self):
- return self._localize_to_team(self._now_non_timezone)
-
- def _localize_to_team(self, target: datetime):
- return target.astimezone(pytz.timezone(self._team.timezone))
-
- @cached_property
- def date_to_str(self) -> str:
- return self.date_to.strftime("%Y-%m-%d %H:%M:%S")
-
- @cached_property
- def date_from_str(self) -> str:
- return self.date_from.strftime("%Y-%m-%d %H:%M:%S")
-
- def is_hourly(self):
- return self.interval.name == "hour"
-
- @cached_property
- def date_to_as_hogql(self):
- return parse_expr(f"assumeNotNull(toDateTime('{self.date_to_str}'))")
-
- @cached_property
- def date_from_as_hogql(self):
- return parse_expr(f"assumeNotNull(toDateTime('{self.date_from_str}'))")
-
- @cached_property
- def interval(self):
- return self._interval or IntervalType.day
-
- @cached_property
- def one_interval_period_as_hogql(self):
- return parse_expr(f"toInterval{self.interval.capitalize()}(1)")
-
- @lru_cache
- def interval_periods_as_hogql(self, s: str):
- return parse_expr(f"toInterval{self.interval.capitalize()}({s})")
-
- @cached_property
- def interval_period_string(self):
- return self.interval.value
-
- @cached_property
- def interval_period_string_as_hogql(self):
- return ast.Constant(value=self.interval.value)
diff --git a/posthog/hogql_queries/query_runner.py b/posthog/hogql_queries/query_runner.py
new file mode 100644
index 0000000000000..b8a3a10a4aa7b
--- /dev/null
+++ b/posthog/hogql_queries/query_runner.py
@@ -0,0 +1,37 @@
+from typing import Optional
+
+from pydantic import BaseModel
+
+from posthog.hogql import ast
+from posthog.hogql.context import HogQLContext
+from posthog.hogql.printer import print_ast
+from posthog.hogql.timings import HogQLTimings
+from posthog.models import Team
+
+
+class QueryRunner:
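+ """Base class for HogQL-based query runners: subclasses implement run() and to_query();
+ to_hogql() prints the query AST back into a HogQL string."""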
+ query: BaseModel
+ team: Team
+ timings: HogQLTimings
+
+ def __init__(self, team: Team, timings: Optional[HogQLTimings] = None):
+ self.team = team
+ self.timings = timings or HogQLTimings()
+
+ def run(self) -> BaseModel:
+ raise NotImplementedError()
+
+ def to_query(self) -> ast.SelectQuery:
+ raise NotImplementedError()
+
+ def to_persons_query(self) -> str:
+ # TODO: add support for selecting and filtering by breakdowns
+ raise NotImplementedError()
+
+ def to_hogql(self) -> str:
+ with self.timings.measure("to_hogql"):
+ return print_ast(
+ self.to_query(),
+ HogQLContext(team_id=self.team.pk, enable_select_queries=True, timings=self.timings),
+ "hogql",
+ )
diff --git a/posthog/hogql_queries/test/test_lifecycle_hogql_query.py b/posthog/hogql_queries/test/test_lifecycle_hogql_query.py
index 5cc56252b046f..d9996640f64c3 100644
--- a/posthog/hogql_queries/test/test_lifecycle_hogql_query.py
+++ b/posthog/hogql_queries/test/test_lifecycle_hogql_query.py
@@ -3,9 +3,8 @@
from freezegun import freeze_time
from posthog.hogql.query import execute_hogql_query
+from posthog.hogql_queries.lifecycle_query_runner import LifecycleQueryRunner
from posthog.models.utils import UUIDT
-from posthog.hogql_queries.lifecycle_hogql_query import create_events_query, create_time_filter, run_lifecycle_query
-from posthog.hogql_queries.query_date_range import QueryDateRange
from posthog.schema import DateRange, IntervalType, LifecycleQuery, EventsNode
from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, _create_person, flush_persons_and_events
@@ -67,105 +66,29 @@ def _create_test_events(self):
]
)
- def _run_events_query(self, date_from, date_to, interval):
- date_range = QueryDateRange(
- date_range=DateRange(date_from=date_from, date_to=date_to),
- team=self.team,
- interval=interval,
- now=datetime.strptime("2020-01-30T00:00:00Z", "%Y-%m-%dT%H:%M:%SZ"),
+ def _create_query_runner(self, date_from, date_to, interval) -> LifecycleQueryRunner:
+ series = [EventsNode(event="$pageview")]
+ query = LifecycleQuery(
+ dateRange=DateRange(date_from=date_from, date_to=date_to), interval=interval, series=series
)
- time_filter = create_time_filter(date_range)
+ return LifecycleQueryRunner(team=self.team, query=query)
- # TODO probably doesn't make sense to test like this
- # maybe this query should be what is returned by the function
- events_query = create_events_query(event_filter=time_filter, date_range=date_range)
+ def _run_events_query(self, date_from, date_to, interval):
+ events_query = self._create_query_runner(date_from, date_to, interval).events_query
return execute_hogql_query(
team=self.team,
query="""
- SELECT
- start_of_period, count(DISTINCT person_id) AS counts, status
- FROM {events_query}
- GROUP BY start_of_period, status
- """,
- query_type="LifecycleQuery",
+ SELECT
+ start_of_period, count(DISTINCT person_id) AS counts, status
+ FROM {events_query}
+ GROUP BY start_of_period, status
+ """,
placeholders={"events_query": events_query},
+ query_type="LifecycleEventsQuery",
)
- def test_events_query_whole_range(self):
- self._create_test_events()
-
- date_from = "2020-01-09"
- date_to = "2020-01-19"
-
- response = self._run_events_query(date_from, date_to, IntervalType.day)
-
- self.assertEqual(
- {
- (datetime(2020, 1, 9, 0, 0), 1, "new"), # p2
- (datetime(2020, 1, 10, 0, 0), 1, "dormant"), # p2
- (datetime(2020, 1, 11, 0, 0), 1, "new"), # p1
- (datetime(2020, 1, 12, 0, 0), 1, "new"), # p3
- (datetime(2020, 1, 12, 0, 0), 1, "resurrecting"), # p2
- (datetime(2020, 1, 12, 0, 0), 1, "returning"), # p1
- (datetime(2020, 1, 13, 0, 0), 1, "returning"), # p1
- (datetime(2020, 1, 13, 0, 0), 2, "dormant"), # p2, p3
- (datetime(2020, 1, 14, 0, 0), 1, "dormant"), # p1
- (datetime(2020, 1, 15, 0, 0), 1, "resurrecting"), # p1
- (datetime(2020, 1, 15, 0, 0), 1, "new"), # p4
- (datetime(2020, 1, 16, 0, 0), 2, "dormant"), # p1, p4
- (datetime(2020, 1, 17, 0, 0), 1, "resurrecting"), # p1
- (datetime(2020, 1, 18, 0, 0), 1, "dormant"), # p1
- (datetime(2020, 1, 19, 0, 0), 1, "resurrecting"), # p1
- (datetime(2020, 1, 20, 0, 0), 1, "dormant"), # p1
- },
- set(response.results),
- )
-
- def test_events_query_partial_range(self):
- self._create_test_events()
- date_from = "2020-01-12"
- date_to = "2020-01-14"
- response = self._run_events_query(date_from, date_to, IntervalType.day)
-
- self.assertEqual(
- {
- (datetime(2020, 1, 11, 0, 0), 1, "new"), # p1
- (datetime(2020, 1, 12, 0, 0), 1, "new"), # p3
- (datetime(2020, 1, 12, 0, 0), 1, "resurrecting"), # p2
- (datetime(2020, 1, 12, 0, 0), 1, "returning"), # p1
- (datetime(2020, 1, 13, 0, 0), 1, "returning"), # p1
- (datetime(2020, 1, 13, 0, 0), 2, "dormant"), # p2, p3
- (datetime(2020, 1, 14, 0, 0), 1, "dormant"), # p1
- },
- set(response.results),
- )
-
- # def test_start_on_dormant(self):
- # self.create_test_events()
- # date_from = "2020-01-13"
- # date_to = "2020-01-14"
- # response = self.run_events_query(date_from, date_to, IntervalType.day)
- #
- # self.assertEqual(
- # {
- # (datetime(2020, 1, 12, 0, 0), 1, "new"), # p3
- # # TODO this currently fails, as it treats p1 as resurrecting.
- # # This might just be fine, later in the query we would just throw away results before the 13th
- # (datetime(2020, 1, 12, 0, 0), 1, "resurrecting"), # p2
- # (datetime(2020, 1, 12, 0, 0), 1, "returning"), # p1
- # (datetime(2020, 1, 13, 0, 0), 1, "returning"), # p1
- # (datetime(2020, 1, 13, 0, 0), 2, "dormant"), # p2, p3
- # (datetime(2020, 1, 14, 0, 0), 1, "dormant"), # p1
- # },
- # set(response.results),
- # )
-
def _run_lifecycle_query(self, date_from, date_to, interval):
- series = [EventsNode(event="$pageview")]
- query = LifecycleQuery(
- dateRange=DateRange(date_from=date_from, date_to=date_to), interval=interval, series=series
- )
- return run_lifecycle_query(team=self.team, query=query)
+ return self._create_query_runner(date_from, date_to, interval).run()
def test_lifecycle_query_whole_range(self):
self._create_test_events()
@@ -175,7 +98,7 @@ def test_lifecycle_query_whole_range(self):
response = self._run_lifecycle_query(date_from, date_to, IntervalType.day)
- statuses = [res["status"] for res in response["result"]]
+ statuses = [res["status"] for res in response.result]
self.assertEqual(["new", "returning", "resurrecting", "dormant"], statuses)
self.assertEqual(
@@ -357,5 +280,54 @@ def test_lifecycle_query_whole_range(self):
"status": "dormant",
},
],
- response["result"],
+ response.result,
+ )
+
+ def test_events_query_whole_range(self):
+ self._create_test_events()
+
+ date_from = "2020-01-09"
+ date_to = "2020-01-19"
+
+ response = self._run_events_query(date_from, date_to, IntervalType.day)
+
+ self.assertEqual(
+ {
+ (datetime(2020, 1, 9, 0, 0), 1, "new"), # p2
+ (datetime(2020, 1, 10, 0, 0), 1, "dormant"), # p2
+ (datetime(2020, 1, 11, 0, 0), 1, "new"), # p1
+ (datetime(2020, 1, 12, 0, 0), 1, "new"), # p3
+ (datetime(2020, 1, 12, 0, 0), 1, "resurrecting"), # p2
+ (datetime(2020, 1, 12, 0, 0), 1, "returning"), # p1
+ (datetime(2020, 1, 13, 0, 0), 1, "returning"), # p1
+ (datetime(2020, 1, 13, 0, 0), 2, "dormant"), # p2, p3
+ (datetime(2020, 1, 14, 0, 0), 1, "dormant"), # p1
+ (datetime(2020, 1, 15, 0, 0), 1, "resurrecting"), # p1
+ (datetime(2020, 1, 15, 0, 0), 1, "new"), # p4
+ (datetime(2020, 1, 16, 0, 0), 2, "dormant"), # p1, p4
+ (datetime(2020, 1, 17, 0, 0), 1, "resurrecting"), # p1
+ (datetime(2020, 1, 18, 0, 0), 1, "dormant"), # p1
+ (datetime(2020, 1, 19, 0, 0), 1, "resurrecting"), # p1
+ (datetime(2020, 1, 20, 0, 0), 1, "dormant"), # p1
+ },
+ set(response.results),
+ )
+
+ def test_events_query_partial_range(self):
+ self._create_test_events()
+ date_from = "2020-01-12"
+ date_to = "2020-01-14"
+ response = self._run_events_query(date_from, date_to, IntervalType.day)
+
+ self.assertEqual(
+ {
+ (datetime(2020, 1, 11, 0, 0), 1, "new"), # p1
+ (datetime(2020, 1, 12, 0, 0), 1, "new"), # p3
+ (datetime(2020, 1, 12, 0, 0), 1, "resurrecting"), # p2
+ (datetime(2020, 1, 12, 0, 0), 1, "returning"), # p1
+ (datetime(2020, 1, 13, 0, 0), 1, "returning"), # p1
+ (datetime(2020, 1, 13, 0, 0), 2, "dormant"), # p2, p3
+ (datetime(2020, 1, 14, 0, 0), 1, "dormant"), # p1
+ },
+ set(response.results),
)
diff --git a/posthog/hogql_queries/utils/query_date_range.py b/posthog/hogql_queries/utils/query_date_range.py
new file mode 100644
index 0000000000000..a9c86614cac5f
--- /dev/null
+++ b/posthog/hogql_queries/utils/query_date_range.py
@@ -0,0 +1,124 @@
+import re
+from functools import cached_property
+from datetime import datetime
+from typing import Optional, Dict
+from zoneinfo import ZoneInfo
+
+from dateutil.relativedelta import relativedelta
+
+from posthog.hogql.parser import ast
+from posthog.models.team import Team
+from posthog.queries.util import get_earliest_timestamp
+from posthog.schema import DateRange, IntervalType
+from posthog.utils import DEFAULT_DATE_FROM_DAYS, relative_date_parse, relative_date_parse_with_delta_mapping
+
+
+# Originally similar to posthog/queries/query_date_range.py but rewritten to be used in HogQL queries
+class QueryDateRange:
+ """Translation of the raw `date_from` and `date_to` filter values to datetimes."""
+
+ _team: Team
+ _date_range: Optional[DateRange]
+ _interval: Optional[IntervalType]
+ _now_without_timezone: datetime
+
+ def __init__(
+ self, date_range: Optional[DateRange], team: Team, interval: Optional[IntervalType], now: datetime
+ ) -> None:
+ self._team = team
+ self._date_range = date_range
+ self._interval = interval or IntervalType.day
+ self._now_without_timezone = now
+
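+ # defensive check: the interval name is interpolated into toInterval<Name>(...) calls below,
+ # so only plain IntervalType values are accepted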
+ if not isinstance(self._interval, IntervalType) or re.match(r"[^a-z]", self._interval.name):
+ raise ValueError(f"Invalid interval: {interval}")
+
+ def date_to(self) -> datetime:
+ date_to = self.now_with_timezone
+ delta_mapping = None
+
+ if self._date_range and self._date_range.date_to:
+ date_to, delta_mapping = relative_date_parse_with_delta_mapping(
+ self._date_range.date_to, self._team.timezone_info, always_truncate=True, now=self.now_with_timezone
+ )
+
+ is_relative = not self._date_range or not self._date_range.date_to or delta_mapping is not None
+ if not self.is_hourly:
+ date_to = date_to.replace(hour=23, minute=59, second=59, microsecond=999999)
+ elif is_relative:
+ date_to = date_to.replace(minute=59, second=59, microsecond=999999)
+
+ return date_to
+
+ def get_earliest_timestamp(self) -> datetime:
+ return get_earliest_timestamp(self._team.pk)
+
+ def date_from(self) -> datetime:
+ date_from: datetime
+ if self._date_range and self._date_range.date_from == "all":
+ date_from = self.get_earliest_timestamp()
+ elif self._date_range and isinstance(self._date_range.date_from, str):
+ date_from = relative_date_parse(
+ self._date_range.date_from, self._team.timezone_info, now=self.now_with_timezone
+ )
+ else:
+ date_from = self.now_with_timezone.replace(hour=0, minute=0, second=0, microsecond=0) - relativedelta(
+ days=DEFAULT_DATE_FROM_DAYS
+ )
+
+ if not self.is_hourly:
+ date_from = date_from.replace(hour=0, minute=0, second=0, microsecond=0)
+
+ return date_from
+
+ @cached_property
+ def now_with_timezone(self) -> datetime:
+ return self._now_without_timezone.astimezone(ZoneInfo(self._team.timezone))
+
+ @cached_property
+ def date_to_str(self) -> str:
+ return self.date_to().strftime("%Y-%m-%d %H:%M:%S")
+
+ @cached_property
+ def date_from_str(self) -> str:
+ return self.date_from().strftime("%Y-%m-%d %H:%M:%S")
+
+ @cached_property
+ def is_hourly(self) -> bool:
+ return self.interval_name == "hour"
+
+ @cached_property
+ def interval_type(self) -> IntervalType:
+ return self._interval or IntervalType.day
+
+ @cached_property
+ def interval_name(self) -> str:
+ return self.interval_type.name
+
+ def date_to_as_hogql(self) -> ast.Expr:
+ return ast.Call(
+ name="assumeNotNull", args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=self.date_to_str))])]
+ )
+
+ def date_from_as_hogql(self) -> ast.Expr:
+ return ast.Call(
+ name="assumeNotNull", args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=self.date_from_str))])]
+ )
+
+ def one_interval_period(self) -> ast.Expr:
+ return ast.Call(name=f"toInterval{self.interval_name.capitalize()}", args=[ast.Constant(value=1)])
+
+ def number_interval_periods(self) -> ast.Expr:
+ return ast.Call(name=f"toInterval{self.interval_name.capitalize()}", args=[ast.Field(chain=["number"])])
+
+ def interval_period_string_as_hogql_constant(self) -> ast.Expr:
+ return ast.Constant(value=self.interval_name)
+
+ def to_placeholders(self) -> Dict[str, ast.Expr]:
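+ # expressions consumed as {placeholders} by parse_expr/parse_select in the HogQL query runners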
+ return {
+ "interval": self.interval_period_string_as_hogql_constant(),
+ "one_interval_period": self.one_interval_period(),
+ "number_interval_period": self.number_interval_periods(),
+ "date_from": self.date_from_as_hogql(),
+ "date_to": self.date_to_as_hogql(),
+ }
diff --git a/posthog/hogql_queries/test/test_query_date_range.py b/posthog/hogql_queries/utils/test/test_query_date_range.py
similarity index 54%
rename from posthog/hogql_queries/test/test_query_date_range.py
rename to posthog/hogql_queries/utils/test/test_query_date_range.py
index 82966cc5f1bff..0ab8467567a50 100644
--- a/posthog/hogql_queries/test/test_query_date_range.py
+++ b/posthog/hogql_queries/utils/test/test_query_date_range.py
@@ -1,6 +1,6 @@
from dateutil import parser
-from posthog.hogql_queries.query_date_range import QueryDateRange
+from posthog.hogql_queries.utils.query_date_range import QueryDateRange
from posthog.schema import DateRange, IntervalType
from posthog.test.base import APIBaseTest
@@ -10,32 +10,17 @@ def test_parsed_date(self):
now = parser.isoparse("2021-08-25T00:00:00.000Z")
date_range = DateRange(date_from="-48h")
query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.day, now=now)
- parsed_date_from = query_date_range.date_from
- parsed_date_to = query_date_range.date_to
- self.assertEqual(
- parsed_date_from,
- parser.isoparse("2021-08-23T00:00:00Z"),
- )
- self.assertEqual(
- parsed_date_to,
- parser.isoparse("2021-08-25T23:59:59.999999Z"),
- )
+ self.assertEqual(query_date_range.date_from(), parser.isoparse("2021-08-23T00:00:00Z"))
+ self.assertEqual(query_date_range.date_to(), parser.isoparse("2021-08-25T23:59:59.999999Z"))
def test_parsed_date_hour(self):
now = parser.isoparse("2021-08-25T00:00:00.000Z")
date_range = DateRange(date_from="-48h")
query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.hour, now=now)
- parsed_date_from = query_date_range.date_from
- parsed_date_to = query_date_range.date_to
-
+ self.assertEqual(query_date_range.date_from(), parser.isoparse("2021-08-23T00:00:00Z"))
self.assertEqual(
- parsed_date_from,
- parser.isoparse("2021-08-23T00:00:00Z"),
- )
- self.assertEqual(
- parsed_date_to,
- parser.isoparse("2021-08-25T00:59:59.999999Z"),
+ query_date_range.date_to(), parser.isoparse("2021-08-25T00:59:59.999999Z")
) # ensure last hour is included
def test_parsed_date_middle_of_hour(self):
@@ -43,34 +28,25 @@ def test_parsed_date_middle_of_hour(self):
date_range = DateRange(date_from="2021-08-23 05:00:00", date_to="2021-08-26 07:00:00")
query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.hour, now=now)
- parsed_date_from = query_date_range.date_from
- parsed_date_to = query_date_range.date_to
-
- self.assertEqual(parsed_date_from, parser.isoparse("2021-08-23 05:00:00Z"))
- self.assertEqual(parsed_date_to, parser.isoparse("2021-08-26 07:00:00Z")) # ensure last hour is included
+ self.assertEqual(query_date_range.date_from(), parser.isoparse("2021-08-23 05:00:00Z"))
+ self.assertEqual(
+ query_date_range.date_to(), parser.isoparse("2021-08-26 07:00:00Z")
+ ) # ensure last hour is included
def test_parsed_date_week(self):
now = parser.isoparse("2021-08-25T00:00:00.000Z")
date_range = DateRange(date_from="-7d")
query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.week, now=now)
- parsed_date_from = query_date_range.date_from
- parsed_date_to = query_date_range.date_to
- self.assertEqual(
- parsed_date_from,
- parser.isoparse("2021-08-18 00:00:00Z"),
- )
- self.assertEqual(
- parsed_date_to,
- parser.isoparse("2021-08-25 23:59:59.999999Z"),
- )
+ self.assertEqual(query_date_range.date_from(), parser.isoparse("2021-08-18 00:00:00Z"))
+ self.assertEqual(query_date_range.date_to(), parser.isoparse("2021-08-25 23:59:59.999999Z"))
def test_is_hourly(self):
now = parser.isoparse("2021-08-25T00:00:00.000Z")
date_range = DateRange(date_from="-48h")
query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.day, now=now)
- self.assertFalse(query_date_range.is_hourly())
+ self.assertFalse(query_date_range.is_hourly)
query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.hour, now=now)
- self.assertTrue(query_date_range.is_hourly())
+ self.assertTrue(query_date_range.is_hourly)
diff --git a/posthog/management/commands/send_usage_report.py b/posthog/management/commands/send_usage_report.py
index 4c67d451c2a8a..03e4b4a102da4 100644
--- a/posthog/management/commands/send_usage_report.py
+++ b/posthog/management/commands/send_usage_report.py
@@ -1,5 +1,3 @@
-import pprint
-
from django.core.management.base import BaseCommand
from posthog.tasks.usage_report import send_all_org_usage_reports
@@ -10,7 +8,6 @@ class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument("--dry-run", type=bool, help="Print information instead of sending it")
- parser.add_argument("--print-reports", type=bool, help="Print the reports in full")
parser.add_argument("--date", type=str, help="The date to be ran in format YYYY-MM-DD")
parser.add_argument("--event-name", type=str, help="Override the event name to be sent - for testing")
parser.add_argument(
@@ -28,20 +25,14 @@ def handle(self, *args, **options):
run_async = options["async"]
if run_async:
- results = send_all_org_usage_reports.delay(
+ send_all_org_usage_reports.delay(
dry_run, date, event_name, skip_capture_event=skip_capture_event, only_organization_id=organization_id
)
else:
- results = send_all_org_usage_reports(
+ send_all_org_usage_reports(
dry_run, date, event_name, skip_capture_event=skip_capture_event, only_organization_id=organization_id
)
- if options["print_reports"]:
- print("") # noqa T201
- pprint.pprint(results) # noqa T203
- print("") # noqa T201
if dry_run:
print("Dry run so not sent.") # noqa T201
- else:
- print(f"{len(results)} Reports sent!") # noqa T201
print("Done!") # noqa T201
diff --git a/posthog/migrations/0350_add_notebook_text_content.py b/posthog/migrations/0350_add_notebook_text_content.py
new file mode 100644
index 0000000000000..bfe4b079b9945
--- /dev/null
+++ b/posthog/migrations/0350_add_notebook_text_content.py
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.19 on 2023-09-12 18:09
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("posthog", "0349_update_survey_query_name"),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name="notebook",
+ name="text_content",
+ field=models.TextField(blank=True, null=True),
+ ),
+ ]
diff --git a/posthog/models/__init__.py b/posthog/models/__init__.py
index 82679097f6ca2..89432e0809984 100644
--- a/posthog/models/__init__.py
+++ b/posthog/models/__init__.py
@@ -10,6 +10,7 @@
BatchExportDestination,
BatchExportRun,
)
+from ..warehouse.models import DataWarehouseTable
from .cohort import Cohort, CohortPeople
from .dashboard import Dashboard
from .dashboard_tile import DashboardTile, Text
@@ -36,13 +37,10 @@
from .organization_domain import OrganizationDomain
from .person import Person, PersonDistinctId, PersonOverride, PersonOverrideMapping
from .personal_api_key import PersonalAPIKey
-from .plugin import Plugin, PluginAttachment, PluginConfig, PluginSourceFile
+from .plugin import Plugin, PluginAttachment, PluginConfig, PluginSourceFile, PluginLogEntry
from .prompt.prompt import Prompt, PromptSequence, UserPromptState
from .property import Property
from .property_definition import PropertyDefinition
-from .session_recording import SessionRecording
-from .session_recording_playlist import SessionRecordingPlaylist
-from .session_recording_playlist_item import SessionRecordingPlaylistItem
from .sharing_configuration import SharingConfiguration
from .subscription import Subscription
from .feedback.survey import Survey
@@ -52,6 +50,9 @@
from .uploaded_media import UploadedMedia
from .user import User, UserManager
from .user_scene_personalisation import UserScenePersonalisation
+from ..session_recordings.models.session_recording import SessionRecording
+from ..session_recordings.models.session_recording_playlist import SessionRecordingPlaylist
+from ..session_recordings.models.session_recording_playlist_item import SessionRecordingPlaylistItem
__all__ = [
"Action",
diff --git a/posthog/models/activity_logging/activity_log.py b/posthog/models/activity_logging/activity_log.py
index ba47b2c326ff1..f3b36e2c3dbd0 100644
--- a/posthog/models/activity_logging/activity_log.py
+++ b/posthog/models/activity_logging/activity_log.py
@@ -99,7 +99,7 @@ class Meta:
field_exclusions: Dict[ActivityScope, List[str]] = {
- "Notebook": ["id", "last_modified_at", "last_modified_by", "created_at", "created_by"],
+ "Notebook": ["id", "last_modified_at", "last_modified_by", "created_at", "created_by", "text_content"],
"FeatureFlag": ["id", "created_at", "created_by", "is_simple_flag", "experiment", "team", "featureflagoverride"],
"Person": [
"id",
diff --git a/posthog/models/filters/test/__snapshots__/test_filter.ambr b/posthog/models/filters/test/__snapshots__/test_filter.ambr
index 922fdf12a27f1..9be8465ff5f0f 100644
--- a/posthog/models/filters/test/__snapshots__/test_filter.ambr
+++ b/posthog/models/filters/test/__snapshots__/test_filter.ambr
@@ -11,6 +11,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -22,6 +23,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -60,6 +62,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -71,6 +74,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -109,6 +113,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -120,6 +125,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -158,6 +164,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -169,6 +176,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -207,6 +215,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -218,6 +227,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
diff --git a/posthog/models/notebook/notebook.py b/posthog/models/notebook/notebook.py
index dde92fddab944..490645909df26 100644
--- a/posthog/models/notebook/notebook.py
+++ b/posthog/models/notebook/notebook.py
@@ -12,6 +12,7 @@ class Notebook(UUIDModel):
team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE)
title: models.CharField = models.CharField(max_length=256, blank=True, null=True)
content: JSONField = JSONField(default=None, null=True, blank=True)
+ text_content: models.TextField = models.TextField(blank=True, null=True)
deleted: models.BooleanField = models.BooleanField(default=False)
version: models.IntegerField = models.IntegerField(default=0)
created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True)
diff --git a/posthog/models/property/util.py b/posthog/models/property/util.py
index 9dd81be1b3a5b..18368ac082f5d 100644
--- a/posthog/models/property/util.py
+++ b/posthog/models/property/util.py
@@ -48,7 +48,7 @@
from posthog.models.property.property import ValueT
from posthog.models.team.team import groups_on_events_querying_enabled
from posthog.queries.person_distinct_id_query import get_team_distinct_ids_query
-from posthog.queries.session_query import SessionQuery
+from posthog.session_recordings.queries.session_query import SessionQuery
from posthog.queries.util import PersonPropertiesMode
from posthog.utils import is_json, is_valid_regex
diff --git a/posthog/models/session_recording/__init__.py b/posthog/models/session_recording/__init__.py
deleted file mode 100644
index de4d98f29d25e..0000000000000
--- a/posthog/models/session_recording/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .session_recording import *
diff --git a/posthog/models/session_recording_event/__init__.py b/posthog/models/session_recording_event/__init__.py
deleted file mode 100644
index 23eedb489e29a..0000000000000
--- a/posthog/models/session_recording_event/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .session_recording_event import *
diff --git a/posthog/models/session_recording_playlist/__init__.py b/posthog/models/session_recording_playlist/__init__.py
deleted file mode 100644
index 29fa449f2cff0..0000000000000
--- a/posthog/models/session_recording_playlist/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .session_recording_playlist import *
diff --git a/posthog/models/session_recording_playlist_item/__init__.py b/posthog/models/session_recording_playlist_item/__init__.py
deleted file mode 100644
index 677b89fc1399d..0000000000000
--- a/posthog/models/session_recording_playlist_item/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .session_recording_playlist_item import *
diff --git a/posthog/queries/breakdown_props.py b/posthog/queries/breakdown_props.py
index 4e179cf4628db..9d0ccf80db32e 100644
--- a/posthog/queries/breakdown_props.py
+++ b/posthog/queries/breakdown_props.py
@@ -25,7 +25,7 @@
from posthog.queries.person_on_events_v2_sql import PERSON_OVERRIDES_JOIN_SQL
from posthog.queries.person_query import PersonQuery
from posthog.queries.query_date_range import QueryDateRange
-from posthog.queries.session_query import SessionQuery
+from posthog.session_recordings.queries.session_query import SessionQuery
from posthog.queries.trends.sql import HISTOGRAM_ELEMENTS_ARRAY_OF_KEY_SQL, TOP_ELEMENTS_ARRAY_OF_KEY_SQL
from posthog.queries.util import PersonPropertiesMode
from posthog.utils import PersonOnEventsMode
diff --git a/posthog/queries/event_query/event_query.py b/posthog/queries/event_query/event_query.py
index aca34a3484cd7..9be4dc1a2fbd2 100644
--- a/posthog/queries/event_query/event_query.py
+++ b/posthog/queries/event_query/event_query.py
@@ -18,7 +18,7 @@
from posthog.queries.person_distinct_id_query import get_team_distinct_ids_query
from posthog.queries.person_query import PersonQuery
from posthog.queries.query_date_range import QueryDateRange
-from posthog.queries.session_query import SessionQuery
+from posthog.session_recordings.queries.session_query import SessionQuery
from posthog.queries.util import PersonPropertiesMode
from posthog.utils import PersonOnEventsMode
from posthog.queries.person_on_events_v2_sql import PERSON_OVERRIDES_JOIN_SQL
diff --git a/posthog/queries/trends/breakdown.py b/posthog/queries/trends/breakdown.py
index 7fe281a0c158c..7e1d8c0b6198b 100644
--- a/posthog/queries/trends/breakdown.py
+++ b/posthog/queries/trends/breakdown.py
@@ -38,7 +38,7 @@
from posthog.queries.person_distinct_id_query import get_team_distinct_ids_query
from posthog.queries.person_query import PersonQuery
from posthog.queries.query_date_range import TIME_IN_SECONDS, QueryDateRange
-from posthog.queries.session_query import SessionQuery
+from posthog.session_recordings.queries.session_query import SessionQuery
from posthog.queries.trends.sql import (
BREAKDOWN_ACTIVE_USER_AGGREGATE_SQL,
BREAKDOWN_ACTIVE_USER_CONDITIONS_SQL,
diff --git a/posthog/schema.py b/posthog/schema.py
index 72b581e8c863c..a1da2403d0d8f 100644
--- a/posthog/schema.py
+++ b/posthog/schema.py
@@ -6,7 +6,8 @@
from enum import Enum
from typing import Any, Dict, List, Optional, Union
-from pydantic import BaseModel, Extra, Field
+from pydantic import BaseModel, ConfigDict, Field, RootModel
+from typing_extensions import Literal
class MathGroupTypeIndex(float, Enum):
@@ -62,12 +63,12 @@ class ChartDisplayType(str, Enum):
class CohortPropertyFilter(BaseModel):
- class Config:
- extra = Extra.forbid
-
- key: str = Field("id", const=True)
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ key: Literal["id"] = "id"
label: Optional[str] = None
- type: str = Field("cohort", const=True)
+ type: Literal["cohort"] = "cohort"
value: float
@@ -82,9 +83,9 @@ class CountPerActorMathType(str, Enum):
class DatabaseSchemaQueryResponseField(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
chain: Optional[List[str]] = None
fields: Optional[List[str]] = None
key: str
@@ -93,9 +94,9 @@ class Config:
class DateRange(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
date_from: Optional[str] = None
date_to: Optional[str] = None
@@ -108,9 +109,9 @@ class Key(str, Enum):
class ElementType(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
attr_class: Optional[List[str]] = None
attr_id: Optional[str] = None
attributes: Dict[str, str]
@@ -123,9 +124,9 @@ class Config:
class EmptyPropertyFilter(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
key: Optional[Any] = None
operator: Optional[Any] = None
type: Optional[Any] = None
@@ -139,18 +140,18 @@ class EntityType(str, Enum):
class Person(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
distinct_ids: List[str]
is_identified: Optional[bool] = None
properties: Dict[str, Any]
class EventType(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
distinct_id: str
elements: List[ElementType]
elements_chain: Optional[str] = None
@@ -162,18 +163,10 @@ class Config:
uuid: Optional[str] = None
-class MathGroupTypeIndex1(float, Enum):
- number_0 = 0
- number_1 = 1
- number_2 = 2
- number_3 = 3
- number_4 = 4
-
-
class Response(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
next: Optional[str] = None
results: List[EventType]
@@ -204,9 +197,9 @@ class FunnelPathType(str, Enum):
class FunnelStepRangeEntityFilter(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
custom_name: Optional[str] = None
funnel_from_step: Optional[float] = None
funnel_to_step: Optional[float] = None
@@ -234,9 +227,9 @@ class FunnelCorrelationPersonConverted(str, Enum):
class HogQLNotice(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
end: Optional[float] = None
fix: Optional[str] = None
message: str
@@ -258,9 +251,9 @@ class LifecycleToggle(str, Enum):
class PathCleaningFilter(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
alias: Optional[str] = None
regex: Optional[str] = None
@@ -273,9 +266,9 @@ class PathType(str, Enum):
class PathsFilter(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
edge_limit: Optional[float] = None
end_point: Optional[str] = None
exclude_events: Optional[List[str]] = None
@@ -330,21 +323,21 @@ class PropertyOperator(str, Enum):
class QueryTiming(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
k: str = Field(..., description="Key. Shortened to 'k' to save on data.")
t: float = Field(..., description="Time in seconds. Shortened to 't' to save on data.")
class RecordingDurationFilter(BaseModel):
- class Config:
- extra = Extra.forbid
-
- key: str = Field("duration", const=True)
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ key: Literal["duration"] = "duration"
label: Optional[str] = None
operator: PropertyOperator
- type: str = Field("recording", const=True)
+ type: Literal["recording"] = "recording"
value: float
@@ -366,30 +359,62 @@ class RetentionType(str, Enum):
class SavedInsightNode(BaseModel):
- class Config:
- extra = Extra.forbid
-
- embedded: Optional[bool] = Field(None, description="Query is embedded inside another bordered component")
- full: Optional[bool] = Field(None, description="Show with most visual options enabled. Used in insight scene.")
- kind: str = Field("SavedInsightNode", const=True)
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ allowSorting: Optional[bool] = Field(
+ default=None, description="Can the user click on column headers to sort the table? (default: true)"
+ )
+ embedded: Optional[bool] = Field(default=None, description="Query is embedded inside another bordered component")
+ expandable: Optional[bool] = Field(
+ default=None, description="Can expand row to show raw event data (default: true)"
+ )
+ full: Optional[bool] = Field(
+ default=None, description="Show with most visual options enabled. Used in insight scene."
+ )
+ kind: Literal["SavedInsightNode"] = "SavedInsightNode"
+ propertiesViaUrl: Optional[bool] = Field(default=None, description="Link properties via the URL (default: false)")
shortId: str
+ showActions: Optional[bool] = Field(default=None, description="Show the kebab menu at the end of the row")
+ showColumnConfigurator: Optional[bool] = Field(
+ default=None, description="Show a button to configure the table's columns if possible"
+ )
showCorrelationTable: Optional[bool] = None
+ showDateRange: Optional[bool] = Field(default=None, description="Show date range selector")
+ showElapsedTime: Optional[bool] = Field(default=None, description="Show the time it takes to run a query")
+ showEventFilter: Optional[bool] = Field(
+ default=None, description="Include an event filter above the table (EventsNode only)"
+ )
+ showExport: Optional[bool] = Field(default=None, description="Show the export button")
showFilters: Optional[bool] = None
showHeader: Optional[bool] = None
+ showHogQLEditor: Optional[bool] = Field(default=None, description="Include a HogQL query editor above HogQL tables")
showLastComputation: Optional[bool] = None
showLastComputationRefresh: Optional[bool] = None
+ showOpenEditorButton: Optional[bool] = Field(
+ default=None, description="Show a button to open the current query as a new insight. (default: true)"
+ )
+ showPersistentColumnConfigurator: Optional[bool] = Field(
+ default=None, description="Show a button to configure and persist the table's default columns if possible"
+ )
+ showPropertyFilter: Optional[bool] = Field(default=None, description="Include a property filter above the table")
+ showReload: Optional[bool] = Field(default=None, description="Show a reload button")
showResults: Optional[bool] = None
+ showResultsTable: Optional[bool] = Field(default=None, description="Show a results table")
+ showSavedQueries: Optional[bool] = Field(default=None, description="Shows a list of saved queries")
+ showSearch: Optional[bool] = Field(default=None, description="Include a free text search field (PersonsNode only)")
showTable: Optional[bool] = None
+ showTimings: Optional[bool] = Field(default=None, description="Show a detailed query timing breakdown")
class SessionPropertyFilter(BaseModel):
- class Config:
- extra = Extra.forbid
-
- key: str = Field("$session_duration", const=True)
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ key: Literal["$session_duration"] = "$session_duration"
label: Optional[str] = None
operator: PropertyOperator
- type: str = Field("session", const=True)
+ type: Literal["session"] = "session"
value: Optional[Union[str, float, List[Union[str, float]]]] = None
@@ -406,9 +431,9 @@ class StepOrderValue(str, Enum):
class StickinessFilter(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
compare: Optional[bool] = None
display: Optional[ChartDisplayType] = None
hidden_legend_indexes: Optional[List[float]] = None
@@ -419,16 +444,16 @@ class Config:
class TimeToSeeDataSessionsQueryResponse(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
results: List[Dict[str, Any]]
class TrendsFilter(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
aggregation_axis_format: Optional[AggregationAxisFormat] = None
aggregation_axis_postfix: Optional[str] = None
aggregation_axis_prefix: Optional[str] = None
@@ -445,18 +470,18 @@ class Config:
class Breakdown(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
normalize_url: Optional[bool] = None
property: Union[str, float]
type: BreakdownType
class BreakdownFilter(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
breakdown: Optional[Union[str, float, List[Union[str, float]]]] = None
breakdown_group_type_index: Optional[float] = None
breakdown_histogram_bin_count: Optional[float] = None
@@ -466,31 +491,31 @@ class Config:
class ElementPropertyFilter(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
key: Key
label: Optional[str] = None
operator: PropertyOperator
- type: str = Field("element", const=True)
+ type: Literal["element"] = "element"
value: Optional[Union[str, float, List[Union[str, float]]]] = None
class EventPropertyFilter(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
key: str
label: Optional[str] = None
operator: PropertyOperator
- type: str = Field("event", const=True, description="Event properties")
+ type: Literal["event"] = Field(default="event", description="Event properties")
value: Optional[Union[str, float, List[Union[str, float]]]] = None
class EventsQueryResponse(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
columns: List
hasMore: Optional[bool] = None
results: List[List]
@@ -499,20 +524,20 @@ class Config:
class FeaturePropertyFilter(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
key: str
label: Optional[str] = None
operator: PropertyOperator
- type: str = Field("feature", const=True, description='Event property with "$feature/" prepended')
+ type: Literal["feature"] = Field(default="feature", description='Event property with "$feature/" prepended')
value: Optional[Union[str, float, List[Union[str, float]]]] = None
class FunnelsFilter(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
bin_count: Optional[Union[float, str]] = None
breakdown_attribution_type: Optional[BreakdownAttributionType] = None
breakdown_attribution_value: Optional[float] = None
@@ -538,21 +563,21 @@ class Config:
class GroupPropertyFilter(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
group_type_index: Optional[float] = None
key: str
label: Optional[str] = None
operator: PropertyOperator
- type: str = Field("group", const=True)
+ type: Literal["group"] = "group"
value: Optional[Union[str, float, List[Union[str, float]]]] = None
class HogQLMetadataResponse(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
errors: List[HogQLNotice]
inputExpr: Optional[str] = None
inputSelect: Optional[str] = None
@@ -563,19 +588,19 @@ class Config:
class HogQLPropertyFilter(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
key: str
label: Optional[str] = None
- type: str = Field("hogql", const=True)
+ type: Literal["hogql"] = "hogql"
value: Optional[Union[str, float, List[Union[str, float]]]] = None
class HogQLQueryResponse(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
clickhouse: Optional[str] = None
columns: Optional[List] = None
hogql: Optional[str] = None
@@ -586,29 +611,37 @@ class Config:
class LifecycleFilter(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
show_values_on_series: Optional[bool] = None
shown_as: Optional[ShownAsValue] = None
toggledLifecycles: Optional[List[LifecycleToggle]] = None
-class PersonPropertyFilter(BaseModel):
- class Config:
- extra = Extra.forbid
+class LifecycleQueryResponse(BaseModel):
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ result: List[Dict[str, Any]]
+ timings: Optional[List[QueryTiming]] = None
+
+class PersonPropertyFilter(BaseModel):
+ model_config = ConfigDict(
+ extra="forbid",
+ )
key: str
label: Optional[str] = None
operator: PropertyOperator
- type: str = Field("person", const=True, description="Person properties")
+ type: Literal["person"] = Field(default="person", description="Person properties")
value: Optional[Union[str, float, List[Union[str, float]]]] = None
class RetentionFilter(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
period: Optional[RetentionPeriod] = None
retention_reference: Optional[RetentionReference] = None
retention_type: Optional[RetentionType] = None
@@ -618,31 +651,31 @@ class Config:
class TimeToSeeDataSessionsQuery(BaseModel):
- class Config:
- extra = Extra.forbid
-
- dateRange: Optional[DateRange] = Field(None, description="Date range for the query")
- kind: str = Field("TimeToSeeDataSessionsQuery", const=True)
- response: Optional[TimeToSeeDataSessionsQueryResponse] = Field(None, description="Cached query response")
- teamId: Optional[float] = Field(None, description="Project to filter on. Defaults to current project")
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ dateRange: Optional[DateRange] = Field(default=None, description="Date range for the query")
+ kind: Literal["TimeToSeeDataSessionsQuery"] = "TimeToSeeDataSessionsQuery"
+ response: Optional[TimeToSeeDataSessionsQueryResponse] = Field(default=None, description="Cached query response")
+ teamId: Optional[float] = Field(default=None, description="Project to filter on. Defaults to current project")
class DatabaseSchemaQuery(BaseModel):
- class Config:
- extra = Extra.forbid
-
- kind: str = Field("DatabaseSchemaQuery", const=True)
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ kind: Literal["DatabaseSchemaQuery"] = "DatabaseSchemaQuery"
response: Optional[Dict[str, List[DatabaseSchemaQueryResponseField]]] = Field(
- None, description="Cached query response"
+ default=None, description="Cached query response"
)
class EventsNode(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
custom_name: Optional[str] = None
- event: Optional[str] = Field(None, description="The event or `null` for all events.")
+ event: Optional[str] = Field(default=None, description="The event or `null` for all events.")
fixedProperties: Optional[
List[
Union[
@@ -659,17 +692,19 @@ class Config:
]
]
] = Field(
- None,
+ default=None,
description="Fixed properties in the query, can't be edited in the interface (e.g. scoping down by person)",
)
- kind: str = Field("EventsNode", const=True)
+ kind: Literal["EventsNode"] = "EventsNode"
limit: Optional[float] = None
- math: Optional[Union[BaseMathType, PropertyMathType, CountPerActorMathType, str, str]] = None
- math_group_type_index: Optional[MathGroupTypeIndex1] = None
+ math: Optional[
+ Union[BaseMathType, PropertyMathType, CountPerActorMathType, Literal["unique_group"], Literal["hogql"]]
+ ] = None
+ math_group_type_index: Optional[MathGroupTypeIndex] = None
math_hogql: Optional[str] = None
math_property: Optional[str] = None
name: Optional[str] = None
- orderBy: Optional[List[str]] = Field(None, description="Columns to order by")
+ orderBy: Optional[List[str]] = Field(default=None, description="Columns to order by")
properties: Optional[
List[
Union[
@@ -685,18 +720,18 @@ class Config:
EmptyPropertyFilter,
]
]
- ] = Field(None, description="Properties configurable in the interface")
- response: Optional[Response] = Field(None, description="Return a limited set of data")
+ ] = Field(default=None, description="Properties configurable in the interface")
+ response: Optional[Response] = Field(default=None, description="Return a limited set of data")
class EventsQuery(BaseModel):
- class Config:
- extra = Extra.forbid
-
- actionId: Optional[int] = Field(None, description="Show events matching a given action")
- after: Optional[str] = Field(None, description="Only fetch events that happened after this timestamp")
- before: Optional[str] = Field(None, description="Only fetch events that happened before this timestamp")
- event: Optional[str] = Field(None, description="Limit to events matching this string")
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ actionId: Optional[int] = Field(default=None, description="Show events matching a given action")
+ after: Optional[str] = Field(default=None, description="Only fetch events that happened after this timestamp")
+ before: Optional[str] = Field(default=None, description="Only fetch events that happened before this timestamp")
+ event: Optional[str] = Field(default=None, description="Limit to events matching this string")
fixedProperties: Optional[
List[
Union[
@@ -713,14 +748,14 @@ class Config:
]
]
] = Field(
- None,
+ default=None,
description="Fixed properties in the query, can't be edited in the interface (e.g. scoping down by person)",
)
- kind: str = Field("EventsQuery", const=True)
- limit: Optional[int] = Field(None, description="Number of rows to return")
- offset: Optional[int] = Field(None, description="Number of rows to skip before returning rows")
- orderBy: Optional[List[str]] = Field(None, description="Columns to order by")
- personId: Optional[str] = Field(None, description="Show events for a given person")
+ kind: Literal["EventsQuery"] = "EventsQuery"
+ limit: Optional[int] = Field(default=None, description="Number of rows to return")
+ offset: Optional[int] = Field(default=None, description="Number of rows to skip before returning rows")
+ orderBy: Optional[List[str]] = Field(default=None, description="Columns to order by")
+ personId: Optional[str] = Field(default=None, description="Show events for a given person")
properties: Optional[
List[
Union[
@@ -736,16 +771,16 @@ class Config:
EmptyPropertyFilter,
]
]
- ] = Field(None, description="Properties configurable in the interface")
- response: Optional[EventsQueryResponse] = Field(None, description="Cached query response")
+ ] = Field(default=None, description="Properties configurable in the interface")
+ response: Optional[EventsQueryResponse] = Field(default=None, description="Cached query response")
select: List[str] = Field(..., description="Return a limited set of data. Required.")
- where: Optional[List[str]] = Field(None, description="HogQL filters to apply on returned data")
+ where: Optional[List[str]] = Field(default=None, description="HogQL filters to apply on returned data")
class HogQLFilters(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
dateRange: Optional[DateRange] = None
properties: Optional[
List[
@@ -766,30 +801,30 @@ class Config:
class HogQLMetadata(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
expr: Optional[str] = None
filters: Optional[HogQLFilters] = None
- kind: str = Field("HogQLMetadata", const=True)
- response: Optional[HogQLMetadataResponse] = Field(None, description="Cached query response")
+ kind: Literal["HogQLMetadata"] = "HogQLMetadata"
+ response: Optional[HogQLMetadataResponse] = Field(default=None, description="Cached query response")
select: Optional[str] = None
class HogQLQuery(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
filters: Optional[HogQLFilters] = None
- kind: str = Field("HogQLQuery", const=True)
+ kind: Literal["HogQLQuery"] = "HogQLQuery"
query: str
- response: Optional[HogQLQueryResponse] = Field(None, description="Cached query response")
+ response: Optional[HogQLQueryResponse] = Field(default=None, description="Cached query response")
class PersonsNode(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
cohort: Optional[float] = None
distinctId: Optional[str] = None
fixedProperties: Optional[
@@ -808,10 +843,10 @@ class Config:
]
]
] = Field(
- None,
+ default=None,
description="Fixed properties in the query, can't be edited in the interface (e.g. scoping down by person)",
)
- kind: str = Field("PersonsNode", const=True)
+ kind: Literal["PersonsNode"] = "PersonsNode"
limit: Optional[float] = None
offset: Optional[float] = None
properties: Optional[
@@ -829,15 +864,15 @@ class Config:
EmptyPropertyFilter,
]
]
- ] = Field(None, description="Properties configurable in the interface")
- response: Optional[Dict[str, Any]] = Field(None, description="Cached query response")
+ ] = Field(default=None, description="Properties configurable in the interface")
+ response: Optional[Dict[str, Any]] = Field(default=None, description="Cached query response")
search: Optional[str] = None
class PropertyGroupFilterValue(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
type: FilterLogicalOperator
values: List[
Union[
@@ -859,9 +894,9 @@ class Config:
class ActionsNode(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
custom_name: Optional[str] = None
fixedProperties: Optional[
List[
@@ -879,12 +914,14 @@ class Config:
]
]
] = Field(
- None,
+ default=None,
description="Fixed properties in the query, can't be edited in the interface (e.g. scoping down by person)",
)
id: float
- kind: str = Field("ActionsNode", const=True)
- math: Optional[Union[BaseMathType, PropertyMathType, CountPerActorMathType, str, str]] = None
+ kind: Literal["ActionsNode"] = "ActionsNode"
+ math: Optional[
+ Union[BaseMathType, PropertyMathType, CountPerActorMathType, Literal["unique_group"], Literal["hogql"]]
+ ] = None
math_group_type_index: Optional[MathGroupTypeIndex] = None
math_hogql: Optional[str] = None
math_property: Optional[str] = None
@@ -904,74 +941,76 @@ class Config:
EmptyPropertyFilter,
]
]
- ] = Field(None, description="Properties configurable in the interface")
- response: Optional[Dict[str, Any]] = Field(None, description="Cached query response")
+ ] = Field(default=None, description="Properties configurable in the interface")
+ response: Optional[Dict[str, Any]] = Field(default=None, description="Cached query response")
class DataTableNode(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
allowSorting: Optional[bool] = Field(
- None, description="Can the user click on column headers to sort the table? (default: true)"
+ default=None, description="Can the user click on column headers to sort the table? (default: true)"
)
columns: Optional[List[str]] = Field(
- None, description="Columns shown in the table, unless the `source` provides them."
+ default=None, description="Columns shown in the table, unless the `source` provides them."
+ )
+ embedded: Optional[bool] = Field(default=None, description="Uses the embedded version of LemonTable")
+ expandable: Optional[bool] = Field(
+ default=None, description="Can expand row to show raw event data (default: true)"
)
- embedded: Optional[bool] = Field(None, description="Uses the embedded version of LemonTable")
- expandable: Optional[bool] = Field(None, description="Can expand row to show raw event data (default: true)")
- full: Optional[bool] = Field(None, description="Show with most visual options enabled. Used in scenes.")
+ full: Optional[bool] = Field(default=None, description="Show with most visual options enabled. Used in scenes.")
hiddenColumns: Optional[List[str]] = Field(
- None, description="Columns that aren't shown in the table, even if in columns or returned data"
+ default=None, description="Columns that aren't shown in the table, even if in columns or returned data"
)
- kind: str = Field("DataTableNode", const=True)
- propertiesViaUrl: Optional[bool] = Field(None, description="Link properties via the URL (default: false)")
- showActions: Optional[bool] = Field(None, description="Show the kebab menu at the end of the row")
+ kind: Literal["DataTableNode"] = "DataTableNode"
+ propertiesViaUrl: Optional[bool] = Field(default=None, description="Link properties via the URL (default: false)")
+ showActions: Optional[bool] = Field(default=None, description="Show the kebab menu at the end of the row")
showColumnConfigurator: Optional[bool] = Field(
- None, description="Show a button to configure the table's columns if possible"
+ default=None, description="Show a button to configure the table's columns if possible"
)
- showDateRange: Optional[bool] = Field(None, description="Show date range selector")
- showElapsedTime: Optional[bool] = Field(None, description="Show the time it takes to run a query")
+ showDateRange: Optional[bool] = Field(default=None, description="Show date range selector")
+ showElapsedTime: Optional[bool] = Field(default=None, description="Show the time it takes to run a query")
showEventFilter: Optional[bool] = Field(
- None, description="Include an event filter above the table (EventsNode only)"
+ default=None, description="Include an event filter above the table (EventsNode only)"
)
- showExport: Optional[bool] = Field(None, description="Show the export button")
- showHogQLEditor: Optional[bool] = Field(None, description="Include a HogQL query editor above HogQL tables")
+ showExport: Optional[bool] = Field(default=None, description="Show the export button")
+ showHogQLEditor: Optional[bool] = Field(default=None, description="Include a HogQL query editor above HogQL tables")
showOpenEditorButton: Optional[bool] = Field(
- None, description="Show a button to open the current query as a new insight. (default: true)"
+ default=None, description="Show a button to open the current query as a new insight. (default: true)"
)
showPersistentColumnConfigurator: Optional[bool] = Field(
- None, description="Show a button to configure and persist the table's default columns if possible"
- )
- showPropertyFilter: Optional[bool] = Field(None, description="Include a property filter above the table")
- showReload: Optional[bool] = Field(None, description="Show a reload button")
- showResultsTable: Optional[bool] = Field(None, description="Show a results table")
- showSavedQueries: Optional[bool] = Field(None, description="Shows a list of saved queries")
- showSearch: Optional[bool] = Field(None, description="Include a free text search field (PersonsNode only)")
- showTimings: Optional[bool] = Field(None, description="Show a detailed query timing breakdown")
+ default=None, description="Show a button to configure and persist the table's default columns if possible"
+ )
+ showPropertyFilter: Optional[bool] = Field(default=None, description="Include a property filter above the table")
+ showReload: Optional[bool] = Field(default=None, description="Show a reload button")
+ showResultsTable: Optional[bool] = Field(default=None, description="Show a results table")
+ showSavedQueries: Optional[bool] = Field(default=None, description="Shows a list of saved queries")
+ showSearch: Optional[bool] = Field(default=None, description="Include a free text search field (PersonsNode only)")
+ showTimings: Optional[bool] = Field(default=None, description="Show a detailed query timing breakdown")
source: Union[EventsNode, EventsQuery, PersonsNode, HogQLQuery, TimeToSeeDataSessionsQuery] = Field(
..., description="Source of the events"
)
class PropertyGroupFilter(BaseModel):
- class Config:
- extra = Extra.forbid
-
+ model_config = ConfigDict(
+ extra="forbid",
+ )
type: FilterLogicalOperator
values: List[PropertyGroupFilterValue]
class RetentionQuery(BaseModel):
- class Config:
- extra = Extra.forbid
-
- aggregation_group_type_index: Optional[float] = Field(None, description="Groups aggregation")
- dateRange: Optional[DateRange] = Field(None, description="Date range for the query")
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ aggregation_group_type_index: Optional[float] = Field(default=None, description="Groups aggregation")
+ dateRange: Optional[DateRange] = Field(default=None, description="Date range for the query")
filterTestAccounts: Optional[bool] = Field(
- None, description="Exclude internal and test users by applying the respective filters"
+ default=None, description="Exclude internal and test users by applying the respective filters"
)
- kind: str = Field("RetentionQuery", const=True)
+ kind: Literal["RetentionQuery"] = "RetentionQuery"
properties: Optional[
Union[
List[
@@ -990,24 +1029,26 @@ class Config:
],
PropertyGroupFilter,
]
- ] = Field(None, description="Property filters for all series")
- retentionFilter: Optional[RetentionFilter] = Field(None, description="Properties specific to the retention insight")
- samplingFactor: Optional[float] = Field(None, description="Sampling rate")
+ ] = Field(default=None, description="Property filters for all series")
+ retentionFilter: Optional[RetentionFilter] = Field(
+ default=None, description="Properties specific to the retention insight"
+ )
+ samplingFactor: Optional[float] = Field(default=None, description="Sampling rate")
class StickinessQuery(BaseModel):
- class Config:
- extra = Extra.forbid
-
- aggregation_group_type_index: Optional[float] = Field(None, description="Groups aggregation")
- dateRange: Optional[DateRange] = Field(None, description="Date range for the query")
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ aggregation_group_type_index: Optional[float] = Field(default=None, description="Groups aggregation")
+ dateRange: Optional[DateRange] = Field(default=None, description="Date range for the query")
filterTestAccounts: Optional[bool] = Field(
- None, description="Exclude internal and test users by applying the respective filters"
+ default=None, description="Exclude internal and test users by applying the respective filters"
)
interval: Optional[IntervalType] = Field(
- None, description="Granularity of the response. Can be one of `hour`, `day`, `week` or `month`"
+ default=None, description="Granularity of the response. Can be one of `hour`, `day`, `week` or `month`"
)
- kind: str = Field("StickinessQuery", const=True)
+ kind: Literal["StickinessQuery"] = "StickinessQuery"
properties: Optional[
Union[
List[
@@ -1026,28 +1067,28 @@ class Config:
],
PropertyGroupFilter,
]
- ] = Field(None, description="Property filters for all series")
- samplingFactor: Optional[float] = Field(None, description="Sampling rate")
+ ] = Field(default=None, description="Property filters for all series")
+ samplingFactor: Optional[float] = Field(default=None, description="Sampling rate")
series: List[Union[EventsNode, ActionsNode]] = Field(..., description="Events and actions to include")
stickinessFilter: Optional[StickinessFilter] = Field(
- None, description="Properties specific to the stickiness insight"
+ default=None, description="Properties specific to the stickiness insight"
)
class TrendsQuery(BaseModel):
- class Config:
- extra = Extra.forbid
-
- aggregation_group_type_index: Optional[float] = Field(None, description="Groups aggregation")
- breakdown: Optional[BreakdownFilter] = Field(None, description="Breakdown of the events and actions")
- dateRange: Optional[DateRange] = Field(None, description="Date range for the query")
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ aggregation_group_type_index: Optional[float] = Field(default=None, description="Groups aggregation")
+ breakdown: Optional[BreakdownFilter] = Field(default=None, description="Breakdown of the events and actions")
+ dateRange: Optional[DateRange] = Field(default=None, description="Date range for the query")
filterTestAccounts: Optional[bool] = Field(
- None, description="Exclude internal and test users by applying the respective filters"
+ default=None, description="Exclude internal and test users by applying the respective filters"
)
interval: Optional[IntervalType] = Field(
- None, description="Granularity of the response. Can be one of `hour`, `day`, `week` or `month`"
+ default=None, description="Granularity of the response. Can be one of `hour`, `day`, `week` or `month`"
)
- kind: str = Field("TrendsQuery", const=True)
+ kind: Literal["TrendsQuery"] = "TrendsQuery"
properties: Optional[
Union[
List[
@@ -1066,27 +1107,29 @@ class Config:
],
PropertyGroupFilter,
]
- ] = Field(None, description="Property filters for all series")
- samplingFactor: Optional[float] = Field(None, description="Sampling rate")
+ ] = Field(default=None, description="Property filters for all series")
+ samplingFactor: Optional[float] = Field(default=None, description="Sampling rate")
series: List[Union[EventsNode, ActionsNode]] = Field(..., description="Events and actions to include")
- trendsFilter: Optional[TrendsFilter] = Field(None, description="Properties specific to the trends insight")
+ trendsFilter: Optional[TrendsFilter] = Field(default=None, description="Properties specific to the trends insight")
class FunnelsQuery(BaseModel):
- class Config:
- extra = Extra.forbid
-
- aggregation_group_type_index: Optional[float] = Field(None, description="Groups aggregation")
- breakdown: Optional[BreakdownFilter] = Field(None, description="Breakdown of the events and actions")
- dateRange: Optional[DateRange] = Field(None, description="Date range for the query")
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ aggregation_group_type_index: Optional[float] = Field(default=None, description="Groups aggregation")
+ breakdown: Optional[BreakdownFilter] = Field(default=None, description="Breakdown of the events and actions")
+ dateRange: Optional[DateRange] = Field(default=None, description="Date range for the query")
filterTestAccounts: Optional[bool] = Field(
- None, description="Exclude internal and test users by applying the respective filters"
+ default=None, description="Exclude internal and test users by applying the respective filters"
+ )
+ funnelsFilter: Optional[FunnelsFilter] = Field(
+ default=None, description="Properties specific to the funnels insight"
)
- funnelsFilter: Optional[FunnelsFilter] = Field(None, description="Properties specific to the funnels insight")
interval: Optional[IntervalType] = Field(
- None, description="Granularity of the response. Can be one of `hour`, `day`, `week` or `month`"
+ default=None, description="Granularity of the response. Can be one of `hour`, `day`, `week` or `month`"
)
- kind: str = Field("FunnelsQuery", const=True)
+ kind: Literal["FunnelsQuery"] = "FunnelsQuery"
properties: Optional[
Union[
List[
@@ -1105,25 +1148,27 @@ class Config:
],
PropertyGroupFilter,
]
- ] = Field(None, description="Property filters for all series")
- samplingFactor: Optional[float] = Field(None, description="Sampling rate")
+ ] = Field(default=None, description="Property filters for all series")
+ samplingFactor: Optional[float] = Field(default=None, description="Sampling rate")
series: List[Union[EventsNode, ActionsNode]] = Field(..., description="Events and actions to include")
class LifecycleQuery(BaseModel):
- class Config:
- extra = Extra.forbid
-
- aggregation_group_type_index: Optional[float] = Field(None, description="Groups aggregation")
- dateRange: Optional[DateRange] = Field(None, description="Date range for the query")
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ aggregation_group_type_index: Optional[float] = Field(default=None, description="Groups aggregation")
+ dateRange: Optional[DateRange] = Field(default=None, description="Date range for the query")
filterTestAccounts: Optional[bool] = Field(
- None, description="Exclude internal and test users by applying the respective filters"
+ default=None, description="Exclude internal and test users by applying the respective filters"
)
interval: Optional[IntervalType] = Field(
- None, description="Granularity of the response. Can be one of `hour`, `day`, `week` or `month`"
+ default=None, description="Granularity of the response. Can be one of `hour`, `day`, `week` or `month`"
+ )
+ kind: Literal["LifecycleQuery"] = "LifecycleQuery"
+ lifecycleFilter: Optional[LifecycleFilter] = Field(
+ default=None, description="Properties specific to the lifecycle insight"
)
- kind: str = Field("LifecycleQuery", const=True)
- lifecycleFilter: Optional[LifecycleFilter] = Field(None, description="Properties specific to the lifecycle insight")
properties: Optional[
Union[
List[
@@ -1142,22 +1187,23 @@ class Config:
],
PropertyGroupFilter,
]
- ] = Field(None, description="Property filters for all series")
- samplingFactor: Optional[float] = Field(None, description="Sampling rate")
+ ] = Field(default=None, description="Property filters for all series")
+ response: Optional[LifecycleQueryResponse] = None
+ samplingFactor: Optional[float] = Field(default=None, description="Sampling rate")
series: List[Union[EventsNode, ActionsNode]] = Field(..., description="Events and actions to include")
class PathsQuery(BaseModel):
- class Config:
- extra = Extra.forbid
-
- aggregation_group_type_index: Optional[float] = Field(None, description="Groups aggregation")
- dateRange: Optional[DateRange] = Field(None, description="Date range for the query")
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ aggregation_group_type_index: Optional[float] = Field(default=None, description="Groups aggregation")
+ dateRange: Optional[DateRange] = Field(default=None, description="Date range for the query")
filterTestAccounts: Optional[bool] = Field(
- None, description="Exclude internal and test users by applying the respective filters"
+ default=None, description="Exclude internal and test users by applying the respective filters"
)
- kind: str = Field("PathsQuery", const=True)
- pathsFilter: Optional[PathsFilter] = Field(None, description="Properties specific to the paths insight")
+ kind: Literal["PathsQuery"] = "PathsQuery"
+ pathsFilter: Optional[PathsFilter] = Field(default=None, description="Properties specific to the paths insight")
properties: Optional[
Union[
List[
@@ -1176,17 +1222,19 @@ class Config:
],
PropertyGroupFilter,
]
- ] = Field(None, description="Property filters for all series")
- samplingFactor: Optional[float] = Field(None, description="Sampling rate")
+ ] = Field(default=None, description="Property filters for all series")
+ samplingFactor: Optional[float] = Field(default=None, description="Sampling rate")
class InsightVizNode(BaseModel):
- class Config:
- extra = Extra.forbid
-
- embedded: Optional[bool] = Field(None, description="Query is embedded inside another bordered component")
- full: Optional[bool] = Field(None, description="Show with most visual options enabled. Used in insight scene.")
- kind: str = Field("InsightVizNode", const=True)
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ embedded: Optional[bool] = Field(default=None, description="Query is embedded inside another bordered component")
+ full: Optional[bool] = Field(
+ default=None, description="Show with most visual options enabled. Used in insight scene."
+ )
+ kind: Literal["InsightVizNode"] = "InsightVizNode"
showCorrelationTable: Optional[bool] = None
showFilters: Optional[bool] = None
showHeader: Optional[bool] = None
@@ -1197,8 +1245,8 @@ class Config:
source: Union[TrendsQuery, FunnelsQuery, RetentionQuery, PathsQuery, StickinessQuery, LifecycleQuery]
-class Model(BaseModel):
- __root__: Union[
+class Model(RootModel):
+ root: Union[
DataTableNode,
SavedInsightNode,
InsightVizNode,
@@ -1214,4 +1262,4 @@ class Model(BaseModel):
]
-PropertyGroupFilterValue.update_forward_refs()
+PropertyGroupFilterValue.model_rebuild()
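
The schema.py hunks above are the output of regenerating the models for pydantic v2: the nested class Config becomes model_config = ConfigDict(...), bare Field(None, ...) defaults become explicit default=None, const=True kind strings become Literal[...] defaults, the __root__ wrapper becomes a RootModel, and update_forward_refs() becomes model_rebuild(). A minimal sketch of the same pattern on a made-up ExampleQuery model (illustrative only, not part of the patch):

    from typing import Literal, Optional
    from pydantic import BaseModel, ConfigDict, Field, RootModel

    class ExampleQuery(BaseModel):
        # pydantic v2: the nested `class Config` becomes model_config
        model_config = ConfigDict(extra="forbid")

        # v1's `kind: str = Field("ExampleQuery", const=True)` becomes a Literal default
        kind: Literal["ExampleQuery"] = "ExampleQuery"
        limit: Optional[int] = Field(default=None, description="Number of rows to return")

    class ExampleModel(RootModel):
        # v1's __root__ union wrapper becomes a RootModel with a `root` field
        root: ExampleQuery

    # v1's ExampleQuery.update_forward_refs() becomes model_rebuild()
    ExampleQuery.model_rebuild()
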
diff --git a/posthog/session_recordings/__init__.py b/posthog/session_recordings/__init__.py
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/posthog/session_recordings/models/__init__.py b/posthog/session_recordings/models/__init__.py
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/posthog/models/session_recording/metadata.py b/posthog/session_recordings/models/metadata.py
similarity index 100%
rename from posthog/models/session_recording/metadata.py
rename to posthog/session_recordings/models/metadata.py
diff --git a/posthog/models/session_recording/session_recording.py b/posthog/session_recordings/models/session_recording.py
similarity index 96%
rename from posthog/models/session_recording/session_recording.py
rename to posthog/session_recordings/models/session_recording.py
index f8eded26d9c5b..65dd13b913257 100644
--- a/posthog/models/session_recording/session_recording.py
+++ b/posthog/session_recordings/models/session_recording.py
@@ -6,15 +6,15 @@
from posthog.celery import ee_persist_single_recording
from posthog.models.person.person import Person
-from posthog.models.session_recording.metadata import (
+from posthog.session_recordings.models.metadata import (
DecompressedRecordingData,
RecordingMatchingEvents,
RecordingMetadata,
)
-from posthog.models.session_recording_event.session_recording_event import SessionRecordingViewed
+from posthog.session_recordings.models.session_recording_event import SessionRecordingViewed
from posthog.models.team.team import Team
from posthog.models.utils import UUIDModel
-from posthog.queries.session_recordings.session_replay_events import SessionReplayEvents
+from posthog.session_recordings.queries.session_replay_events import SessionReplayEvents
from django.conf import settings
@@ -98,7 +98,7 @@ def load_metadata(self) -> bool:
return True
def load_snapshots(self, limit=20, offset=0) -> None:
- from posthog.queries.session_recordings.session_recording_events import SessionRecordingEvents
+ from posthog.session_recordings.queries.session_recording_events import SessionRecordingEvents
if self._snapshots:
return
@@ -117,7 +117,7 @@ def load_object_data(self) -> None:
This is only called in the to-be deprecated v1 of session recordings snapshot API
"""
try:
- from ee.models.session_recording_extensions import load_persisted_recording
+ from ee.session_recordings.session_recording_extensions import load_persisted_recording
except ImportError:
load_persisted_recording = lambda *args: None
diff --git a/posthog/models/session_recording_event/session_recording_event.py b/posthog/session_recordings/models/session_recording_event.py
similarity index 100%
rename from posthog/models/session_recording_event/session_recording_event.py
rename to posthog/session_recordings/models/session_recording_event.py
diff --git a/posthog/models/session_recording_playlist/session_recording_playlist.py b/posthog/session_recordings/models/session_recording_playlist.py
similarity index 100%
rename from posthog/models/session_recording_playlist/session_recording_playlist.py
rename to posthog/session_recordings/models/session_recording_playlist.py
diff --git a/posthog/models/session_recording_playlist_item/session_recording_playlist_item.py b/posthog/session_recordings/models/session_recording_playlist_item.py
similarity index 100%
rename from posthog/models/session_recording_playlist_item/session_recording_playlist_item.py
rename to posthog/session_recordings/models/session_recording_playlist_item.py
diff --git a/posthog/models/session_recording_event/util.py b/posthog/session_recordings/models/system_status_queries.py
similarity index 100%
rename from posthog/models/session_recording_event/util.py
rename to posthog/session_recordings/models/system_status_queries.py
diff --git a/posthog/session_recordings/queries/__init__.py b/posthog/session_recordings/queries/__init__.py
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/posthog/queries/session_query.py b/posthog/session_recordings/queries/session_query.py
similarity index 100%
rename from posthog/queries/session_query.py
rename to posthog/session_recordings/queries/session_query.py
diff --git a/posthog/queries/session_recordings/session_recording_events.py b/posthog/session_recordings/queries/session_recording_events.py
similarity index 98%
rename from posthog/queries/session_recordings/session_recording_events.py
rename to posthog/session_recordings/queries/session_recording_events.py
index 7a6975c3ef69d..826fb2a770ab0 100644
--- a/posthog/queries/session_recordings/session_recording_events.py
+++ b/posthog/session_recordings/queries/session_recording_events.py
@@ -4,7 +4,7 @@
from posthog.client import sync_execute
from posthog.models import Team
-from posthog.models.session_recording.metadata import (
+from posthog.session_recordings.models.metadata import (
DecompressedRecordingData,
SessionRecordingEvent,
SnapshotDataTaggedWithWindowId,
diff --git a/posthog/queries/session_recordings/session_recording_list_from_replay_summary.py b/posthog/session_recordings/queries/session_recording_list_from_replay_summary.py
similarity index 100%
rename from posthog/queries/session_recordings/session_recording_list_from_replay_summary.py
rename to posthog/session_recordings/queries/session_recording_list_from_replay_summary.py
diff --git a/posthog/queries/session_recordings/session_recording_properties.py b/posthog/session_recordings/queries/session_recording_properties.py
similarity index 100%
rename from posthog/queries/session_recordings/session_recording_properties.py
rename to posthog/session_recordings/queries/session_recording_properties.py
diff --git a/posthog/queries/session_recordings/session_replay_events.py b/posthog/session_recordings/queries/session_replay_events.py
similarity index 97%
rename from posthog/queries/session_recordings/session_replay_events.py
rename to posthog/session_recordings/queries/session_replay_events.py
index 3523be4e47101..6521e9f39fdb2 100644
--- a/posthog/queries/session_recordings/session_replay_events.py
+++ b/posthog/session_recordings/queries/session_replay_events.py
@@ -3,7 +3,7 @@
from posthog.clickhouse.client import sync_execute
from posthog.models.team import Team
-from posthog.models.session_recording.metadata import (
+from posthog.session_recordings.models.metadata import (
RecordingMetadata,
)
diff --git a/posthog/session_recordings/queries/test/__init__.py b/posthog/session_recordings/queries/test/__init__.py
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/posthog/queries/session_recordings/test/__snapshots__/test_session_recording_list_from_session_replay.ambr b/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_session_replay.ambr
similarity index 100%
rename from posthog/queries/session_recordings/test/__snapshots__/test_session_recording_list_from_session_replay.ambr
rename to posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_session_replay.ambr
diff --git a/posthog/queries/session_recordings/test/__snapshots__/test_session_recording_properties.ambr b/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_properties.ambr
similarity index 100%
rename from posthog/queries/session_recordings/test/__snapshots__/test_session_recording_properties.ambr
rename to posthog/session_recordings/queries/test/__snapshots__/test_session_recording_properties.ambr
diff --git a/posthog/queries/session_recordings/test/__snapshots__/test_session_replay_summaries.ambr b/posthog/session_recordings/queries/test/__snapshots__/test_session_replay_summaries.ambr
similarity index 100%
rename from posthog/queries/session_recordings/test/__snapshots__/test_session_replay_summaries.ambr
rename to posthog/session_recordings/queries/test/__snapshots__/test_session_replay_summaries.ambr
diff --git a/posthog/queries/session_recordings/test/session_replay_sql.py b/posthog/session_recordings/queries/test/session_replay_sql.py
similarity index 100%
rename from posthog/queries/session_recordings/test/session_replay_sql.py
rename to posthog/session_recordings/queries/test/session_replay_sql.py
diff --git a/posthog/queries/session_recordings/test/test_session_recording.py b/posthog/session_recordings/queries/test/test_session_recording.py
similarity index 90%
rename from posthog/queries/session_recordings/test/test_session_recording.py
rename to posthog/session_recordings/queries/test/test_session_recording.py
index 7f55a50eb3cf2..28f992fabebd8 100644
--- a/posthog/queries/session_recordings/test/test_session_recording.py
+++ b/posthog/session_recordings/queries/test/test_session_recording.py
@@ -8,7 +8,7 @@
from posthog.models import Filter
from posthog.models.team import Team
-from posthog.queries.session_recordings.session_recording_events import SessionRecordingEvents
+from posthog.session_recordings.queries.session_recording_events import SessionRecordingEvents
from posthog.session_recordings.session_recording_helpers import (
DecompressedRecordingData,
)
@@ -75,10 +75,11 @@ def test_get_snapshots(self):
)
filter = create_recording_filter("1")
- recording: DecompressedRecordingData = SessionRecordingEvents(
+ recording: DecompressedRecordingData | None = SessionRecordingEvents(
team=self.team, session_recording_id="1"
).get_snapshots(filter.limit, filter.offset)
+ assert recording is not None
self.assertEqual(
recording["snapshot_data_by_window_id"],
{
@@ -116,10 +117,11 @@ def test_get_snapshots_does_not_leak_teams(self):
)
filter = create_recording_filter("1")
- recording: DecompressedRecordingData = SessionRecordingEvents(
+ recording: DecompressedRecordingData | None = SessionRecordingEvents(
team=self.team, session_recording_id="1"
).get_snapshots(filter.limit, filter.offset)
+ assert recording is not None
self.assertEqual(
recording["snapshot_data_by_window_id"],
{"": [{"data": {"source": 0}, "timestamp": 1600000000000, "type": 3}]},
@@ -127,10 +129,11 @@ def test_get_snapshots_does_not_leak_teams(self):
def test_get_snapshots_with_no_such_session(self):
filter = create_recording_filter("xxx")
- recording: DecompressedRecordingData = SessionRecordingEvents(
+ recording: DecompressedRecordingData | None = SessionRecordingEvents(
team=self.team, session_recording_id="xxx"
).get_snapshots(filter.limit, filter.offset)
- assert not recording
+
+ assert recording is None
def test_get_chunked_snapshots(self):
with freeze_time("2020-09-13T12:26:40.000Z"):
@@ -149,9 +152,11 @@ def test_get_chunked_snapshots(self):
)
filter = create_recording_filter(chunked_session_id)
- recording: DecompressedRecordingData = SessionRecordingEvents(
+ recording: DecompressedRecordingData | None = SessionRecordingEvents(
team=self.team, session_recording_id=chunked_session_id
).get_snapshots(limit, filter.offset)
+
+ assert recording is not None
self.assertEqual(len(recording["snapshot_data_by_window_id"][""]), limit * snapshots_per_chunk)
self.assertTrue(recording["has_next"])
@@ -173,10 +178,11 @@ def test_get_chunked_snapshots_with_specific_limit_and_offset(self):
)
filter = create_recording_filter(chunked_session_id, limit, offset)
- recording: DecompressedRecordingData = SessionRecordingEvents(
+ recording: DecompressedRecordingData | None = SessionRecordingEvents(
team=self.team, session_recording_id=chunked_session_id
).get_snapshots(limit, filter.offset)
+ assert recording is not None
self.assertEqual(len(recording["snapshot_data_by_window_id"][""]), limit * snapshots_per_chunk)
self.assertEqual(recording["snapshot_data_by_window_id"][""][0]["timestamp"], 1_600_000_300_000)
self.assertTrue(recording["has_next"])
@@ -207,8 +213,9 @@ def test_get_snapshots_with_date_filter(self):
filter = create_recording_filter(
"1",
)
- recording: DecompressedRecordingData = SessionRecordingEvents(
+ recording: DecompressedRecordingData | None = SessionRecordingEvents(
team=self.team, session_recording_id="1", recording_start_time=now()
).get_snapshots(filter.limit, filter.offset)
+ assert recording is not None
self.assertEqual(len(recording["snapshot_data_by_window_id"][""]), 1)
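
The test edits above follow from get_snapshots being typed as returning an optional value: annotating the result as DecompressedRecordingData | None and asserting it is not None narrows the type before subscripting, which keeps mypy satisfied and fails with a clearer message than a TypeError would. A self-contained sketch of the pattern (the names here are illustrative, not taken from the patch):

    from typing import Optional, TypedDict

    class RecordingData(TypedDict):
        snapshot_data_by_window_id: dict

    def get_snapshots(session_id: str) -> Optional[RecordingData]:
        # returns None when no snapshots exist for the session
        return None if session_id == "xxx" else {"snapshot_data_by_window_id": {}}

    recording: Optional[RecordingData] = get_snapshots("1")
    assert recording is not None  # narrows Optional[RecordingData] to RecordingData
    assert recording["snapshot_data_by_window_id"] == {}
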
diff --git a/posthog/session_recordings/queries/test/test_session_recording_list.py b/posthog/session_recordings/queries/test/test_session_recording_list.py
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/posthog/queries/session_recordings/test/test_session_recording_list_from_session_replay.py b/posthog/session_recordings/queries/test/test_session_recording_list_from_session_replay.py
similarity index 99%
rename from posthog/queries/session_recordings/test/test_session_recording_list_from_session_replay.py
rename to posthog/session_recordings/queries/test/test_session_recording_list_from_session_replay.py
index 88484f316e150..6a529313a3851 100644
--- a/posthog/queries/session_recordings/test/test_session_recording_list_from_session_replay.py
+++ b/posthog/session_recordings/queries/test/test_session_recording_list_from_session_replay.py
@@ -12,13 +12,13 @@
from posthog.models.action import Action
from posthog.models.action_step import ActionStep
from posthog.models.filters.session_recordings_filter import SessionRecordingsFilter
-from posthog.models.session_replay_event.sql import TRUNCATE_SESSION_REPLAY_EVENTS_TABLE_SQL
+from posthog.session_recordings.sql.session_replay_event_sql import TRUNCATE_SESSION_REPLAY_EVENTS_TABLE_SQL
from posthog.models.team import Team
-from posthog.queries.session_recordings.session_recording_list_from_replay_summary import (
+from posthog.session_recordings.queries.session_recording_list_from_replay_summary import (
SessionRecordingListFromReplaySummary,
ttl_days,
)
-from posthog.queries.session_recordings.test.session_replay_sql import produce_replay_summary
+from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary
from posthog.test.base import (
APIBaseTest,
ClickhouseTestMixin,
diff --git a/posthog/queries/session_recordings/test/test_session_recording_properties.py b/posthog/session_recordings/queries/test/test_session_recording_properties.py
similarity index 98%
rename from posthog/queries/session_recordings/test/test_session_recording_properties.py
rename to posthog/session_recordings/queries/test/test_session_recording_properties.py
index 1d553541fe5fa..387d41bbe1ebc 100644
--- a/posthog/queries/session_recordings/test/test_session_recording_properties.py
+++ b/posthog/session_recordings/queries/test/test_session_recording_properties.py
@@ -4,7 +4,7 @@
from posthog.models import Person
from posthog.models.filters.session_recordings_filter import SessionRecordingsFilter
-from posthog.queries.session_recordings.session_recording_properties import SessionRecordingProperties
+from posthog.session_recordings.queries.session_recording_properties import SessionRecordingProperties
from posthog.session_recordings.test.test_factory import create_snapshot
from posthog.test.base import BaseTest, ClickhouseTestMixin, _create_event, snapshot_clickhouse_queries
diff --git a/posthog/queries/session_recordings/test/test_session_replay_events.py b/posthog/session_recordings/queries/test/test_session_replay_events.py
similarity index 95%
rename from posthog/queries/session_recordings/test/test_session_replay_events.py
rename to posthog/session_recordings/queries/test/test_session_replay_events.py
index 873e741b3fc48..c304233ff98d4 100644
--- a/posthog/queries/session_recordings/test/test_session_replay_events.py
+++ b/posthog/session_recordings/queries/test/test_session_replay_events.py
@@ -1,6 +1,6 @@
from posthog.models import Team
-from posthog.queries.session_recordings.session_replay_events import SessionReplayEvents
-from posthog.queries.session_recordings.test.session_replay_sql import produce_replay_summary
+from posthog.session_recordings.queries.session_replay_events import SessionReplayEvents
+from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary
from posthog.test.base import ClickhouseTestMixin, APIBaseTest
from dateutil.relativedelta import relativedelta
from django.utils.timezone import now
diff --git a/posthog/queries/session_recordings/test/test_session_replay_summaries.py b/posthog/session_recordings/queries/test/test_session_replay_summaries.py
similarity index 98%
rename from posthog/queries/session_recordings/test/test_session_replay_summaries.py
rename to posthog/session_recordings/queries/test/test_session_replay_summaries.py
index 0b3e361fa9511..5a1e9b94db842 100644
--- a/posthog/queries/session_recordings/test/test_session_replay_summaries.py
+++ b/posthog/session_recordings/queries/test/test_session_replay_summaries.py
@@ -9,7 +9,7 @@
from posthog.models import Team
from posthog.models.event.util import format_clickhouse_timestamp
from posthog.queries.app_metrics.serializers import AppMetricsRequestSerializer
-from posthog.queries.session_recordings.test.session_replay_sql import produce_replay_summary
+from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary
from posthog.test.base import BaseTest, ClickhouseTestMixin, snapshot_clickhouse_queries
diff --git a/posthog/session_recordings/realtime_snapshots.py b/posthog/session_recordings/realtime_snapshots.py
index ea19b3b405a2b..20e8a0846440c 100644
--- a/posthog/session_recordings/realtime_snapshots.py
+++ b/posthog/session_recordings/realtime_snapshots.py
@@ -72,8 +72,8 @@ def get_realtime_snapshots(team_id: str, session_id: str, attempt_count=0) -> Op
except Exception as e:
# very broad capture to see if there are any unexpected errors
capture_exception(
- "get_realtime_snapshots_failed",
- extras={"attempt_count": attempt_count},
+ e,
+ extras={"attempt_count": attempt_count, "operation": "get_realtime_snapshots"},
tags={"team_id": team_id, "session_id": session_id},
)
raise e
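
The realtime_snapshots.py change above fixes how the error is reported: the first argument to capture_exception should be the caught exception object (a literal string was being passed before), and the operation name moves into extras so the reported event keeps the real stack trace. A small sketch of the corrected call, assuming capture_exception here is sentry_sdk's and accepts the extras/tags keyword arguments used in the hunk:

    from sentry_sdk import capture_exception

    def fetch_with_reporting(team_id: str, session_id: str, attempt_count: int = 0) -> None:
        try:
            raise RuntimeError("stand-in for the real snapshot lookup failing")
        except Exception as e:
            # report the exception object itself; the operation name is just context
            capture_exception(
                e,
                extras={"attempt_count": attempt_count, "operation": "get_realtime_snapshots"},
                tags={"team_id": team_id, "session_id": session_id},
            )
            raise
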
diff --git a/posthog/api/session_recording.py b/posthog/session_recordings/session_recording_api.py
similarity index 98%
rename from posthog/api/session_recording.py
rename to posthog/session_recordings/session_recording_api.py
index a5e5bf7fb937a..41a668083b534 100644
--- a/posthog/api/session_recording.py
+++ b/posthog/session_recordings/session_recording_api.py
@@ -25,19 +25,19 @@
from posthog.models import Filter, User
from posthog.models.filters.session_recordings_filter import SessionRecordingsFilter
from posthog.models.person.person import PersonDistinctId
-from posthog.models.session_recording.session_recording import SessionRecording
-from posthog.models.session_recording_event import SessionRecordingViewed
+from posthog.session_recordings.models.session_recording import SessionRecording
from posthog.permissions import (
ProjectMembershipNecessaryPermissions,
SharingTokenPermission,
TeamMemberAccessPermission,
)
+from posthog.session_recordings.models.session_recording_event import SessionRecordingViewed
-from posthog.queries.session_recordings.session_recording_list_from_replay_summary import (
+from posthog.session_recordings.queries.session_recording_list_from_replay_summary import (
SessionRecordingListFromReplaySummary,
SessionIdEventsQuery,
)
-from posthog.queries.session_recordings.session_recording_properties import SessionRecordingProperties
+from posthog.session_recordings.queries.session_recording_properties import SessionRecordingProperties
from posthog.rate_limit import ClickHouseBurstRateThrottle, ClickHouseSustainedRateThrottle
from posthog.session_recordings.realtime_snapshots import get_realtime_snapshots
from posthog.storage import object_storage
@@ -317,7 +317,7 @@ def _snapshots_v2(self, request: request.Request):
response_data["sources"] = sources
elif source == "realtime":
- snapshots = get_realtime_snapshots(team_id=self.team.pk, session_id=recording.session_id) or []
+ snapshots = get_realtime_snapshots(team_id=self.team.pk, session_id=str(recording.session_id)) or []
event_properties["source"] = "realtime"
event_properties["snapshots_length"] = len(snapshots)
diff --git a/posthog/session_recordings/session_recording_helpers.py b/posthog/session_recordings/session_recording_helpers.py
index 03cb4e71dbd8f..960ac0021c817 100644
--- a/posthog/session_recordings/session_recording_helpers.py
+++ b/posthog/session_recordings/session_recording_helpers.py
@@ -3,13 +3,13 @@
import json
from collections import defaultdict
from datetime import datetime, timezone
-from typing import Any, Callable, DefaultDict, Dict, Generator, List, Optional
+from typing import Any, Callable, DefaultDict, Dict, Generator, List, Optional, Tuple
from dateutil.parser import ParserError, parse
from sentry_sdk.api import capture_exception
from posthog.models import utils
-from posthog.models.session_recording.metadata import (
+from posthog.session_recordings.models.metadata import (
DecompressedRecordingData,
SessionRecordingEventSummary,
SnapshotData,
@@ -126,7 +126,7 @@ def legacy_compress_and_chunk_snapshots(events: List[Event], chunk_size=512 * 10
}
-def split_replay_events(events: List[Event]) -> List[Event]:
+def split_replay_events(events: List[Event]) -> Tuple[List[Event], List[Event]]:
replay, other = [], []
for event in events:
@@ -139,7 +139,9 @@ def preprocess_replay_events_for_blob_ingestion(events: List[Event], max_size_by
return _process_windowed_events(events, lambda x: preprocess_replay_events(x, max_size_bytes=max_size_bytes))
-def preprocess_replay_events(events: List[Event], max_size_bytes=1024 * 1024) -> List[Event]:
+def preprocess_replay_events(
+ _events: List[Event] | Generator[Event, None, None], max_size_bytes=1024 * 1024
+) -> Generator[Event, None, None]:
"""
The events going to blob ingestion are uncompressed (the compression happens in the Kafka producer)
1. Since posthog-js {version} we are grouping events on the frontend in a batch and passing their size in $snapshot_bytes
@@ -149,8 +151,14 @@ def preprocess_replay_events(events: List[Event], max_size_bytes=1024 * 1024) ->
3. If not, we split out the "full snapshots" from the rest (they are typically bigger) and send them individually, trying one more time to group the rest, otherwise sending them individually
"""
+ if isinstance(_events, Generator):
+ # we check the first item in the events below, so we need to be working with a list
+ events = list(_events)
+ else:
+ events = _events
+
if len(events) == 0:
- return []
+ return
size_with_headroom = max_size_bytes * 0.95 # Leave 5% headroom
@@ -158,7 +166,7 @@ def preprocess_replay_events(events: List[Event], max_size_bytes=1024 * 1024) ->
session_id = events[0]["properties"]["$session_id"]
window_id = events[0]["properties"].get("$window_id")
- def new_event(items: List[dict] = None) -> Event:
+ def new_event(items: List[dict] | None = None) -> Event:
return {
**events[0],
"event": "$snapshot_items", # New event name to avoid confusion with the old $snapshot event
@@ -173,7 +181,7 @@ def new_event(items: List[dict] = None) -> Event:
# 1. Group by $snapshot_bytes if any of the events have it
if events[0]["properties"].get("$snapshot_bytes"):
- current_event = None
+ current_event: Dict | None = None
current_event_size = 0
for event in events:
@@ -191,7 +199,8 @@ def new_event(items: List[dict] = None) -> Event:
current_event["properties"]["$snapshot_items"].extend(additional_data)
current_event_size += additional_bytes
- yield current_event
+ if current_event:
+ yield current_event
else:
snapshot_data_list = list(flatten([event["properties"]["$snapshot_data"] for event in events], max_depth=1))
@@ -226,11 +235,13 @@ def new_event(items: List[dict] = None) -> Event:
yield event
-def _process_windowed_events(events: List[Event], fn: Callable[[List[Event], Any], List[Event]]) -> List[Event]:
+def _process_windowed_events(
+ events: List[Event], fn: Callable[[List[Any]], Generator[Event, None, None]]
+) -> List[Event]:
"""
Helper method to simplify grouping events by window_id and session_id, processing them with the given function, and then returning the flattened list
"""
- result = []
+ result: List[Event] = []
snapshots_by_session_and_window_id = defaultdict(list)
for event in events:
@@ -315,7 +326,7 @@ def decompress_chunked_snapshot_data(
event["snapshot_data"]["events_summary"] if return_only_activity_data else decompressed_items
)
else:
- # Really old format where the event is just a single raw rrweb event
+ # Old format where the event is just a single raw rrweb event
snapshot_data_by_window_id[event["window_id"]].append(
get_events_summary_from_snapshot_data([event["snapshot_data"]])[0]
if return_only_activity_data
@@ -339,7 +350,7 @@ def decompress_chunked_snapshot_data(
if len(chunks) == event["snapshot_data"]["chunk_count"]:
count += 1
- chunks_collector[event["snapshot_data"]["chunk_id"]] = None
+ chunks_collector[event["snapshot_data"]["chunk_id"]] = []
# Somehow mark this chunk_id as processed...
processed_chunk_ids.add(event["snapshot_data"]["chunk_id"])
@@ -395,7 +406,9 @@ def convert_to_timestamp(source: str) -> int:
return int(parse(source).timestamp() * 1000)
-def get_events_summary_from_snapshot_data(snapshot_data: List[SnapshotData]) -> List[SessionRecordingEventSummary]:
+def get_events_summary_from_snapshot_data(
+ snapshot_data: List[SnapshotData | None],
+) -> List[SessionRecordingEventSummary]:
"""
Extract a minimal representation of the snapshot data events for easier querying.
'data' and 'data.payload' values are included as long as they are strings or numbers
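
The helper changes above make preprocess_replay_events tolerant of both lists and generators, since _process_windowed_events now feeds it grouped events and expects a generator back; the first event has to be inspected, so generator input is materialised into a list, and an empty input simply ends the generator. A condensed sketch of that shape (simplified, not the full helper):

    from typing import Any, Dict, Generator, List, Union

    Event = Dict[str, Any]

    def preprocess_events(
        _events: Union[List[Event], Generator[Event, None, None]]
    ) -> Generator[Event, None, None]:
        # the first event is inspected below, so a generator is materialised into a list
        events = list(_events) if isinstance(_events, Generator) else _events
        if len(events) == 0:
            return  # a bare return ends the generator, matching the `return` in the hunk

        session_id = events[0]["properties"]["$session_id"]
        for event in events:
            yield {**event, "properties": {**event["properties"], "$session_id": session_id}}
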
diff --git a/posthog/session_recordings/sql/__init__.py b/posthog/session_recordings/sql/__init__.py
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/posthog/models/session_recording_event/sql.py b/posthog/session_recordings/sql/session_recording_event_sql.py
similarity index 100%
rename from posthog/models/session_recording_event/sql.py
rename to posthog/session_recordings/sql/session_recording_event_sql.py
diff --git a/posthog/models/session_replay_event/migrations_sql.py b/posthog/session_recordings/sql/session_replay_event_migrations_sql.py
similarity index 66%
rename from posthog/models/session_replay_event/migrations_sql.py
rename to posthog/session_recordings/sql/session_replay_event_migrations_sql.py
index 09f4e300be624..ac897fccc1d08 100644
--- a/posthog/models/session_replay_event/migrations_sql.py
+++ b/posthog/session_recordings/sql/session_replay_event_migrations_sql.py
@@ -1,6 +1,6 @@
from django.conf import settings
-from posthog.models.session_replay_event.sql import SESSION_REPLAY_EVENTS_DATA_TABLE
+from posthog.session_recordings.sql.session_replay_event_sql import SESSION_REPLAY_EVENTS_DATA_TABLE
DROP_SESSION_REPLAY_EVENTS_TABLE_MV_SQL = (
lambda: "DROP TABLE IF EXISTS session_replay_events_mv ON CLUSTER {cluster}".format(
@@ -65,3 +65,29 @@
table_name=SESSION_REPLAY_EVENTS_DATA_TABLE(),
cluster=settings.CLICKHOUSE_CLUSTER,
)
+
+# migration to add message_count, event_count and _timestamp columns to the session replay table
+ALTER_SESSION_REPLAY_ADD_EVENT_COUNT_COLUMN = """
+ ALTER TABLE {table_name} on CLUSTER '{cluster}'
+ ADD COLUMN IF NOT EXISTS message_count SimpleAggregateFunction(sum, Int64),
+ ADD COLUMN IF NOT EXISTS event_count SimpleAggregateFunction(sum, Int64),
+ -- added in passing so that we can track lag in the data the same way as for other tables
+ ADD COLUMN IF NOT EXISTS _timestamp SimpleAggregateFunction(max, DateTime)
+"""
+
+ADD_EVENT_COUNT_DISTRIBUTED_SESSION_REPLAY_EVENTS_TABLE_SQL = (
+ lambda: ALTER_SESSION_REPLAY_ADD_EVENT_COUNT_COLUMN.format(
+ table_name="session_replay_events",
+ cluster=settings.CLICKHOUSE_CLUSTER,
+ )
+)
+
+ADD_EVENT_COUNT_WRITABLE_SESSION_REPLAY_EVENTS_TABLE_SQL = lambda: ALTER_SESSION_REPLAY_ADD_EVENT_COUNT_COLUMN.format(
+ table_name="writable_session_replay_events",
+ cluster=settings.CLICKHOUSE_CLUSTER,
+)
+
+ADD_EVENT_COUNT_SESSION_REPLAY_EVENTS_TABLE_SQL = lambda: ALTER_SESSION_REPLAY_ADD_EVENT_COUNT_COLUMN.format(
+ table_name=SESSION_REPLAY_EVENTS_DATA_TABLE(),
+ cluster=settings.CLICKHOUSE_CLUSTER,
+)
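
The new migration statements above format a single ALTER template against the distributed, writable and sharded replay tables so that every layer gains the columns, and ADD COLUMN IF NOT EXISTS keeps the statements safe to re-run. A minimal sketch of that formatting pattern with a hypothetical example_count column (table names other than the two literal ones in the patch are assumptions):

    from typing import List

    # illustrative template only; the real migration adds message_count, event_count and _timestamp
    ALTER_TEMPLATE = """
        ALTER TABLE {table_name} ON CLUSTER '{cluster}'
        ADD COLUMN IF NOT EXISTS example_count SimpleAggregateFunction(sum, Int64)
    """

    def alter_statements(cluster: str, sharded_table: str = "sharded_session_replay_events") -> List[str]:
        tables = ["session_replay_events", "writable_session_replay_events", sharded_table]
        return [ALTER_TEMPLATE.format(table_name=table, cluster=cluster) for table in tables]

    for statement in alter_statements(cluster="posthog"):
        print(statement)
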
diff --git a/posthog/models/session_replay_event/sql.py b/posthog/session_recordings/sql/session_replay_event_sql.py
similarity index 88%
rename from posthog/models/session_replay_event/sql.py
rename to posthog/session_recordings/sql/session_replay_event_sql.py
index 1221fd80bb6de..dfe839843979f 100644
--- a/posthog/models/session_replay_event/sql.py
+++ b/posthog/session_recordings/sql/session_replay_event_sql.py
@@ -27,7 +27,9 @@
console_log_count Int64,
console_warn_count Int64,
console_error_count Int64,
- size Int64
+ size Int64,
+ event_count Int64,
+ message_count Int64
) ENGINE = {engine}
"""
@@ -53,7 +55,15 @@
console_warn_count SimpleAggregateFunction(sum, Int64),
console_error_count SimpleAggregateFunction(sum, Int64),
-- this column allows us to estimate the amount of data that is being ingested
- size SimpleAggregateFunction(sum, Int64)
+ size SimpleAggregateFunction(sum, Int64),
+ -- this allows us to count the number of messages received in a session
+ -- often very useful in incidents or debugging
+ message_count SimpleAggregateFunction(sum, Int64),
+ -- this allows us to count the number of snapshot events received in a session
+ -- often very useful in incidents or debugging
+ -- because we batch events we expect message_count to be lower than event_count
+ event_count SimpleAggregateFunction(sum, Int64),
+ _timestamp SimpleAggregateFunction(max, DateTime)
) ENGINE = {engine}
"""
@@ -117,7 +127,11 @@
sum(console_log_count) as console_log_count,
sum(console_warn_count) as console_warn_count,
sum(console_error_count) as console_error_count,
-sum(size) as size
+sum(size) as size,
+-- we can count the number of kafka messages instead of sending it explicitly
+sum(message_count) as message_count,
+sum(event_count) as event_count,
+max(_timestamp) as _timestamp
FROM {database}.kafka_session_replay_events
group by session_id, team_id
""".format(
diff --git a/posthog/session_recordings/test/__init__.py b/posthog/session_recordings/test/__init__.py
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/posthog/api/test/__snapshots__/test_session_recordings.ambr b/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr
similarity index 94%
rename from posthog/api/test/__snapshots__/test_session_recordings.ambr
rename to posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr
index e97965ce54b8f..a9f66096059e3 100644
--- a/posthog/api/test/__snapshots__/test_session_recordings.ambr
+++ b/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr
@@ -119,10 +119,10 @@
FROM "posthog_sessionrecording"
LEFT OUTER JOIN "posthog_sessionrecordingplaylistitem" ON ("posthog_sessionrecording"."session_id" = "posthog_sessionrecordingplaylistitem"."recording_id")
WHERE ("posthog_sessionrecording"."session_id" IN ('5',
- '2',
- '3',
'4',
- '1')
+ '1',
+ '3',
+ '2')
AND "posthog_sessionrecording"."team_id" = 2)
GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
@@ -477,11 +477,11 @@
FROM "posthog_sessionrecording"
LEFT OUTER JOIN "posthog_sessionrecordingplaylistitem" ON ("posthog_sessionrecording"."session_id" = "posthog_sessionrecordingplaylistitem"."recording_id")
WHERE ("posthog_sessionrecording"."session_id" IN ('5',
- '2',
- '3',
'4',
- '6',
- '1')
+ '1',
+ '3',
+ '2',
+ '6')
AND "posthog_sessionrecording"."team_id" = 2)
GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
@@ -848,12 +848,12 @@
FROM "posthog_sessionrecording"
LEFT OUTER JOIN "posthog_sessionrecordingplaylistitem" ON ("posthog_sessionrecording"."session_id" = "posthog_sessionrecordingplaylistitem"."recording_id")
WHERE ("posthog_sessionrecording"."session_id" IN ('5',
- '2',
- '3',
'4',
+ '1',
+ '3',
+ '2',
'7',
- '6',
- '1')
+ '6')
AND "posthog_sessionrecording"."team_id" = 2)
GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
@@ -1258,13 +1258,13 @@
FROM "posthog_sessionrecording"
LEFT OUTER JOIN "posthog_sessionrecordingplaylistitem" ON ("posthog_sessionrecording"."session_id" = "posthog_sessionrecordingplaylistitem"."recording_id")
WHERE ("posthog_sessionrecording"."session_id" IN ('5',
- '2',
- '3',
'4',
- '7',
- '6',
'1',
- '8')
+ '8',
+ '3',
+ '2',
+ '7',
+ '6')
AND "posthog_sessionrecording"."team_id" = 2)
GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
@@ -1684,14 +1684,14 @@
FROM "posthog_sessionrecording"
LEFT OUTER JOIN "posthog_sessionrecordingplaylistitem" ON ("posthog_sessionrecording"."session_id" = "posthog_sessionrecordingplaylistitem"."recording_id")
WHERE ("posthog_sessionrecording"."session_id" IN ('5',
- '2',
- '3',
'4',
- '7',
- '6',
'1',
'8',
- '9')
+ '3',
+ '2',
+ '7',
+ '9',
+ '6')
AND "posthog_sessionrecording"."team_id" = 2)
GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
@@ -2079,15 +2079,15 @@
FROM "posthog_sessionrecording"
LEFT OUTER JOIN "posthog_sessionrecordingplaylistitem" ON ("posthog_sessionrecording"."session_id" = "posthog_sessionrecordingplaylistitem"."recording_id")
WHERE ("posthog_sessionrecording"."session_id" IN ('5',
- '2',
- '3',
'4',
- '7',
- '10',
- '6',
'1',
'8',
- '9')
+ '3',
+ '2',
+ '7',
+ '9',
+ '6',
+ '10')
AND "posthog_sessionrecording"."team_id" = 2)
GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
@@ -2147,61 +2147,6 @@
5 /* ... */) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.195
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.196
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.197
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.198
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.199
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.2
'
SELECT "posthog_organizationmembership"."id",
@@ -2242,122 +2187,6 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.200
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.201
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.202
- '
- SELECT "posthog_sessionrecording"."id",
- "posthog_sessionrecording"."session_id",
- "posthog_sessionrecording"."team_id",
- "posthog_sessionrecording"."created_at",
- "posthog_sessionrecording"."deleted",
- "posthog_sessionrecording"."object_storage_path",
- "posthog_sessionrecording"."distinct_id",
- "posthog_sessionrecording"."duration",
- "posthog_sessionrecording"."active_seconds",
- "posthog_sessionrecording"."inactive_seconds",
- "posthog_sessionrecording"."start_time",
- "posthog_sessionrecording"."end_time",
- "posthog_sessionrecording"."click_count",
- "posthog_sessionrecording"."keypress_count",
- "posthog_sessionrecording"."mouse_activity_count",
- "posthog_sessionrecording"."console_log_count",
- "posthog_sessionrecording"."console_warn_count",
- "posthog_sessionrecording"."console_error_count",
- "posthog_sessionrecording"."start_url",
- "posthog_sessionrecording"."storage_version",
- COUNT("posthog_sessionrecordingplaylistitem"."id") AS "pinned_count"
- FROM "posthog_sessionrecording"
- LEFT OUTER JOIN "posthog_sessionrecordingplaylistitem" ON ("posthog_sessionrecording"."session_id" = "posthog_sessionrecordingplaylistitem"."recording_id")
- WHERE ("posthog_sessionrecording"."session_id" IN ('5',
- '2',
- '3',
- '4',
- '7',
- '10',
- '6',
- '1',
- '8',
- '9')
- AND "posthog_sessionrecording"."team_id" = 2)
- GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.203
- '
- SELECT "posthog_sessionrecordingviewed"."session_id"
- FROM "posthog_sessionrecordingviewed"
- WHERE ("posthog_sessionrecordingviewed"."team_id" = 2
- AND "posthog_sessionrecordingviewed"."user_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.204
- '
- SELECT "posthog_persondistinctid"."id",
- "posthog_persondistinctid"."team_id",
- "posthog_persondistinctid"."person_id",
- "posthog_persondistinctid"."distinct_id",
- "posthog_persondistinctid"."version",
- "posthog_person"."id",
- "posthog_person"."created_at",
- "posthog_person"."properties_last_updated_at",
- "posthog_person"."properties_last_operation",
- "posthog_person"."team_id",
- "posthog_person"."properties",
- "posthog_person"."is_user_id",
- "posthog_person"."is_identified",
- "posthog_person"."uuid",
- "posthog_person"."version"
- FROM "posthog_persondistinctid"
- INNER JOIN "posthog_person" ON ("posthog_persondistinctid"."person_id" = "posthog_person"."id")
- WHERE ("posthog_persondistinctid"."distinct_id" IN ('user1',
- 'user10',
- 'user2',
- 'user3',
- 'user4',
- 'user5',
- 'user6',
- 'user7',
- 'user8',
- 'user9')
- AND "posthog_persondistinctid"."team_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.205
- '
- SELECT "posthog_persondistinctid"."id",
- "posthog_persondistinctid"."team_id",
- "posthog_persondistinctid"."person_id",
- "posthog_persondistinctid"."distinct_id",
- "posthog_persondistinctid"."version"
- FROM "posthog_persondistinctid"
- WHERE "posthog_persondistinctid"."person_id" IN (1,
- 2,
- 3,
- 4,
- 5 /* ... */) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.21
'
SELECT "posthog_instancesetting"."id",
@@ -2832,8 +2661,8 @@
COUNT("posthog_sessionrecordingplaylistitem"."id") AS "pinned_count"
FROM "posthog_sessionrecording"
LEFT OUTER JOIN "posthog_sessionrecordingplaylistitem" ON ("posthog_sessionrecording"."session_id" = "posthog_sessionrecordingplaylistitem"."recording_id")
- WHERE ("posthog_sessionrecording"."session_id" IN ('2',
- '1')
+ WHERE ("posthog_sessionrecording"."session_id" IN ('1',
+ '2')
AND "posthog_sessionrecording"."team_id" = 2)
GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
@@ -3195,9 +3024,9 @@
COUNT("posthog_sessionrecordingplaylistitem"."id") AS "pinned_count"
FROM "posthog_sessionrecording"
LEFT OUTER JOIN "posthog_sessionrecordingplaylistitem" ON ("posthog_sessionrecording"."session_id" = "posthog_sessionrecordingplaylistitem"."recording_id")
- WHERE ("posthog_sessionrecording"."session_id" IN ('2',
+ WHERE ("posthog_sessionrecording"."session_id" IN ('1',
'3',
- '1')
+ '2')
AND "posthog_sessionrecording"."team_id" = 2)
GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
@@ -3560,10 +3389,10 @@
COUNT("posthog_sessionrecordingplaylistitem"."id") AS "pinned_count"
FROM "posthog_sessionrecording"
LEFT OUTER JOIN "posthog_sessionrecordingplaylistitem" ON ("posthog_sessionrecording"."session_id" = "posthog_sessionrecordingplaylistitem"."recording_id")
- WHERE ("posthog_sessionrecording"."session_id" IN ('2',
+ WHERE ("posthog_sessionrecording"."session_id" IN ('4',
+ '1',
'3',
- '4',
- '1')
+ '2')
AND "posthog_sessionrecording"."team_id" = 2)
GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
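The snapshot entries deleted above (cases .195 through .205) were repeated posthog_instancesetting, session recording, and person lookups; their removal is the visible effect of the listing endpoint no longer issuing a fresh batch of queries per recording. As a generic illustration only (this suite asserts on query snapshots, not on the hypothetical helper below), an N+1 guard for such an endpoint can be sketched with Django's CaptureQueriesContext:

    # Hypothetical sketch: fail if listing recordings exceeds a fixed query budget.
    from django.db import connection
    from django.test.utils import CaptureQueriesContext

    def assert_listing_is_not_nplus1(client, team_id: int, query_budget: int = 50) -> None:
        with CaptureQueriesContext(connection) as ctx:
            response = client.get(f"/api/projects/{team_id}/session_recordings")
        assert response.status_code == 200
        assert len(ctx.captured_queries) <= query_budget, ctx.captured_queries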
diff --git a/posthog/session_recordings/test/test_factory.py b/posthog/session_recordings/test/test_factory.py
index f67b69febb531..4213ff02f5566 100644
--- a/posthog/session_recordings/test/test_factory.py
+++ b/posthog/session_recordings/test/test_factory.py
@@ -8,8 +8,8 @@
from posthog.client import sync_execute
from posthog.kafka_client.client import ClickhouseProducer
from posthog.kafka_client.topics import KAFKA_CLICKHOUSE_SESSION_RECORDING_EVENTS
-from posthog.models.session_recording_event.sql import INSERT_SESSION_RECORDING_EVENT_SQL
-from posthog.queries.session_recordings.test.session_replay_sql import produce_replay_summary
+from posthog.session_recordings.sql.session_recording_event_sql import INSERT_SESSION_RECORDING_EVENT_SQL
+from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary
from posthog.session_recordings.session_recording_helpers import (
RRWEB_MAP_EVENT_TYPE,
legacy_preprocess_session_recording_events_for_clickhouse,
diff --git a/posthog/api/test/test_session_recordings.py b/posthog/session_recordings/test/test_session_recordings.py
similarity index 96%
rename from posthog/api/test/test_session_recordings.py
rename to posthog/session_recordings/test/test_session_recordings.py
index 47bd7f0e716a1..e52a241cf3049 100644
--- a/posthog/api/test/test_session_recordings.py
+++ b/posthog/session_recordings/test/test_session_recordings.py
@@ -12,14 +12,13 @@
from freezegun import freeze_time
from rest_framework import status
-from posthog.api.session_recording import DEFAULT_RECORDING_CHUNK_LIMIT
+from posthog.session_recordings.models.session_recording_event import SessionRecordingViewed
from posthog.api.test.test_team import create_team
from posthog.constants import SESSION_RECORDINGS_FILTER_IDS
from posthog.models import Organization, Person, SessionRecording
from posthog.models.filters.session_recordings_filter import SessionRecordingsFilter
-from posthog.models.session_recording_event import SessionRecordingViewed
from posthog.models.team import Team
-from posthog.queries.session_recordings.test.session_replay_sql import produce_replay_summary
+from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary
from posthog.session_recordings.test.test_factory import create_session_recording_events
from posthog.test.base import (
APIBaseTest,
@@ -170,7 +169,7 @@ def test_get_session_recordings(self):
(session_id_one, "user", base_time, base_time + relativedelta(seconds=30), 30, False, user.pk),
]
- @patch("posthog.api.session_recording.SessionRecordingListFromReplaySummary")
+ @patch("posthog.session_recordings.session_recording_api.SessionRecordingListFromReplaySummary")
def test_console_log_filters_are_correctly_passed_to_listing(self, mock_summary_lister):
mock_summary_lister.return_value.run.return_value = ([], False)
@@ -371,6 +370,9 @@ def test_get_single_session_recording_metadata(self):
}
def test_get_default_limit_of_chunks(self):
+ # TODO import causes circular reference... but we're going to delete this soon so...
+ from posthog.session_recordings.session_recording_api import DEFAULT_RECORDING_CHUNK_LIMIT
+
base_time = now()
num_snapshots = DEFAULT_RECORDING_CHUNK_LIMIT + 10
@@ -401,6 +403,9 @@ def test_get_snapshots_is_compressed(self):
self.assertEqual(response.headers.get("Content-Encoding", None), "gzip")
def test_get_snapshots_for_chunked_session_recording(self):
+ # TODO import causes circular reference... but we're going to delete this soon so...
+ from posthog.session_recordings.session_recording_api import DEFAULT_RECORDING_CHUNK_LIMIT
+
chunked_session_id = "chunk_id"
expected_num_requests = 3
num_chunks = 60
@@ -561,7 +566,7 @@ def test_delete_session_recording(self):
# New snapshot loading method
@freeze_time("2023-01-01T00:00:00Z")
- @patch("posthog.api.session_recording.object_storage.list_objects")
+ @patch("posthog.session_recordings.session_recording_api.object_storage.list_objects")
def test_get_snapshots_v2_default_response(self, mock_list_objects) -> None:
session_id = str(uuid.uuid4())
timestamp = round(now().timestamp() * 1000)
@@ -597,7 +602,7 @@ def test_get_snapshots_v2_default_response(self, mock_list_objects) -> None:
mock_list_objects.assert_called_with(f"session_recordings/team_id/{self.team.pk}/session_id/{session_id}/data")
@freeze_time("2023-01-01T00:00:00Z")
- @patch("posthog.api.session_recording.object_storage.list_objects")
+ @patch("posthog.session_recordings.session_recording_api.object_storage.list_objects")
def test_get_snapshots_upgrade_to_v2_if_stored_recording_requires_it(self, mock_list_objects: MagicMock) -> None:
session_id = str(uuid.uuid4())
timestamp = round(now().timestamp() * 1000)
@@ -622,7 +627,7 @@ def test_get_snapshots_upgrade_to_v2_if_stored_recording_requires_it(self, mock_
mock_list_objects.assert_not_called()
@freeze_time("2023-01-01T00:00:00Z")
- @patch("posthog.api.session_recording.object_storage.list_objects")
+ @patch("posthog.session_recordings.session_recording_api.object_storage.list_objects")
def test_get_snapshots_v2_from_lts(self, mock_list_objects: MagicMock) -> None:
session_id = str(uuid.uuid4())
timestamp = round(now().timestamp() * 1000)
@@ -679,7 +684,7 @@ def list_objects_func(path: str) -> List[str]:
]
@freeze_time("2023-01-01T00:00:00Z")
- @patch("posthog.api.session_recording.object_storage.list_objects")
+ @patch("posthog.session_recordings.session_recording_api.object_storage.list_objects")
def test_get_snapshots_v2_default_response_no_realtime_if_old(self, mock_list_objects) -> None:
session_id = str(uuid.uuid4())
old_timestamp = round((now() - timedelta(hours=26)).timestamp() * 1000)
@@ -701,9 +706,9 @@ def test_get_snapshots_v2_default_response_no_realtime_if_old(self, mock_list_ob
]
}
- @patch("posthog.api.session_recording.SessionRecording.get_or_build")
- @patch("posthog.api.session_recording.object_storage.get_presigned_url")
- @patch("posthog.api.session_recording.requests")
+ @patch("posthog.session_recordings.session_recording_api.SessionRecording.get_or_build")
+ @patch("posthog.session_recordings.session_recording_api.object_storage.get_presigned_url")
+ @patch("posthog.session_recordings.session_recording_api.requests")
def test_can_get_session_recording_blob(
self, _mock_requests, mock_presigned_url, mock_get_session_recording
) -> None:
@@ -726,9 +731,9 @@ def presigned_url_sideeffect(key: str, **kwargs):
response = self.client.get(url)
assert response.status_code == status.HTTP_200_OK
- @patch("posthog.api.session_recording.SessionRecording.get_or_build")
- @patch("posthog.api.session_recording.object_storage.get_presigned_url")
- @patch("posthog.api.session_recording.requests")
+ @patch("posthog.session_recordings.session_recording_api.SessionRecording.get_or_build")
+ @patch("posthog.session_recordings.session_recording_api.object_storage.get_presigned_url")
+ @patch("posthog.session_recordings.session_recording_api.requests")
def test_cannot_get_session_recording_blob_for_made_up_sessions(
self, _mock_requests, mock_presigned_url, mock_get_session_recording
) -> None:
@@ -744,7 +749,7 @@ def test_cannot_get_session_recording_blob_for_made_up_sessions(
assert response.status_code == status.HTTP_404_NOT_FOUND
assert mock_presigned_url.call_count == 0
- @patch("posthog.api.session_recording.object_storage.get_presigned_url")
+ @patch("posthog.session_recordings.session_recording_api.object_storage.get_presigned_url")
def test_can_not_get_session_recording_blob_that_does_not_exist(self, mock_presigned_url) -> None:
session_id = str(uuid.uuid4())
blob_key = f"session_recordings/team_id/{self.team.pk}/session_id/{session_id}/data/1682608337071"
diff --git a/posthog/tasks/test/test_usage_report.py b/posthog/tasks/test/test_usage_report.py
index e43c7ddb817fc..86ba59e07a8d8 100644
--- a/posthog/tasks/test/test_usage_report.py
+++ b/posthog/tasks/test/test_usage_report.py
@@ -20,6 +20,7 @@
from posthog.hogql.query import execute_hogql_query
from posthog.models import Organization, Plugin, Team
from posthog.models.dashboard import Dashboard
+from posthog.models.event.util import create_event
from posthog.models.feature_flag import FeatureFlag
from posthog.models.group.util import create_group
from posthog.models.group_type_mapping import GroupTypeMapping
@@ -27,7 +28,16 @@
from posthog.models.sharing_configuration import SharingConfiguration
from posthog.schema import EventsQuery
from posthog.session_recordings.test.test_factory import create_snapshot
-from posthog.tasks.usage_report import capture_event, send_all_org_usage_reports
+from posthog.tasks.usage_report import (
+ _get_all_org_reports,
+ _get_all_usage_data_as_team_rows,
+ _get_full_org_usage_report,
+ _get_full_org_usage_report_as_dict,
+ _get_team_report,
+ capture_event,
+ get_instance_metadata,
+ send_all_org_usage_reports,
+)
from posthog.test.base import (
APIBaseTest,
ClickhouseDestroyTablesMixin,
@@ -37,8 +47,7 @@
flush_persons_and_events,
snapshot_clickhouse_queries,
)
-from posthog.models.event.util import create_event
-from posthog.utils import get_machine_id
+from posthog.utils import get_machine_id, get_previous_day
logger = structlog.get_logger(__name__)
@@ -212,7 +221,7 @@ def _create_sample_usage_data(self) -> None:
create_snapshot(
has_full_snapshot=True,
distinct_id=distinct_id,
- session_id=i,
+ session_id=str(i),
timestamp=now() - relativedelta(hours=12),
team_id=self.org_1_team_2.id,
)
@@ -223,7 +232,7 @@ def _create_sample_usage_data(self) -> None:
create_snapshot(
has_full_snapshot=True,
distinct_id=distinct_id,
- session_id=i + 10,
+ session_id=str(i + 10),
timestamp=now() - relativedelta(hours=48),
team_id=self.org_1_team_2.id,
)
@@ -296,16 +305,20 @@ def _test_usage_report(self) -> List[dict]:
self._create_plugin("Installed but not enabled", False)
self._create_plugin("Installed and enabled", True)
- all_reports = send_all_org_usage_reports(dry_run=False)
+ period = get_previous_day()
+ period_start, period_end = period
+ all_reports = _get_all_org_reports(period_start, period_end)
+ report = _get_full_org_usage_report_as_dict(
+ _get_full_org_usage_report(all_reports[str(self.organization.id)], get_instance_metadata(period))
+ )
- report = all_reports[0]
assert report["table_sizes"]
assert report["table_sizes"]["posthog_event"] < 10**7 # <10MB
assert report["table_sizes"]["posthog_sessionrecordingevent"] < 10**7 # <10MB
assert len(all_reports) == 2
- expectation = [
+ expectations = [
{
"deployment_infrastructure": "tests",
"realm": "hosted-clickhouse",
@@ -316,12 +329,12 @@ def _test_usage_report(self) -> List[dict]:
"site_url": "http://test.posthog.com",
"product": "open source",
"helm": {},
- "clickhouse_version": all_reports[0]["clickhouse_version"],
+ "clickhouse_version": report["clickhouse_version"],
"users_who_logged_in": [],
"users_who_logged_in_count": 0,
"users_who_signed_up": [],
"users_who_signed_up_count": 0,
- "table_sizes": all_reports[0]["table_sizes"],
+ "table_sizes": report["table_sizes"],
"plugins_installed": {"Installed and enabled": 1, "Installed but not enabled": 1},
"plugins_enabled": {"Installed and enabled": 1},
"instance_tag": "none",
@@ -441,12 +454,12 @@ def _test_usage_report(self) -> List[dict]:
"site_url": "http://test.posthog.com",
"product": "open source",
"helm": {},
- "clickhouse_version": all_reports[1]["clickhouse_version"],
+ "clickhouse_version": report["clickhouse_version"],
"users_who_logged_in": [],
"users_who_logged_in_count": 0,
"users_who_signed_up": [],
"users_who_signed_up_count": 0,
- "table_sizes": all_reports[1]["table_sizes"],
+ "table_sizes": report["table_sizes"],
"plugins_installed": {"Installed and enabled": 1, "Installed but not enabled": 1},
"plugins_enabled": {"Installed and enabled": 1},
"instance_tag": "none",
@@ -525,18 +538,22 @@ def _test_usage_report(self) -> List[dict]:
},
]
- for item in expectation:
+ for item in expectations:
item.update(**self.expected_properties)
# tricky: list could be in different order
assert len(all_reports) == 2
- for report in all_reports:
- if report["organization_id"] == expectation[0]["organization_id"]:
- assert report == expectation[0]
- elif report["organization_id"] == expectation[1]["organization_id"]:
- assert report == expectation[1]
+ full_reports = []
+ for expectation in expectations:
+ report = _get_full_org_usage_report_as_dict(
+ _get_full_org_usage_report(
+ all_reports[expectation["organization_id"]], get_instance_metadata(period)
+ )
+ )
+ assert report == expectation
+ full_reports.append(report)
- return all_reports
+ return full_reports
@freeze_time("2022-01-10T00:01:00Z")
@patch("os.environ", {"DEPLOYMENT": "tests"})
@@ -552,6 +569,8 @@ def test_unlicensed_usage_report(self, mock_post: MagicMock, mock_client: MagicM
mock_client.return_value = mock_posthog
all_reports = self._test_usage_report()
+ with self.settings(SITE_URL="http://test.posthog.com"):
+ send_all_org_usage_reports()
# Check calls to other services
mock_post.assert_not_called()
@@ -597,20 +616,21 @@ def test_usage_report_hogql_queries(self) -> None:
run_events_query(query=EventsQuery(select=["event"], limit=50), team=self.team)
sync_execute("SYSTEM FLUSH LOGS")
- all_reports = send_all_org_usage_reports(dry_run=False, at=str(now() + relativedelta(days=1)))
- assert len(all_reports) == 1
+ period = get_previous_day(at=now() + relativedelta(days=1))
+ period_start, period_end = period
+ all_reports = _get_all_usage_data_as_team_rows(period_start, period_end)
- report = all_reports[0]["teams"][str(self.team.pk)]
+ report = _get_team_report(all_reports, self.team)
# We selected 200 or 50 rows, but still read 100 rows to return the query
- assert report["hogql_app_rows_read"] == 100
- assert report["hogql_app_bytes_read"] > 0
- assert report["event_explorer_app_rows_read"] == 100
- assert report["event_explorer_app_bytes_read"] > 0
+ assert report.hogql_app_rows_read == 100
+ assert report.hogql_app_bytes_read > 0
+ assert report.event_explorer_app_rows_read == 100
+ assert report.event_explorer_app_bytes_read > 0
# Nothing was read via the API
- assert report["hogql_api_rows_read"] == 0
- assert report["event_explorer_api_rows_read"] == 0
+ assert report.hogql_api_rows_read == 0
+ assert report.event_explorer_api_rows_read == 0
@freeze_time("2022-01-10T00:01:00Z")
@@ -680,21 +700,19 @@ def test_usage_report_decide_requests(self, billing_task_mock: MagicMock, postho
flush_persons_and_events()
with self.settings(DECIDE_BILLING_ANALYTICS_TOKEN="correct"):
- all_reports = send_all_org_usage_reports(dry_run=False, at=str(now() + relativedelta(days=1)))
+ period = get_previous_day(at=now() + relativedelta(days=1))
+ period_start, period_end = period
+ all_reports = _get_all_org_reports(period_start, period_end)
assert len(all_reports) == 3
- all_reports = sorted(all_reports, key=lambda x: x["organization_name"])
-
- assert [all_reports["organization_name"] for all_reports in all_reports] == [
- "Org 1",
- "Org 2",
- "PostHog",
- ]
-
- org_1_report = all_reports[0]
- org_2_report = all_reports[1]
- analytics_report = all_reports[2]
+ org_1_report = _get_full_org_usage_report_as_dict(
+ _get_full_org_usage_report(all_reports[str(self.org_1.id)], get_instance_metadata(period))
+ )
+ assert org_1_report["organization_name"] == "Org 1"
+ org_2_report = _get_full_org_usage_report_as_dict(
+ _get_full_org_usage_report(all_reports[str(self.org_2.id)], get_instance_metadata(period))
+ )
assert org_1_report["organization_name"] == "Org 1"
assert org_1_report["decide_requests_count_in_period"] == 11
@@ -721,26 +739,6 @@ def test_usage_report_decide_requests(self, billing_task_mock: MagicMock, postho
assert org_2_report["teams"]["5"]["billable_feature_flag_requests_count_in_period"] == 0
assert org_2_report["teams"]["5"]["billable_feature_flag_requests_count_in_month"] == 0
- # billing service calls are made only for org1, which has decide requests, and analytics org - which has decide usage events.
- calls = [
- call(
- org_1_report["organization_id"],
- ANY,
- ),
- call(
- analytics_report["organization_id"],
- ANY,
- ),
- ]
- assert billing_task_mock.delay.call_count == 2
- billing_task_mock.delay.assert_has_calls(
- calls,
- any_order=True,
- )
-
- # capture usage report calls are made for all orgs
- assert posthog_capture_mock.return_value.capture.call_count == 3
-
@patch("posthog.tasks.usage_report.Client")
@patch("posthog.tasks.usage_report.send_report_to_billing_service")
def test_usage_report_local_evaluation_requests(
@@ -792,21 +790,19 @@ def test_usage_report_local_evaluation_requests(
flush_persons_and_events()
with self.settings(DECIDE_BILLING_ANALYTICS_TOKEN="correct"):
- all_reports = send_all_org_usage_reports(dry_run=False, at=str(now() + relativedelta(days=1)))
+ period = get_previous_day(at=now() + relativedelta(days=1))
+ period_start, period_end = period
+ all_reports = _get_all_org_reports(period_start, period_end)
assert len(all_reports) == 3
- all_reports = sorted(all_reports, key=lambda x: x["organization_name"])
-
- assert [all_reports["organization_name"] for all_reports in all_reports] == [
- "Org 1",
- "Org 2",
- "PostHog",
- ]
-
- org_1_report = all_reports[0]
- org_2_report = all_reports[1]
- analytics_report = all_reports[2]
+ org_1_report = _get_full_org_usage_report_as_dict(
+ _get_full_org_usage_report(all_reports[str(self.org_1.id)], get_instance_metadata(period))
+ )
+ assert org_1_report["organization_name"] == "Org 1"
+ org_2_report = _get_full_org_usage_report_as_dict(
+ _get_full_org_usage_report(all_reports[str(self.org_2.id)], get_instance_metadata(period))
+ )
assert org_1_report["organization_name"] == "Org 1"
assert org_1_report["local_evaluation_requests_count_in_period"] == 11
@@ -837,26 +833,6 @@ def test_usage_report_local_evaluation_requests(
assert org_2_report["teams"]["5"]["billable_feature_flag_requests_count_in_period"] == 0
assert org_2_report["teams"]["5"]["billable_feature_flag_requests_count_in_month"] == 0
- # billing service calls are made only for org1, which has decide requests, and analytics org - which has local evaluation usage events.
- calls = [
- call(
- org_1_report["organization_id"],
- ANY,
- ),
- call(
- analytics_report["organization_id"],
- ANY,
- ),
- ]
- assert billing_task_mock.delay.call_count == 2
- billing_task_mock.delay.assert_has_calls(
- calls,
- any_order=True,
- )
-
- # capture usage report calls are made for all orgs
- assert posthog_capture_mock.return_value.capture.call_count == 3
-
class SendUsageTest(LicensedTestMixin, ClickhouseDestroyTablesMixin, APIBaseTest):
def setUp(self) -> None:
@@ -907,18 +883,26 @@ def test_send_usage(self, mock_post: MagicMock, mock_client: MagicMock) -> None:
mock_posthog = MagicMock()
mock_client.return_value = mock_posthog
- all_reports = send_all_org_usage_reports(dry_run=False)
+ period = get_previous_day()
+ period_start, period_end = period
+ all_reports = _get_all_org_reports(period_start, period_end)
+ full_report_as_dict = _get_full_org_usage_report_as_dict(
+ _get_full_org_usage_report(all_reports[str(self.organization.id)], get_instance_metadata(period))
+ )
+ send_all_org_usage_reports(dry_run=False)
license = License.objects.first()
assert license
token = build_billing_token(license, self.organization)
mock_post.assert_called_once_with(
- f"{BILLING_SERVICE_URL}/api/usage", json=all_reports[0], headers={"Authorization": f"Bearer {token}"}
+ f"{BILLING_SERVICE_URL}/api/usage",
+ json=full_report_as_dict,
+ headers={"Authorization": f"Bearer {token}"},
)
mock_posthog.capture.assert_any_call(
get_machine_id(),
"organization usage report",
- {**all_reports[0], "scope": "machine"},
+ {**full_report_as_dict, "scope": "machine"},
groups={"instance": ANY},
timestamp=None,
)
@@ -935,18 +919,26 @@ def test_send_usage_cloud(self, mock_post: MagicMock, mock_client: MagicMock) ->
mock_posthog = MagicMock()
mock_client.return_value = mock_posthog
- all_reports = send_all_org_usage_reports(dry_run=False)
+ period = get_previous_day()
+ period_start, period_end = period
+ all_reports = _get_all_org_reports(period_start, period_end)
+ full_report_as_dict = _get_full_org_usage_report_as_dict(
+ _get_full_org_usage_report(all_reports[str(self.organization.id)], get_instance_metadata(period))
+ )
+ send_all_org_usage_reports(dry_run=False)
license = License.objects.first()
assert license
token = build_billing_token(license, self.organization)
mock_post.assert_called_once_with(
- f"{BILLING_SERVICE_URL}/api/usage", json=all_reports[0], headers={"Authorization": f"Bearer {token}"}
+ f"{BILLING_SERVICE_URL}/api/usage",
+ json=full_report_as_dict,
+ headers={"Authorization": f"Bearer {token}"},
)
mock_posthog.capture.assert_any_call(
self.user.distinct_id,
"organization usage report",
- {**all_reports[0], "scope": "user"},
+ {**full_report_as_dict, "scope": "user"},
groups={"instance": "http://localhost:8000", "organization": str(self.organization.id)},
timestamp=None,
)
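The recurring edit in this test module follows from send_all_org_usage_reports no longer returning the reports; assertions are instead rebuilt from the helpers introduced in posthog/tasks/usage_report.py below. Condensed, the pattern used throughout the tests above looks like this (organization stands in for self.organization, self.org_1, and so on):

    from posthog.tasks.usage_report import (
        _get_all_org_reports,
        _get_full_org_usage_report,
        _get_full_org_usage_report_as_dict,
        get_instance_metadata,
    )
    from posthog.utils import get_previous_day

    period = get_previous_day()  # or get_previous_day(at=now() + relativedelta(days=1))
    period_start, period_end = period
    all_reports = _get_all_org_reports(period_start, period_end)  # keyed by str(organization.id)
    report = _get_full_org_usage_report_as_dict(
        _get_full_org_usage_report(all_reports[str(organization.id)], get_instance_metadata(period))
    )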
diff --git a/posthog/tasks/usage_report.py b/posthog/tasks/usage_report.py
index 45f82b9882374..612213086629e 100644
--- a/posthog/tasks/usage_report.py
+++ b/posthog/tasks/usage_report.py
@@ -534,6 +534,281 @@ def convert_team_usage_rows_to_dict(rows: List[Union[dict, Tuple[int, int]]]) ->
return team_id_map
+def _get_all_usage_data(period_start: datetime, period_end: datetime) -> Dict[str, Any]:
+ """
+ Gets all usage data for the specified period. Clickhouse is good at counting things so
+ we count across all teams rather than doing it one by one
+ """
+ return dict(
+ teams_with_event_count_lifetime=get_teams_with_event_count_lifetime(),
+ teams_with_event_count_in_period=get_teams_with_billable_event_count_in_period(
+ period_start, period_end, count_distinct=True
+ ),
+ teams_with_event_count_in_month=get_teams_with_billable_event_count_in_period(
+ period_start.replace(day=1), period_end
+ ),
+ teams_with_event_count_with_groups_in_period=get_teams_with_event_count_with_groups_in_period(
+ period_start, period_end
+ ),
+ # teams_with_event_count_by_lib=get_teams_with_event_count_by_lib(period_start, period_end),
+ # teams_with_event_count_by_name=get_teams_with_event_count_by_name(period_start, period_end),
+ teams_with_recording_count_in_period=get_teams_with_recording_count_in_period(period_start, period_end),
+ teams_with_recording_count_total=get_teams_with_recording_count_total(),
+ teams_with_decide_requests_count_in_period=get_teams_with_feature_flag_requests_count_in_period(
+ period_start, period_end, FlagRequestType.DECIDE
+ ),
+ teams_with_decide_requests_count_in_month=get_teams_with_feature_flag_requests_count_in_period(
+ period_start.replace(day=1), period_end, FlagRequestType.DECIDE
+ ),
+ teams_with_local_evaluation_requests_count_in_period=get_teams_with_feature_flag_requests_count_in_period(
+ period_start, period_end, FlagRequestType.LOCAL_EVALUATION
+ ),
+ teams_with_local_evaluation_requests_count_in_month=get_teams_with_feature_flag_requests_count_in_period(
+ period_start.replace(day=1), period_end, FlagRequestType.LOCAL_EVALUATION
+ ),
+ teams_with_group_types_total=list(
+ GroupTypeMapping.objects.values("team_id").annotate(total=Count("id")).order_by("team_id")
+ ),
+ teams_with_dashboard_count=list(
+ Dashboard.objects.values("team_id").annotate(total=Count("id")).order_by("team_id")
+ ),
+ teams_with_dashboard_template_count=list(
+ Dashboard.objects.filter(creation_mode="template")
+ .values("team_id")
+ .annotate(total=Count("id"))
+ .order_by("team_id")
+ ),
+ teams_with_dashboard_shared_count=list(
+ Dashboard.objects.filter(sharingconfiguration__enabled=True)
+ .values("team_id")
+ .annotate(total=Count("id"))
+ .order_by("team_id")
+ ),
+ teams_with_dashboard_tagged_count=list(
+ Dashboard.objects.filter(tagged_items__isnull=False)
+ .values("team_id")
+ .annotate(total=Count("id"))
+ .order_by("team_id")
+ ),
+ teams_with_ff_count=list(FeatureFlag.objects.values("team_id").annotate(total=Count("id")).order_by("team_id")),
+ teams_with_ff_active_count=list(
+ FeatureFlag.objects.filter(active=True).values("team_id").annotate(total=Count("id")).order_by("team_id")
+ ),
+ teams_with_hogql_app_bytes_read=get_teams_with_hogql_metric(
+ period_start,
+ period_end,
+ metric="read_bytes",
+ query_types=["hogql_query", "HogQLQuery"],
+ access_method="",
+ ),
+ teams_with_hogql_app_rows_read=get_teams_with_hogql_metric(
+ period_start,
+ period_end,
+ metric="read_rows",
+ query_types=["hogql_query", "HogQLQuery"],
+ access_method="",
+ ),
+ teams_with_hogql_app_duration_ms=get_teams_with_hogql_metric(
+ period_start,
+ period_end,
+ metric="query_duration_ms",
+ query_types=["hogql_query", "HogQLQuery"],
+ access_method="",
+ ),
+ teams_with_hogql_api_bytes_read=get_teams_with_hogql_metric(
+ period_start,
+ period_end,
+ metric="read_bytes",
+ query_types=["hogql_query", "HogQLQuery"],
+ access_method="personal_api_key",
+ ),
+ teams_with_hogql_api_rows_read=get_teams_with_hogql_metric(
+ period_start,
+ period_end,
+ metric="read_rows",
+ query_types=["hogql_query", "HogQLQuery"],
+ access_method="personal_api_key",
+ ),
+ teams_with_hogql_api_duration_ms=get_teams_with_hogql_metric(
+ period_start,
+ period_end,
+ metric="query_duration_ms",
+ query_types=["hogql_query", "HogQLQuery"],
+ access_method="personal_api_key",
+ ),
+ teams_with_event_explorer_app_bytes_read=get_teams_with_hogql_metric(
+ period_start,
+ period_end,
+ metric="read_bytes",
+ query_types=["EventsQuery"],
+ access_method="",
+ ),
+ teams_with_event_explorer_app_rows_read=get_teams_with_hogql_metric(
+ period_start,
+ period_end,
+ metric="read_rows",
+ query_types=["EventsQuery"],
+ access_method="",
+ ),
+ teams_with_event_explorer_app_duration_ms=get_teams_with_hogql_metric(
+ period_start,
+ period_end,
+ metric="query_duration_ms",
+ query_types=["EventsQuery"],
+ access_method="",
+ ),
+ teams_with_event_explorer_api_bytes_read=get_teams_with_hogql_metric(
+ period_start,
+ period_end,
+ metric="read_bytes",
+ query_types=["EventsQuery"],
+ access_method="personal_api_key",
+ ),
+ teams_with_event_explorer_api_rows_read=get_teams_with_hogql_metric(
+ period_start,
+ period_end,
+ metric="read_rows",
+ query_types=["EventsQuery"],
+ access_method="personal_api_key",
+ ),
+ teams_with_event_explorer_api_duration_ms=get_teams_with_hogql_metric(
+ period_start,
+ period_end,
+ metric="query_duration_ms",
+ query_types=["EventsQuery"],
+ access_method="personal_api_key",
+ ),
+ )
+
+
+def _get_all_usage_data_as_team_rows(period_start: datetime, period_end: datetime) -> Dict[str, Any]:
+ """
+ Gets all usage data for the specified period as a map of team_id -> value. This makes it faster
+ to access the data than looping over all_data to find what we want.
+ """
+ all_data = _get_all_usage_data(period_start, period_end)
+ # convert it to a map of team_id -> value
+ for key, rows in all_data.items():
+ all_data[key] = convert_team_usage_rows_to_dict(rows)
+ return all_data
+
+
+def _get_teams_for_usage_reports() -> Sequence[Team]:
+ return list(
+ Team.objects.select_related("organization").exclude(
+ Q(organization__for_internal_metrics=True) | Q(is_demo=True)
+ )
+ )
+
+
+def _get_team_report(all_data: Dict[str, Any], team: Team) -> UsageReportCounters:
+ decide_requests_count_in_month = all_data["teams_with_decide_requests_count_in_month"].get(team.id, 0)
+ decide_requests_count_in_period = all_data["teams_with_decide_requests_count_in_period"].get(team.id, 0)
+ local_evaluation_requests_count_in_period = all_data["teams_with_local_evaluation_requests_count_in_period"].get(
+ team.id, 0
+ )
+ local_evaluation_requests_count_in_month = all_data["teams_with_local_evaluation_requests_count_in_month"].get(
+ team.id, 0
+ )
+ return UsageReportCounters(
+ event_count_lifetime=all_data["teams_with_event_count_lifetime"].get(team.id, 0),
+ event_count_in_period=all_data["teams_with_event_count_in_period"].get(team.id, 0),
+ event_count_in_month=all_data["teams_with_event_count_in_month"].get(team.id, 0),
+ event_count_with_groups_in_period=all_data["teams_with_event_count_with_groups_in_period"].get(team.id, 0),
+ # event_count_by_lib: Di all_data["teams_with_#"].get(team.id, 0),
+ # event_count_by_name: Di all_data["teams_with_#"].get(team.id, 0),
+ recording_count_in_period=all_data["teams_with_recording_count_in_period"].get(team.id, 0),
+ recording_count_total=all_data["teams_with_recording_count_total"].get(team.id, 0),
+ group_types_total=all_data["teams_with_group_types_total"].get(team.id, 0),
+ decide_requests_count_in_period=decide_requests_count_in_period,
+ decide_requests_count_in_month=decide_requests_count_in_month,
+ local_evaluation_requests_count_in_period=local_evaluation_requests_count_in_period,
+ local_evaluation_requests_count_in_month=local_evaluation_requests_count_in_month,
+ billable_feature_flag_requests_count_in_month=decide_requests_count_in_month
+ + (local_evaluation_requests_count_in_month * 10),
+ billable_feature_flag_requests_count_in_period=decide_requests_count_in_period
+ + (local_evaluation_requests_count_in_period * 10),
+ dashboard_count=all_data["teams_with_dashboard_count"].get(team.id, 0),
+ dashboard_template_count=all_data["teams_with_dashboard_template_count"].get(team.id, 0),
+ dashboard_shared_count=all_data["teams_with_dashboard_shared_count"].get(team.id, 0),
+ dashboard_tagged_count=all_data["teams_with_dashboard_tagged_count"].get(team.id, 0),
+ ff_count=all_data["teams_with_ff_count"].get(team.id, 0),
+ ff_active_count=all_data["teams_with_ff_active_count"].get(team.id, 0),
+ hogql_app_bytes_read=all_data["teams_with_hogql_app_bytes_read"].get(team.id, 0),
+ hogql_app_rows_read=all_data["teams_with_hogql_app_rows_read"].get(team.id, 0),
+ hogql_app_duration_ms=all_data["teams_with_hogql_app_duration_ms"].get(team.id, 0),
+ hogql_api_bytes_read=all_data["teams_with_hogql_api_bytes_read"].get(team.id, 0),
+ hogql_api_rows_read=all_data["teams_with_hogql_api_rows_read"].get(team.id, 0),
+ hogql_api_duration_ms=all_data["teams_with_hogql_api_duration_ms"].get(team.id, 0),
+ event_explorer_app_bytes_read=all_data["teams_with_event_explorer_app_bytes_read"].get(team.id, 0),
+ event_explorer_app_rows_read=all_data["teams_with_event_explorer_app_rows_read"].get(team.id, 0),
+ event_explorer_app_duration_ms=all_data["teams_with_event_explorer_app_duration_ms"].get(team.id, 0),
+ event_explorer_api_bytes_read=all_data["teams_with_event_explorer_api_bytes_read"].get(team.id, 0),
+ event_explorer_api_rows_read=all_data["teams_with_event_explorer_api_rows_read"].get(team.id, 0),
+ event_explorer_api_duration_ms=all_data["teams_with_event_explorer_api_duration_ms"].get(team.id, 0),
+ )
+
+
+def _add_team_report_to_org_reports(
+ org_reports: Dict[str, OrgReport], team: Team, team_report: UsageReportCounters, period_start: datetime
+) -> None:
+ org_id = str(team.organization.id)
+ if org_id not in org_reports:
+ org_report = OrgReport(
+ date=period_start.strftime("%Y-%m-%d"),
+ organization_id=org_id,
+ organization_name=team.organization.name,
+ organization_created_at=team.organization.created_at.isoformat(),
+ organization_user_count=get_org_user_count(org_id),
+ team_count=1,
+ teams={str(team.id): team_report},
+ **dataclasses.asdict(team_report), # Clone the team report as the basis
+ )
+ org_reports[org_id] = org_report
+ else:
+ org_report = org_reports[org_id]
+ org_report.teams[str(team.id)] = team_report
+ org_report.team_count += 1
+
+ # Iterate on all fields of the UsageReportCounters and add the values from the team report to the org report
+ for field in dataclasses.fields(UsageReportCounters):
+ if hasattr(team_report, field.name):
+ setattr(
+ org_report,
+ field.name,
+ getattr(org_report, field.name) + getattr(team_report, field.name),
+ )
+
+
+def _get_all_org_reports(period_start: datetime, period_end: datetime) -> Dict[str, OrgReport]:
+ all_data = _get_all_usage_data_as_team_rows(period_start, period_end)
+
+ teams = _get_teams_for_usage_reports()
+
+ org_reports: Dict[str, OrgReport] = {}
+
+ print("Generating reports for teams...") # noqa T201
+ time_now = datetime.now()
+ for team in teams:
+ team_report = _get_team_report(all_data, team)
+ _add_team_report_to_org_reports(org_reports, team, team_report, period_start)
+
+ time_since = datetime.now() - time_now
+ print(f"Generating reports for teams took {time_since.total_seconds()} seconds.") # noqa T201
+ return org_reports
+
+
+def _get_full_org_usage_report(org_report: OrgReport, instance_metadata: InstanceMetadata) -> FullUsageReport:
+ return FullUsageReport(
+ **dataclasses.asdict(org_report),
+ **dataclasses.asdict(instance_metadata),
+ )
+
+
+def _get_full_org_usage_report_as_dict(full_report: FullUsageReport) -> Dict[str, Any]:
+ return dataclasses.asdict(full_report)
+
+
@app.task(ignore_result=True, max_retries=3, autoretry_for=(Exception,))
def send_all_org_usage_reports(
dry_run: bool = False,
@@ -541,7 +816,7 @@ def send_all_org_usage_reports(
capture_event_name: Optional[str] = None,
skip_capture_event: bool = False,
only_organization_id: Optional[str] = None,
-) -> List[dict]: # Dict[str, OrgReport]:
+) -> None:
capture_event_name = capture_event_name or "organization usage report"
at_date = parser.parse(at) if at else None
@@ -550,250 +825,8 @@ def send_all_org_usage_reports(
instance_metadata = get_instance_metadata(period)
- # Clickhouse is good at counting things so we count across all teams rather than doing it one by one
try:
- all_data = dict(
- teams_with_event_count_lifetime=get_teams_with_event_count_lifetime(),
- teams_with_event_count_in_period=get_teams_with_billable_event_count_in_period(
- period_start, period_end, count_distinct=True
- ),
- teams_with_event_count_in_month=get_teams_with_billable_event_count_in_period(
- period_start.replace(day=1), period_end
- ),
- teams_with_event_count_with_groups_in_period=get_teams_with_event_count_with_groups_in_period(
- period_start, period_end
- ),
- # teams_with_event_count_by_lib=get_teams_with_event_count_by_lib(period_start, period_end),
- # teams_with_event_count_by_name=get_teams_with_event_count_by_name(period_start, period_end),
- teams_with_recording_count_in_period=get_teams_with_recording_count_in_period(period_start, period_end),
- teams_with_recording_count_total=get_teams_with_recording_count_total(),
- teams_with_decide_requests_count_in_period=get_teams_with_feature_flag_requests_count_in_period(
- period_start, period_end, FlagRequestType.DECIDE
- ),
- teams_with_decide_requests_count_in_month=get_teams_with_feature_flag_requests_count_in_period(
- period_start.replace(day=1), period_end, FlagRequestType.DECIDE
- ),
- teams_with_local_evaluation_requests_count_in_period=get_teams_with_feature_flag_requests_count_in_period(
- period_start, period_end, FlagRequestType.LOCAL_EVALUATION
- ),
- teams_with_local_evaluation_requests_count_in_month=get_teams_with_feature_flag_requests_count_in_period(
- period_start.replace(day=1), period_end, FlagRequestType.LOCAL_EVALUATION
- ),
- teams_with_group_types_total=list(
- GroupTypeMapping.objects.values("team_id").annotate(total=Count("id")).order_by("team_id")
- ),
- teams_with_dashboard_count=list(
- Dashboard.objects.values("team_id").annotate(total=Count("id")).order_by("team_id")
- ),
- teams_with_dashboard_template_count=list(
- Dashboard.objects.filter(creation_mode="template")
- .values("team_id")
- .annotate(total=Count("id"))
- .order_by("team_id")
- ),
- teams_with_dashboard_shared_count=list(
- Dashboard.objects.filter(sharingconfiguration__enabled=True)
- .values("team_id")
- .annotate(total=Count("id"))
- .order_by("team_id")
- ),
- teams_with_dashboard_tagged_count=list(
- Dashboard.objects.filter(tagged_items__isnull=False)
- .values("team_id")
- .annotate(total=Count("id"))
- .order_by("team_id")
- ),
- teams_with_ff_count=list(
- FeatureFlag.objects.values("team_id").annotate(total=Count("id")).order_by("team_id")
- ),
- teams_with_ff_active_count=list(
- FeatureFlag.objects.filter(active=True)
- .values("team_id")
- .annotate(total=Count("id"))
- .order_by("team_id")
- ),
- teams_with_hogql_app_bytes_read=get_teams_with_hogql_metric(
- period_start,
- period_end,
- metric="read_bytes",
- query_types=["hogql_query", "HogQLQuery"],
- access_method="",
- ),
- teams_with_hogql_app_rows_read=get_teams_with_hogql_metric(
- period_start,
- period_end,
- metric="read_rows",
- query_types=["hogql_query", "HogQLQuery"],
- access_method="",
- ),
- teams_with_hogql_app_duration_ms=get_teams_with_hogql_metric(
- period_start,
- period_end,
- metric="query_duration_ms",
- query_types=["hogql_query", "HogQLQuery"],
- access_method="",
- ),
- teams_with_hogql_api_bytes_read=get_teams_with_hogql_metric(
- period_start,
- period_end,
- metric="read_bytes",
- query_types=["hogql_query", "HogQLQuery"],
- access_method="personal_api_key",
- ),
- teams_with_hogql_api_rows_read=get_teams_with_hogql_metric(
- period_start,
- period_end,
- metric="read_rows",
- query_types=["hogql_query", "HogQLQuery"],
- access_method="personal_api_key",
- ),
- teams_with_hogql_api_duration_ms=get_teams_with_hogql_metric(
- period_start,
- period_end,
- metric="query_duration_ms",
- query_types=["hogql_query", "HogQLQuery"],
- access_method="personal_api_key",
- ),
- teams_with_event_explorer_app_bytes_read=get_teams_with_hogql_metric(
- period_start,
- period_end,
- metric="read_bytes",
- query_types=["EventsQuery"],
- access_method="",
- ),
- teams_with_event_explorer_app_rows_read=get_teams_with_hogql_metric(
- period_start,
- period_end,
- metric="read_rows",
- query_types=["EventsQuery"],
- access_method="",
- ),
- teams_with_event_explorer_app_duration_ms=get_teams_with_hogql_metric(
- period_start,
- period_end,
- metric="query_duration_ms",
- query_types=["EventsQuery"],
- access_method="",
- ),
- teams_with_event_explorer_api_bytes_read=get_teams_with_hogql_metric(
- period_start,
- period_end,
- metric="read_bytes",
- query_types=["EventsQuery"],
- access_method="personal_api_key",
- ),
- teams_with_event_explorer_api_rows_read=get_teams_with_hogql_metric(
- period_start,
- period_end,
- metric="read_rows",
- query_types=["EventsQuery"],
- access_method="personal_api_key",
- ),
- teams_with_event_explorer_api_duration_ms=get_teams_with_hogql_metric(
- period_start,
- period_end,
- metric="query_duration_ms",
- query_types=["EventsQuery"],
- access_method="personal_api_key",
- ),
- )
-
- # The data is all as raw rows which will dramatically slow down the upcoming loop
- # so we convert it to a map of team_id -> value
- for key, rows in all_data.items():
- all_data[key] = convert_team_usage_rows_to_dict(rows)
-
- teams: Sequence[Team] = list(
- Team.objects.select_related("organization").exclude(
- Q(organization__for_internal_metrics=True) | Q(is_demo=True)
- )
- )
-
- org_reports: Dict[str, OrgReport] = {}
-
- print("Generating reports for teams...") # noqa T201
- time_now = datetime.now()
- for team in teams:
- decide_requests_count_in_month = all_data["teams_with_decide_requests_count_in_month"].get(team.id, 0)
- decide_requests_count_in_period = all_data["teams_with_decide_requests_count_in_period"].get(team.id, 0)
- local_evaluation_requests_count_in_period = all_data[
- "teams_with_local_evaluation_requests_count_in_period"
- ].get(team.id, 0)
- local_evaluation_requests_count_in_month = all_data[
- "teams_with_local_evaluation_requests_count_in_month"
- ].get(team.id, 0)
-
- team_report = UsageReportCounters(
- event_count_lifetime=all_data["teams_with_event_count_lifetime"].get(team.id, 0),
- event_count_in_period=all_data["teams_with_event_count_in_period"].get(team.id, 0),
- event_count_in_month=all_data["teams_with_event_count_in_month"].get(team.id, 0),
- event_count_with_groups_in_period=all_data["teams_with_event_count_with_groups_in_period"].get(
- team.id, 0
- ),
- # event_count_by_lib: Di all_data["teams_with_#"].get(team.id, 0),
- # event_count_by_name: Di all_data["teams_with_#"].get(team.id, 0),
- recording_count_in_period=all_data["teams_with_recording_count_in_period"].get(team.id, 0),
- recording_count_total=all_data["teams_with_recording_count_total"].get(team.id, 0),
- group_types_total=all_data["teams_with_group_types_total"].get(team.id, 0),
- decide_requests_count_in_period=decide_requests_count_in_period,
- decide_requests_count_in_month=decide_requests_count_in_month,
- local_evaluation_requests_count_in_period=local_evaluation_requests_count_in_period,
- local_evaluation_requests_count_in_month=local_evaluation_requests_count_in_month,
- billable_feature_flag_requests_count_in_month=decide_requests_count_in_month
- + (local_evaluation_requests_count_in_month * 10),
- billable_feature_flag_requests_count_in_period=decide_requests_count_in_period
- + (local_evaluation_requests_count_in_period * 10),
- dashboard_count=all_data["teams_with_dashboard_count"].get(team.id, 0),
- dashboard_template_count=all_data["teams_with_dashboard_template_count"].get(team.id, 0),
- dashboard_shared_count=all_data["teams_with_dashboard_shared_count"].get(team.id, 0),
- dashboard_tagged_count=all_data["teams_with_dashboard_tagged_count"].get(team.id, 0),
- ff_count=all_data["teams_with_ff_count"].get(team.id, 0),
- ff_active_count=all_data["teams_with_ff_active_count"].get(team.id, 0),
- hogql_app_bytes_read=all_data["teams_with_hogql_app_bytes_read"].get(team.id, 0),
- hogql_app_rows_read=all_data["teams_with_hogql_app_rows_read"].get(team.id, 0),
- hogql_app_duration_ms=all_data["teams_with_hogql_app_duration_ms"].get(team.id, 0),
- hogql_api_bytes_read=all_data["teams_with_hogql_api_bytes_read"].get(team.id, 0),
- hogql_api_rows_read=all_data["teams_with_hogql_api_rows_read"].get(team.id, 0),
- hogql_api_duration_ms=all_data["teams_with_hogql_api_duration_ms"].get(team.id, 0),
- event_explorer_app_bytes_read=all_data["teams_with_event_explorer_app_bytes_read"].get(team.id, 0),
- event_explorer_app_rows_read=all_data["teams_with_event_explorer_app_rows_read"].get(team.id, 0),
- event_explorer_app_duration_ms=all_data["teams_with_event_explorer_app_duration_ms"].get(team.id, 0),
- event_explorer_api_bytes_read=all_data["teams_with_event_explorer_api_bytes_read"].get(team.id, 0),
- event_explorer_api_rows_read=all_data["teams_with_event_explorer_api_rows_read"].get(team.id, 0),
- event_explorer_api_duration_ms=all_data["teams_with_event_explorer_api_duration_ms"].get(team.id, 0),
- )
-
- org_id = str(team.organization.id)
-
- if org_id not in org_reports:
- org_report = OrgReport(
- date=period_start.strftime("%Y-%m-%d"),
- organization_id=org_id,
- organization_name=team.organization.name,
- organization_created_at=team.organization.created_at.isoformat(),
- organization_user_count=get_org_user_count(org_id),
- team_count=1,
- teams={str(team.id): team_report},
- **dataclasses.asdict(team_report), # Clone the team report as the basis
- )
- org_reports[org_id] = org_report
- else:
- org_report = org_reports[org_id]
- org_report.teams[str(team.id)] = team_report
- org_report.team_count += 1
-
- # Iterate on all fields of the UsageReportCounters and add the values from the team report to the org report
- for field in dataclasses.fields(UsageReportCounters):
- if hasattr(team_report, field.name):
- setattr(
- org_report,
- field.name,
- getattr(org_report, field.name) + getattr(team_report, field.name),
- )
- time_since = datetime.now() - time_now
- print(f"Generating reports for teams took {time_since.total_seconds()} seconds.") # noqa T201
-
- all_reports = []
+ org_reports = _get_all_org_reports(period_start, period_end)
print("Sending usage reports to PostHog and Billing...") # noqa T201
time_now = datetime.now()
@@ -803,12 +836,8 @@ def send_all_org_usage_reports(
if only_organization_id and only_organization_id != org_id:
continue
- full_report = FullUsageReport(
- **dataclasses.asdict(org_report),
- **dataclasses.asdict(instance_metadata),
- )
- full_report_dict = dataclasses.asdict(full_report)
- all_reports.append(full_report_dict)
+ full_report = _get_full_org_usage_report(org_report, instance_metadata)
+ full_report_dict = _get_full_org_usage_report_as_dict(full_report)
if dry_run:
continue
@@ -823,7 +852,6 @@ def send_all_org_usage_reports(
send_report_to_billing_service.delay(org_id, full_report_dict)
time_since = datetime.now() - time_now
print(f"Sending usage reports to PostHog and Billing took {time_since.total_seconds()} seconds.") # noqa T201
- return all_reports
except Exception as err:
capture_exception(err)
raise err
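Taken together, the extraction above turns send_all_org_usage_reports into a thin orchestrator over individually testable helpers. A condensed view of the resulting flow (the loop header is not shown in the hunks above, so the iteration below is an assumption; capture and billing calls are elided):

    period = get_previous_day(at=at_date)
    period_start, period_end = period
    instance_metadata = get_instance_metadata(period)

    org_reports = _get_all_org_reports(period_start, period_end)  # per-team counters rolled up per org
    for org_id, org_report in org_reports.items():  # assumed iteration; header not in this diff
        if only_organization_id and only_organization_id != org_id:
            continue
        full_report = _get_full_org_usage_report(org_report, instance_metadata)
        full_report_dict = _get_full_org_usage_report_as_dict(full_report)
        if dry_run:
            continue
        # capture_event(...) and send_report_to_billing_service.delay(org_id, full_report_dict)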
diff --git a/posthog/temporal/tests/batch_exports/base.py b/posthog/temporal/tests/batch_exports/base.py
index b1ab1caf90909..88a52fe798426 100644
--- a/posthog/temporal/tests/batch_exports/base.py
+++ b/posthog/temporal/tests/batch_exports/base.py
@@ -1,3 +1,4 @@
+import datetime as dt
import json
import typing
@@ -71,3 +72,10 @@ async def insert_events(client: ClickHouseClient, events: list[EventValues]):
def amaterialize(table: typing.Literal["events", "person", "groups"], column: str):
"""Materialize a column in a table."""
return materialize(table, column)
+
+
+def to_isoformat(d: str | None) -> str | None:
+ """Parse a string and return it as default isoformatted."""
+ if d is None:
+ return None
+ return dt.datetime.fromisoformat(d).replace(tzinfo=dt.timezone.utc).isoformat()
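to_isoformat normalizes the naive timestamp strings used in the test fixtures into the UTC ISO 8601 form the comparisons below expect, so timestamp-like fields can be checked with plain string equality. For example (values illustrative):

    assert to_isoformat("2023-04-25 13:30:00.123456") == "2023-04-25T13:30:00.123456+00:00"
    assert to_isoformat(None) is None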
diff --git a/posthog/temporal/tests/batch_exports/test_batch_exports.py b/posthog/temporal/tests/batch_exports/test_batch_exports.py
index 913cd5b45d2c3..50ee763b5d4d9 100644
--- a/posthog/temporal/tests/batch_exports/test_batch_exports.py
+++ b/posthog/temporal/tests/batch_exports/test_batch_exports.py
@@ -12,6 +12,9 @@
import pytest_asyncio
from django.conf import settings
+from posthog.temporal.tests.batch_exports.base import (
+ to_isoformat,
+)
from posthog.temporal.workflows.batch_exports import (
BatchExportTemporaryFile,
get_data_interval,
@@ -288,8 +291,13 @@ async def test_get_results_iterator(client):
for expected, result in zip(all_expected, all_result):
for key, value in result.items():
+ if key in ("timestamp", "inserted_at", "created_at"):
+ expected_value = to_isoformat(expected[key])
+ else:
+ expected_value = expected[key]
+
# Some keys will be missing from result, so let's only check the ones we have.
- assert value == expected[key], f"{key} value in {result} didn't match value in {expected}"
+ assert value == expected_value, f"{key} value in {result} didn't match value in {expected}"
@pytest.mark.django_db
@@ -343,8 +351,13 @@ async def test_get_results_iterator_handles_duplicates(client):
for expected, result in zip(all_expected, all_result):
for key, value in result.items():
+ if key in ("timestamp", "inserted_at", "created_at"):
+ expected_value = to_isoformat(expected[key])
+ else:
+ expected_value = expected[key]
+
# Some keys will be missing from result, so let's only check the ones we have.
- assert value == expected[key], f"{key} value in {result} didn't match value in {expected}"
+ assert value == expected_value, f"{key} value in {result} didn't match value in {expected}"
@pytest.mark.django_db
@@ -400,8 +413,13 @@ async def test_get_results_iterator_can_exclude_events(client):
for expected, result in zip(all_expected, all_result):
for key, value in result.items():
+ if key in ("timestamp", "inserted_at", "created_at"):
+ expected_value = to_isoformat(expected[key])
+ else:
+ expected_value = expected[key]
+
# Some keys will be missing from result, so let's only check the ones we have.
- assert value == expected[key], f"{key} value in {result} didn't match value in {expected}"
+ assert value == expected_value, f"{key} value in {result} didn't match value in {expected}"
@pytest.mark.parametrize(
diff --git a/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py
index 392534fc8999c..08f0d285a944c 100644
--- a/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py
+++ b/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py
@@ -3,11 +3,13 @@
import gzip
import itertools
import json
+import os
from random import randint
from unittest import mock
from uuid import uuid4
import boto3
+import botocore.exceptions
import brotli
import pytest
from django.conf import settings
@@ -23,6 +25,7 @@
EventValues,
amaterialize,
insert_events,
+ to_isoformat,
)
from posthog.temporal.tests.batch_exports.fixtures import (
acreate_batch_export,
@@ -40,6 +43,18 @@
TEST_ROOT_BUCKET = "test-batch-exports"
+
+def check_valid_credentials() -> bool:
+ """Check if there are valid AWS credentials in the environment."""
+ sts = boto3.client("sts")
+ try:
+ sts.get_caller_identity()
+ except botocore.exceptions.ClientError:
+ return False
+ else:
+ return True
+
+
create_test_client = functools.partial(boto3.client, endpoint_url=settings.OBJECT_STORAGE_ENDPOINT)
@@ -110,12 +125,19 @@ def assert_events_in_s3(
if exclude_events is None:
exclude_events = []
- expected_events = [
- {k: v for k, v in event.items() if k not in ["team_id", "_timestamp"]}
- for event in events
- if event["event"] not in exclude_events
- ]
- expected_events.sort(key=lambda x: x["timestamp"])
+ def to_expected_event(event):
+ mapping_functions = {
+ "timestamp": to_isoformat,
+ "inserted_at": to_isoformat,
+ "created_at": to_isoformat,
+ }
+ return {
+ k: mapping_functions.get(k, lambda x: x)(v) for k, v in event.items() if k not in ["team_id", "_timestamp"]
+ }
+
+ expected_events = list(map(to_expected_event, (event for event in events if event["event"] not in exclude_events)))
+
+ expected_events.sort(key=lambda x: x["timestamp"] if x["timestamp"] is not None else 0)
# First check one event, the first one, so that we can get a nice diff if
# the included data is different.
@@ -422,6 +444,165 @@ async def test_s3_export_workflow_with_minio_bucket(
assert_events_in_s3(s3_client, bucket_name, prefix, events, compression, exclude_events)
+@pytest.mark.skipif(
+ "S3_TEST_BUCKET" not in os.environ or not check_valid_credentials(),
+ reason="AWS credentials not set in environment or missing S3_TEST_BUCKET variable",
+)
+@pytest.mark.django_db
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+ "interval,compression,encryption,exclude_events",
+ itertools.product(["hour", "day"], [None, "gzip", "brotli"], [None, "AES256", "aws:kms"], [None, ["test-exclude"]]),
+)
+async def test_s3_export_workflow_with_s3_bucket(interval, compression, encryption, exclude_events):
+ """Test S3 Export Workflow end-to-end by using an S3 bucket.
+
+ The S3_TEST_BUCKET environment variable is used to set the name of the bucket for this test.
+ This test will be skipped if no valid AWS credentials exist, or if the S3_TEST_BUCKET environment
+ variable is not set.
+
+ The workflow should update the batch export run status to completed and produce the expected
+ records to the S3 bucket.
+ """
+ bucket_name = os.getenv("S3_TEST_BUCKET")
+ kms_key_id = os.getenv("S3_TEST_KMS_KEY_ID")
+ prefix = f"posthog-events-{str(uuid4())}"
+ destination_data = {
+ "type": "S3",
+ "config": {
+ "bucket_name": bucket_name,
+ "region": "us-east-1",
+ "prefix": prefix,
+ "aws_access_key_id": "object_storage_root_user",
+ "aws_secret_access_key": "object_storage_root_password",
+ "compression": compression,
+ "exclude_events": exclude_events,
+ "encryption": encryption,
+ "kms_key_id": kms_key_id if encryption == "aws:kms" else None,
+ },
+ }
+
+ batch_export_data = {
+ "name": "my-production-s3-bucket-destination",
+ "destination": destination_data,
+ "interval": interval,
+ }
+
+ organization = await acreate_organization("test")
+ team = await acreate_team(organization=organization)
+ batch_export = await acreate_batch_export(
+ team_id=team.pk,
+ name=batch_export_data["name"],
+ destination_data=batch_export_data["destination"],
+ interval=batch_export_data["interval"],
+ )
+
+ events: list[EventValues] = [
+ {
+ "uuid": str(uuid4()),
+ "event": "test",
+ "timestamp": "2023-04-25 13:30:00.000000",
+ "created_at": "2023-04-25 13:30:00.000000",
+ "inserted_at": "2023-04-25 13:30:00.000000",
+ "_timestamp": "2023-04-25 13:30:00",
+ "person_id": str(uuid4()),
+ "person_properties": {"$browser": "Chrome", "$os": "Mac OS X"},
+ "team_id": team.pk,
+ "properties": {"$browser": "Chrome", "$os": "Mac OS X"},
+ "distinct_id": str(uuid4()),
+ "elements_chain": "this is a comman, separated, list, of css selectors(?)",
+ },
+ {
+ "uuid": str(uuid4()),
+ "event": "test-exclude",
+ "timestamp": "2023-04-25 14:29:00.000000",
+ "created_at": "2023-04-25 14:29:00.000000",
+ "inserted_at": "2023-04-25 14:29:00.000000",
+ "_timestamp": "2023-04-25 14:29:00",
+ "person_id": str(uuid4()),
+ "person_properties": {"$browser": "Chrome", "$os": "Mac OS X"},
+ "team_id": team.pk,
+ "properties": {"$browser": "Chrome", "$os": "Mac OS X"},
+ "distinct_id": str(uuid4()),
+ "elements_chain": "this is a comman, separated, list, of css selectors(?)",
+ },
+ ]
+
+ if interval == "day":
+ # Add an event outside the hour range but within the day range to ensure it's exported too.
+ events_outside_hour: list[EventValues] = [
+ {
+ "uuid": str(uuid4()),
+ "event": "test",
+ "timestamp": "2023-04-25 00:30:00.000000",
+ "created_at": "2023-04-25 00:30:00.000000",
+ "inserted_at": "2023-04-25 00:30:00.000000",
+ "_timestamp": "2023-04-25 00:30:00",
+ "person_id": str(uuid4()),
+ "person_properties": {"$browser": "Chrome", "$os": "Mac OS X"},
+ "team_id": team.pk,
+ "properties": {"$browser": "Chrome", "$os": "Mac OS X"},
+ "distinct_id": str(uuid4()),
+ "elements_chain": "this is a comman, separated, list, of css selectors(?)",
+ }
+ ]
+ events += events_outside_hour
+
+ ch_client = ClickHouseClient(
+ url=settings.CLICKHOUSE_HTTP_URL,
+ user=settings.CLICKHOUSE_USER,
+ password=settings.CLICKHOUSE_PASSWORD,
+ database=settings.CLICKHOUSE_DATABASE,
+ )
+
+ # Insert some data into the `sharded_events` table.
+ await insert_events(
+ client=ch_client,
+ events=events,
+ )
+
+ workflow_id = str(uuid4())
+ inputs = S3BatchExportInputs(
+ team_id=team.pk,
+ batch_export_id=str(batch_export.id),
+ data_interval_end="2023-04-25 14:30:00.000000",
+ interval=interval,
+ **batch_export.destination.config,
+ )
+
+ s3_client = boto3.client("s3")
+
+ def create_s3_client(*args, **kwargs):
+ """Mock function to return an already initialized S3 client."""
+ return s3_client
+
+ async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
+ async with Worker(
+ activity_environment.client,
+ task_queue=settings.TEMPORAL_TASK_QUEUE,
+ workflows=[S3BatchExportWorkflow],
+ activities=[create_export_run, insert_into_s3_activity, update_export_run_status],
+ workflow_runner=UnsandboxedWorkflowRunner(),
+ ):
+ with mock.patch("posthog.temporal.workflows.s3_batch_export.boto3.client", side_effect=create_s3_client):
+ await activity_environment.client.execute_workflow(
+ S3BatchExportWorkflow.run,
+ inputs,
+ id=workflow_id,
+ task_queue=settings.TEMPORAL_TASK_QUEUE,
+ retry_policy=RetryPolicy(maximum_attempts=1),
+ execution_timeout=dt.timedelta(seconds=10),
+ )
+
+ runs = await afetch_batch_export_runs(batch_export_id=batch_export.id)
+ assert len(runs) == 1
+
+ run = runs[0]
+ assert run.status == "Completed"
+
+ assert_events_in_s3(s3_client, bucket_name, prefix, events, compression, exclude_events)
+
+
@pytest.mark.django_db
@pytest.mark.asyncio
@pytest.mark.parametrize("compression", [None, "gzip"])
diff --git a/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py
index 3b153668d5549..979929d1ce205 100644
--- a/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py
+++ b/posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py
@@ -21,6 +21,7 @@
from posthog.temporal.tests.batch_exports.base import (
EventValues,
insert_events,
+ to_isoformat,
)
from posthog.temporal.tests.batch_exports.fixtures import (
acreate_batch_export,
@@ -369,14 +370,16 @@ async def test_snowflake_export_workflow_exports_events_in_the_last_hour_for_the
]
json_data.sort(key=lambda x: x["timestamp"])
# Drop _timestamp and team_id from events
- expected_events = [
- {
+ expected_events = []
+ for event in events:
+ expected_event = {
key: value
for key, value in event.items()
if key in ("uuid", "event", "timestamp", "properties", "person_id")
}
- for event in events
- ]
+ expected_event["timestamp"] = to_isoformat(event["timestamp"])
+ expected_events.append(expected_event)
+
assert json_data[0] == expected_events[0]
assert json_data == expected_events
diff --git a/posthog/temporal/workflows/batch_exports.py b/posthog/temporal/workflows/batch_exports.py
index 5fe3cde5e123a..c79262a0fe86a 100644
--- a/posthog/temporal/workflows/batch_exports.py
+++ b/posthog/temporal/workflows/batch_exports.py
@@ -143,14 +143,12 @@ def iter_batch_records(batch) -> typing.Generator[dict[str, typing.Any], None, N
elements = json.dumps(record.get("elements_chain").decode())
record = {
- "created_at": record.get("created_at").strftime("%Y-%m-%d %H:%M:%S.%f"),
+ "created_at": record.get("created_at").isoformat(),
"distinct_id": record.get("distinct_id").decode(),
"elements": elements,
"elements_chain": record.get("elements_chain").decode(),
"event": record.get("event").decode(),
- "inserted_at": record.get("inserted_at").strftime("%Y-%m-%d %H:%M:%S.%f")
- if record.get("inserted_at")
- else None,
+ "inserted_at": record.get("inserted_at").isoformat() if record.get("inserted_at") else None,
"ip": properties.get("$ip", None) if properties else None,
"person_id": record.get("person_id").decode(),
"person_properties": json.loads(person_properties) if person_properties else None,
@@ -159,7 +157,7 @@ def iter_batch_records(batch) -> typing.Generator[dict[str, typing.Any], None, N
"properties": properties,
"site_url": properties.get("$current_url", None) if properties else None,
"team_id": record.get("team_id"),
- "timestamp": record.get("timestamp").strftime("%Y-%m-%d %H:%M:%S.%f"),
+ "timestamp": record.get("timestamp").isoformat(),
"uuid": record.get("uuid").decode(),
}
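
Switching from strftime to isoformat changes the separator and, for timezone-aware values, preserves the UTC offset. A small illustration, assuming the ClickHouse records carry aware UTC datetimes:

    from datetime import datetime, timezone

    ts = datetime(2023, 4, 25, 13, 30, tzinfo=timezone.utc)

    ts.strftime("%Y-%m-%d %H:%M:%S.%f")  # '2023-04-25 13:30:00.000000' (offset dropped)
    ts.isoformat()                        # '2023-04-25T13:30:00+00:00'  (offset kept)
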
diff --git a/posthog/temporal/workflows/postgres_batch_export.py b/posthog/temporal/workflows/postgres_batch_export.py
index a396f361b77c5..b81c7496b3adb 100644
--- a/posthog/temporal/workflows/postgres_batch_export.py
+++ b/posthog/temporal/workflows/postgres_batch_export.py
@@ -58,9 +58,10 @@ def copy_tsv_to_postgres(tsv_file, postgres_connection, schema: str, table_name:
tsv_file.seek(0)
with postgres_connection.cursor() as cursor:
+ cursor.execute(sql.SQL("SET search_path TO {schema}").format(schema=sql.Identifier(schema)))
cursor.copy_from(
tsv_file,
- sql.Identifier(schema, table_name).as_string(postgres_connection),
+ table_name,
null="",
columns=schema_columns,
)
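
Recent psycopg2 releases treat copy_from's table argument as a single identifier to be quoted, so a pre-rendered "schema"."table" string no longer resolves; setting the search_path first and passing a bare table name is one workaround. A sketch of the same pattern outside the workflow, with placeholder connection details and a hypothetical exports.events table:

    import io

    import psycopg2
    from psycopg2 import sql

    # Placeholder DSN for illustration only.
    connection = psycopg2.connect("dbname=exports user=posthog")

    tsv = io.StringIO("1\tfirst row\n2\tsecond row\n")

    with connection, connection.cursor() as cursor:
        # Make the target schema the default so copy_from only needs the bare table name.
        cursor.execute(sql.SQL("SET search_path TO {schema}").format(schema=sql.Identifier("exports")))
        cursor.copy_from(tsv, "events", null="", columns=("id", "payload"))
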
@@ -245,7 +246,11 @@ async def run(self, inputs: PostgresBatchExportInputs):
initial_interval=dt.timedelta(seconds=10),
maximum_interval=dt.timedelta(seconds=120),
maximum_attempts=10,
- non_retryable_error_types=[],
+ non_retryable_error_types=[
+ # Raised on errors related to the database's operation and not necessarily under
+ # the programmer's control, e.g. an unexpected disconnect or a missing database object.
+ "OperationalError"
+ ],
),
)
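
Temporal matches non_retryable_error_types against the failure's type name; in the Python SDK an arbitrary exception raised inside an activity is generally surfaced as an ApplicationError whose type is the exception class name, so a psycopg2 OperationalError arrives as the type "OperationalError". A sketch of the policy shape, mirroring the values in the hunk above:

    import datetime as dt

    from temporalio.common import RetryPolicy

    retry_policy = RetryPolicy(
        initial_interval=dt.timedelta(seconds=10),
        maximum_interval=dt.timedelta(seconds=120),
        maximum_attempts=10,
        # Matched by error type name, not by exception class object.
        non_retryable_error_types=["OperationalError"],
    )
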
diff --git a/posthog/temporal/workflows/s3_batch_export.py b/posthog/temporal/workflows/s3_batch_export.py
index 028b6f422e26f..13bbf183e5d06 100644
--- a/posthog/temporal/workflows/s3_batch_export.py
+++ b/posthog/temporal/workflows/s3_batch_export.py
@@ -85,15 +85,20 @@ class S3MultiPartUploadState(typing.NamedTuple):
parts: list[dict[str, str | int]]
+Part = dict[str, str | int]
+
+
class S3MultiPartUpload:
"""An S3 multi-part upload."""
- def __init__(self, s3_client, bucket_name, key):
+ def __init__(self, s3_client, bucket_name: str, key: str, encryption: str | None, kms_key_id: str | None):
self.s3_client = s3_client
self.bucket_name = bucket_name
self.key = key
- self.upload_id = None
- self.parts = []
+ self.encryption = encryption
+ self.kms_key_id = kms_key_id
+ self.upload_id: str | None = None
+ self.parts: list[Part] = []
def to_state(self) -> S3MultiPartUploadState:
"""Produce state tuple that can be used to resume this S3MultiPartUpload."""
@@ -119,10 +124,21 @@ def start(self) -> str:
if self.is_upload_in_progress() is True:
raise UploadAlreadyInProgressError(self.upload_id)
- multipart_response = self.s3_client.create_multipart_upload(Bucket=self.bucket_name, Key=self.key)
- self.upload_id = multipart_response["UploadId"]
+ optional_kwargs = {}
+ if self.encryption:
+ optional_kwargs["ServerSideEncryption"] = self.encryption
+ if self.kms_key_id:
+ optional_kwargs["SSEKMSKeyId"] = self.kms_key_id
- return self.upload_id
+ multipart_response = self.s3_client.create_multipart_upload(
+ Bucket=self.bucket_name,
+ Key=self.key,
+ **optional_kwargs,
+ )
+ upload_id: str = multipart_response["UploadId"]
+ self.upload_id = upload_id
+
+ return upload_id
def continue_from_state(self, state: S3MultiPartUploadState):
"""Continue this S3MultiPartUpload from a previous state."""
@@ -230,6 +246,8 @@ class S3InsertInputs:
aws_secret_access_key: str | None = None
compression: str | None = None
exclude_events: list[str] | None = None
+ encryption: str | None = None
+ kms_key_id: str | None = None
def initialize_and_resume_multipart_upload(inputs: S3InsertInputs) -> tuple[S3MultiPartUpload, str]:
@@ -241,7 +259,7 @@ def initialize_and_resume_multipart_upload(inputs: S3InsertInputs) -> tuple[S3Mu
aws_access_key_id=inputs.aws_access_key_id,
aws_secret_access_key=inputs.aws_secret_access_key,
)
- s3_upload = S3MultiPartUpload(s3_client, inputs.bucket_name, key)
+ s3_upload = S3MultiPartUpload(s3_client, inputs.bucket_name, key, inputs.encryption, inputs.kms_key_id)
details = activity.info().heartbeat_details
@@ -442,6 +460,8 @@ async def run(self, inputs: S3BatchExportInputs):
data_interval_end=data_interval_end.isoformat(),
compression=inputs.compression,
exclude_events=inputs.exclude_events,
+ encryption=inputs.encryption,
+ kms_key_id=inputs.kms_key_id,
)
try:
await workflow.execute_activity(
diff --git a/posthog/test/base.py b/posthog/test/base.py
index b4b60d36363c4..8b66387037a7c 100644
--- a/posthog/test/base.py
+++ b/posthog/test/base.py
@@ -39,12 +39,12 @@
TRUNCATE_PERSON_STATIC_COHORT_TABLE_SQL,
)
from posthog.models.person.util import bulk_create_persons, create_person
-from posthog.models.session_recording_event.sql import (
+from posthog.session_recordings.sql.session_recording_event_sql import (
DISTRIBUTED_SESSION_RECORDING_EVENTS_TABLE_SQL,
DROP_SESSION_RECORDING_EVENTS_TABLE_SQL,
SESSION_RECORDING_EVENTS_TABLE_SQL,
)
-from posthog.models.session_replay_event.sql import (
+from posthog.session_recordings.sql.session_replay_event_sql import (
DISTRIBUTED_SESSION_REPLAY_EVENTS_TABLE_SQL,
DROP_SESSION_REPLAY_EVENTS_TABLE_SQL,
SESSION_REPLAY_EVENTS_TABLE_SQL,
diff --git a/requirements-dev.in b/requirements-dev.in
index 8abc899cfbd14..b21da42a9ee2e 100644
--- a/requirements-dev.in
+++ b/requirements-dev.in
@@ -15,7 +15,7 @@ ruff>=0.0.257
pip-tools==6.13.0
mypy==0.981
mypy-extensions==0.4.3
-datamodel-code-generator==0.16.1
+datamodel-code-generator==0.21.5
djangorestframework-stubs==1.4.0
django-stubs==1.8.0
Faker==17.5.0
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 8b0f3b0eb4875..2d93601fe05ee 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -4,6 +4,10 @@
#
# pip-compile requirements-dev.in
#
+annotated-types==0.5.0
+ # via
+ # -c requirements.txt
+ # pydantic
argcomplete==2.0.0
# via datamodel-code-generator
asgiref==3.3.2
@@ -62,7 +66,7 @@ cryptography==37.0.2
# -c requirements.txt
# pyopenssl
# urllib3
-datamodel-code-generator==0.16.1
+datamodel-code-generator==0.21.5
# via -r requirements-dev.in
django==3.2.19
# via
@@ -83,7 +87,7 @@ dnspython==2.2.1
# email-validator
docopt==0.6.2
# via pytest-watch
-email-validator==1.3.1
+email-validator==2.0.0.post2
# via pydantic
exceptiongroup==1.1.2
# via pytest
@@ -178,10 +182,14 @@ pycparser==2.20
# via
# -c requirements.txt
# cffi
-pydantic[email]==1.10.4
+pydantic[email]==2.3.0
# via
# -c requirements.txt
# datamodel-code-generator
+pydantic-core==2.6.3
+ # via
+ # -c requirements.txt
+ # pydantic
pyopenssl==22.0.0
# via
# -c requirements.txt
@@ -291,8 +299,6 @@ tomli==1.2.3
# mypy
# pyproject-hooks
# pytest
-typed-ast==1.5.4
- # via datamodel-code-generator
types-freezegun==1.1.10
# via -r requirements-dev.in
types-markdown==3.3.9
@@ -309,7 +315,7 @@ types-redis==4.3.20
# via -r requirements-dev.in
types-requests==2.26.1
# via -r requirements-dev.in
-typing-extensions==4.4.0
+typing-extensions==4.7.1
# via
# -c requirements.txt
# django-stubs
@@ -318,6 +324,7 @@ typing-extensions==4.4.0
# jsonschema-spec
# mypy
# pydantic
+ # pydantic-core
uritemplate==4.1.1
# via
# -c requirements.txt
diff --git a/requirements.in b/requirements.in
index 0657d57392b54..222e788c48182 100644
--- a/requirements.in
+++ b/requirements.in
@@ -55,9 +55,9 @@ pickleshare==0.7.5
Pillow==9.2.0
posthoganalytics==3.0.1
prance==0.22.2.22.0
-psycopg2-binary==2.8.6
+psycopg2-binary==2.9.7
pyarrow==12.0.1
-pydantic==1.10.4
+pydantic==2.3.0
pyjwt==2.4.0
python-dateutil>=2.8.2
python3-saml==1.12.0
diff --git a/requirements.txt b/requirements.txt
index 2190093813d3c..949d3fc8cc1ec 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -15,6 +15,8 @@ amqp==2.6.0
# via
# -r requirements.in
# kombu
+annotated-types==0.5.0
+ # via pydantic
antlr4-python3-runtime==4.13.0
# via -r requirements.in
asgiref==3.3.2
@@ -321,7 +323,7 @@ protobuf==4.22.1
# grpcio-status
# proto-plus
# temporalio
-psycopg2-binary==2.8.6
+psycopg2-binary==2.9.7
# via -r requirements.in
ptyprocess==0.6.0
# via pexpect
@@ -337,8 +339,10 @@ pycparser==2.20
# via cffi
pycryptodomex==3.18.0
# via snowflake-connector-python
-pydantic==1.10.4
+pydantic==2.3.0
# via -r requirements.in
+pydantic-core==2.6.3
+ # via pydantic
pyjwt==2.4.0
# via
# -r requirements.in
@@ -480,9 +484,10 @@ types-protobuf==4.22.0.0
# via temporalio
types-s3transfer==0.6.1
# via boto3-stubs
-typing-extensions==4.4.0
+typing-extensions==4.7.1
# via
# pydantic
+ # pydantic-core
# qrcode
# snowflake-connector-python
# temporalio