diff --git a/frontend/src/scenes/scenes.ts b/frontend/src/scenes/scenes.ts
index 356d9a9a06cc6..ca37ff7676737 100644
--- a/frontend/src/scenes/scenes.ts
+++ b/frontend/src/scenes/scenes.ts
@@ -481,5 +481,4 @@ export const routes: Record<string, Scene> = {
[urls.feedback()]: Scene.Feedback,
[urls.feedback() + '/*']: Scene.Feedback,
[urls.notebook(':shortId')]: Scene.Notebook,
- [urls.notebookEdit(':shortId')]: Scene.Notebook,
}
diff --git a/frontend/src/scenes/session-recordings/SessionsRecordings.stories.tsx b/frontend/src/scenes/session-recordings/SessionsRecordings-player-success.stories.tsx
similarity index 93%
rename from frontend/src/scenes/session-recordings/SessionsRecordings.stories.tsx
rename to frontend/src/scenes/session-recordings/SessionsRecordings-player-success.stories.tsx
index 1ac9395728811..1060246c67d27 100644
--- a/frontend/src/scenes/session-recordings/SessionsRecordings.stories.tsx
+++ b/frontend/src/scenes/session-recordings/SessionsRecordings-player-success.stories.tsx
@@ -9,7 +9,6 @@ import recordingSnapshotsJson from 'scenes/session-recordings/__mocks__/recordin
import recordingMetaJson from 'scenes/session-recordings/__mocks__/recording_meta.json'
import recordingEventsJson from 'scenes/session-recordings/__mocks__/recording_events_query'
import recording_playlists from './__mocks__/recording_playlists.json'
-import { ReplayTabs } from '~/types'
const meta: Meta = {
title: 'Scenes-App/Recordings',
@@ -17,6 +16,7 @@ const meta: Meta = {
layout: 'fullscreen',
viewMode: 'story',
mockDate: '2023-02-01',
+ waitForSelector: '.PlayerFrame__content .replayer-wrapper iframe',
},
decorators: [
mswDecorator({
@@ -81,7 +81,7 @@ const meta: Meta = {
},
]
},
- '/api/projects/:team_id/session_recording_playlists/:playlist_id/recordings?limit=100': (req) => {
+ '/api/projects/:team_id/session_recording_playlists/:playlist_id/recordings': (req) => {
const playlistId = req.params.playlist_id
const response = playlistId === '1234567' ? recordings : []
return [200, { has_next: false, results: response, version: 1 }]
@@ -89,6 +89,12 @@ const meta: Meta = {
// without the session-recording-blob-replay feature flag, we only load via ClickHouse
'/api/projects/:team/session_recordings/:id/snapshots': recordingSnapshotsJson,
'/api/projects/:team/session_recordings/:id': recordingMetaJson,
+ 'api/projects/:team/notebooks': {
+ count: 0,
+ next: null,
+ previous: null,
+ results: [],
+ },
},
post: {
'/api/projects/:team/query': recordingEventsJson,
@@ -97,16 +103,10 @@ const meta: Meta = {
],
}
export default meta
-export function RecordingsList(): JSX.Element {
- useEffect(() => {
- router.actions.push(urls.replay())
- }, [])
- return <App />
-}
-export function RecordingsPlayLists(): JSX.Element {
+export function RecentRecordings(): JSX.Element {
useEffect(() => {
- router.actions.push(urls.replay(ReplayTabs.Playlists))
+ router.actions.push(urls.replay())
}, [])
return <App />
}
diff --git a/frontend/src/scenes/session-recordings/SessionsRecordings-playlist-listing.stories.tsx b/frontend/src/scenes/session-recordings/SessionsRecordings-playlist-listing.stories.tsx
new file mode 100644
index 0000000000000..657fbccf4bc29
--- /dev/null
+++ b/frontend/src/scenes/session-recordings/SessionsRecordings-playlist-listing.stories.tsx
@@ -0,0 +1,48 @@
+import { Meta } from '@storybook/react'
+import { useEffect } from 'react'
+import { mswDecorator } from '~/mocks/browser'
+import { router } from 'kea-router'
+import { urls } from 'scenes/urls'
+import { App } from 'scenes/App'
+import recording_playlists from './__mocks__/recording_playlists.json'
+import { ReplayTabs } from '~/types'
+import recordings from 'scenes/session-recordings/__mocks__/recordings.json'
+import recordingEventsJson from 'scenes/session-recordings/__mocks__/recording_events_query'
+
+const meta: Meta = {
+ title: 'Scenes-App/Recordings',
+ parameters: {
+ layout: 'fullscreen',
+ viewMode: 'story',
+ mockDate: '2023-02-01',
+ },
+ decorators: [
+ mswDecorator({
+ get: {
+ '/api/projects/:team_id/session_recording_playlists': recording_playlists,
+ '/api/projects/:team_id/session_recordings': (req) => {
+ const version = req.url.searchParams.get('version')
+ return [
+ 200,
+ {
+ has_next: false,
+ results: recordings,
+ version,
+ },
+ ]
+ },
+ },
+ post: {
+ '/api/projects/:team/query': recordingEventsJson,
+ },
+ }),
+ ],
+}
+export default meta
+
+export function RecordingsPlayLists(): JSX.Element {
+ useEffect(() => {
+ router.actions.push(urls.replay(ReplayTabs.Playlists))
+ }, [])
+ return <App />
+}
diff --git a/frontend/src/scenes/session-recordings/__mocks__/recording_events.json b/frontend/src/scenes/session-recordings/__mocks__/recording_events.json
index f2db148045646..0afa00a98d244 100644
--- a/frontend/src/scenes/session-recordings/__mocks__/recording_events.json
+++ b/frontend/src/scenes/session-recordings/__mocks__/recording_events.json
@@ -1,6 +1,6 @@
[
{
- "id": "$pageview",
+ "id": "$pageview1",
"event": "$pageview",
"name": "$event_before_recording_starts",
"type": "events",
@@ -14,7 +14,7 @@
"elements_hash": ""
},
{
- "id": "$pageview",
+ "id": "$pageview2",
"name": "$pageview",
"event": "$pageview",
"type": "events",
diff --git a/frontend/src/scenes/session-recordings/filters/AdvancedSessionRecordingsFilters.tsx b/frontend/src/scenes/session-recordings/filters/AdvancedSessionRecordingsFilters.tsx
index 94c1e95eb0ba6..e277a3b4a997a 100644
--- a/frontend/src/scenes/session-recordings/filters/AdvancedSessionRecordingsFilters.tsx
+++ b/frontend/src/scenes/session-recordings/filters/AdvancedSessionRecordingsFilters.tsx
@@ -60,7 +60,8 @@ export const AdvancedSessionRecordingsFilters = ({
{ key: 'Custom', values: [] },
{ key: 'Last 24 hours', values: ['-24h'] },
{ key: 'Last 7 days', values: ['-7d'] },
- { key: 'Last 21 days', values: ['-21d'] },
+ { key: 'Last 30 days', values: ['-30d'] },
+ { key: 'All time', values: ['-90d'] },
]}
dropdownPlacement="bottom-start"
/>
diff --git a/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx b/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx
index fd023b710fc20..451f1cf616f8a 100644
--- a/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx
+++ b/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx
@@ -4,17 +4,18 @@ import {
} from 'scenes/session-recordings/player/sessionRecordingPlayerLogic'
import { useActions, useValues } from 'kea'
import { LemonButton, LemonButtonProps } from 'lib/lemon-ui/LemonButton'
-import { IconComment, IconDelete, IconLink } from 'lib/lemon-ui/icons'
+import { IconComment, IconDelete, IconJournalPlus, IconLink } from 'lib/lemon-ui/icons'
import { openPlayerShareDialog } from 'scenes/session-recordings/player/share/PlayerShare'
import { PlaylistPopoverButton } from './playlist-popover/PlaylistPopover'
import { LemonDialog } from 'lib/lemon-ui/LemonDialog'
-import { NotebookAddButton } from 'scenes/notebooks/NotebookAddButton/NotebookAddButton'
+import { NotebookSelectButton } from 'scenes/notebooks/NotebookSelectButton/NotebookSelectButton'
import { NotebookNodeType } from '~/types'
-import { dayjs } from 'lib/dayjs'
+import { useNotebookNode } from 'scenes/notebooks/Nodes/notebookNodeLogic'
export function PlayerMetaLinks(): JSX.Element {
const { sessionRecordingId, logicProps } = useValues(sessionRecordingPlayerLogic)
const { setPause, deleteRecording } = useActions(sessionRecordingPlayerLogic)
+ const nodeLogic = useNotebookNode()
const getCurrentPlayerTime = (): number => {
// NOTE: We pull this value at call time as otherwise it would trigger re-renders if pulled from the hook
@@ -55,12 +56,11 @@ export function PlayerMetaLinks(): JSX.Element {
{![SessionRecordingPlayerMode.Sharing].includes(mode) ? (
<>
-
}
resource={{ type: NotebookNodeType.Recording, attrs: { id: sessionRecordingId } }}
onClick={() => setPause()}
- newNotebookTitle={`Notes ${dayjs().format('DD/MM')}`}
onNotebookOpened={(theNotebookLogic, theNodeLogic) => {
const time = getCurrentPlayerTime() * 1000
@@ -74,15 +74,30 @@ export function PlayerMetaLinks(): JSX.Element {
}}
>
Comment
-
+
} onClick={onShare} {...commonProps}>
Share
-
- Pin
-
+ {nodeLogic ? (
+ nodeLogic.props.nodeType !== NotebookNodeType.Recording ? (
+
}
+ size="small"
+ onClick={() => {
+ nodeLogic.actions.insertAfter({
+ type: NotebookNodeType.Recording,
+ attrs: { id: sessionRecordingId },
+ })
+ }}
+ />
+ ) : null
+ ) : (
+
+ Pin
+
+ )}
{logicProps.playerKey !== 'modal' && (
([
if (nextSourceToLoad) {
actions.loadRecordingSnapshotsV2(nextSourceToLoad)
- } else {
- actions.reportUsageIfFullyLoaded()
}
},
loadRecordingSnapshotsV1Success: ({ sessionPlayerSnapshotData }) => {
@@ -254,8 +252,6 @@ export const sessionRecordingDataLogic = kea([
if (values.sessionPlayerSnapshotData?.next) {
actions.loadRecordingSnapshotsV1(values.sessionPlayerSnapshotData?.next)
- } else {
- actions.reportUsageIfFullyLoaded()
}
if (values.chunkPaginationIndex === 1 || values.loadedFromBlobStorage) {
// Not always accurate that recording is playable after first chunk is loaded, but good guesstimate for now
@@ -265,10 +261,12 @@ export const sessionRecordingDataLogic = kea([
size: (values.sessionPlayerSnapshotData?.snapshots ?? []).length,
duration: Math.round(performance.now() - cache.snapshotsStartTime),
}
-
- actions.reportViewed()
}
},
+ loadRecordingSnapshotsSuccess: () => {
+ actions.reportViewed()
+ actions.reportUsageIfFullyLoaded()
+ },
loadRecordingSnapshotsV1Failure: () => {
actions.loadRecordingSnapshotsFailure()
},
diff --git a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx
index 04464fc9a87da..00508be3ab649 100644
--- a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx
+++ b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx
@@ -57,19 +57,32 @@ function UnusableEventsWarning(props: { unusableEventsInFilter: string[] }): JSX
)
}
+export type SessionRecordingsPlaylistProps = SessionRecordingListLogicProps & {
+ playlistShortId?: string
+ personUUID?: string
+ filters?: RecordingFilters
+ updateSearchParams?: boolean
+ onFiltersChange?: (filters: RecordingFilters) => void
+ autoPlay?: boolean
+ mode?: 'standard' | 'notebook'
+}
+
export function RecordingsLists({
playlistShortId,
personUUID,
filters: defaultFilters,
updateSearchParams,
+ ...props
}: SessionRecordingsPlaylistProps): JSX.Element {
- const logicProps = {
+ const logicProps: SessionRecordingListLogicProps = {
+ ...props,
playlistShortId,
personUUID,
filters: defaultFilters,
updateSearchParams,
}
const logic = sessionRecordingsListLogic(logicProps)
+
const {
filters,
hasNext,
@@ -244,11 +257,11 @@ export function RecordingsLists({
data-attr={'expand-replay-listing-from-default-seven-days-to-twenty-one'}
onClick={() => {
setFilters({
- date_from: '-21d',
+ date_from: '-30d',
})
}}
>
- Search over the last 21 days
+ Search over the last 30 days
>
) : (
@@ -285,33 +298,12 @@ export function RecordingsLists({
)
}
-export type SessionRecordingsPlaylistProps = {
- playlistShortId?: string
- personUUID?: string
- filters?: RecordingFilters
- updateSearchParams?: boolean
- onFiltersChange?: (filters: RecordingFilters) => void
- autoPlay?: boolean
- mode?: 'standard' | 'notebook'
-}
-
export function SessionRecordingsPlaylist(props: SessionRecordingsPlaylistProps): JSX.Element {
- const {
- playlistShortId,
- personUUID,
- filters: defaultFilters,
- updateSearchParams,
- onFiltersChange,
- autoPlay = true,
- } = props
+ const { playlistShortId } = props
const logicProps: SessionRecordingListLogicProps = {
- playlistShortId,
- personUUID,
- filters: defaultFilters,
- updateSearchParams,
- autoPlay,
- onFiltersChange,
+ ...props,
+ autoPlay: props.autoPlay ?? true,
}
const logic = sessionRecordingsListLogic(logicProps)
const {
diff --git a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsListLogic.ts b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsListLogic.ts
index 99cb664cebc18..5d44e84618b36 100644
--- a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsListLogic.ts
+++ b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsListLogic.ts
@@ -157,12 +157,8 @@ export const defaultPageviewPropertyEntityFilter = (
}
}
-export function generateSessionRecordingListLogicKey(props: SessionRecordingListLogicProps): string {
- return `${props.key}-${props.playlistShortId}-${props.personUUID}-${props.updateSearchParams ? '-with-search' : ''}`
-}
-
export interface SessionRecordingListLogicProps {
- key?: string
+ logicKey?: string
playlistShortId?: string
personUUID?: PersonUUID
filters?: RecordingFilters
@@ -174,7 +170,12 @@ export interface SessionRecordingListLogicProps {
export const sessionRecordingsListLogic = kea<sessionRecordingsListLogicType>([
path((key) => ['scenes', 'session-recordings', 'playlist', 'sessionRecordingsListLogic', key]),
props({} as SessionRecordingListLogicProps),
- key(generateSessionRecordingListLogicKey),
+ key(
+ (props: SessionRecordingListLogicProps) =>
+ `${props.logicKey}-${props.playlistShortId}-${props.personUUID}-${
+ props.updateSearchParams ? '-with-search' : ''
+ }`
+ ),
connect({
actions: [
eventUsageLogic,
diff --git a/frontend/src/scenes/surveys/Survey.tsx b/frontend/src/scenes/surveys/Survey.tsx
index 6f72397bd4c54..d59ed4b674e69 100644
--- a/frontend/src/scenes/surveys/Survey.tsx
+++ b/frontend/src/scenes/surveys/Survey.tsx
@@ -60,7 +60,7 @@ export function SurveyComponent({ id }: { id?: string } = {}): JSX.Element {
export function SurveyForm({ id }: { id: string }): JSX.Element {
const { survey, surveyLoading, isEditingSurvey, hasTargetingFlag } = useValues(surveyLogic)
- const { loadSurvey, editingSurvey, setHasTargetingFlag } = useActions(surveyLogic)
+ const { loadSurvey, editingSurvey, setSurveyValue } = useActions(surveyLogic)
const { featureFlags } = useValues(enabledFeaturesLogic)
return (
@@ -374,7 +374,9 @@ export function SurveyForm({ id }: { id: string }): JSX.Element {
- onClick={() => setHasTargetingFlag(true)}
+ onClick={() => {
+ setSurveyValue('targeting_flag_filters', { groups: [] })
+ }}
>
Add user targeting
@@ -389,7 +391,10 @@ export function SurveyForm({ id }: { id: string }): JSX.Element {
type="secondary"
status="danger"
className="w-max"
- onClick={() => setHasTargetingFlag(false)}
+ onClick={() => {
+ setSurveyValue('targeting_flag_filters', undefined)
+ setSurveyValue('targeting_flag', null)
+ }}
>
Remove all user properties
diff --git a/frontend/src/scenes/surveys/SurveyView.tsx b/frontend/src/scenes/surveys/SurveyView.tsx
index dfe7de4895a4b..381f766f9abae 100644
--- a/frontend/src/scenes/surveys/SurveyView.tsx
+++ b/frontend/src/scenes/surveys/SurveyView.tsx
@@ -10,27 +10,22 @@ import { capitalizeFirstLetter } from 'lib/utils'
import { useState, useEffect } from 'react'
import { pluginsLogic } from 'scenes/plugins/pluginsLogic'
import { Query } from '~/queries/Query/Query'
-import { defaultSurveyAppearance, surveyLogic } from './surveyLogic'
+import { defaultSurveyAppearance, surveyEventName, surveyLogic } from './surveyLogic'
import { surveysLogic } from './surveysLogic'
import { PageHeader } from 'lib/components/PageHeader'
import { SurveyReleaseSummary } from './Survey'
import { SurveyAppearance } from './SurveyAppearance'
-import { SurveyQuestionType, SurveyType } from '~/types'
+import { PropertyFilterType, PropertyOperator, Survey, SurveyQuestionType, SurveyType } from '~/types'
import { SurveyAPIEditor } from './SurveyAPIEditor'
import { LemonBanner } from 'lib/lemon-ui/LemonBanner'
import { IconOpenInNew } from 'lib/lemon-ui/icons'
import { NodeKind } from '~/queries/schema'
+import { dayjs } from 'lib/dayjs'
+import { FEATURE_FLAGS } from 'lib/constants'
+import { featureFlagLogic } from 'lib/logic/featureFlagLogic'
export function SurveyView({ id }: { id: string }): JSX.Element {
- const {
- survey,
- dataTableQuery,
- surveyLoading,
- surveyPlugin,
- surveyMetricsQueries,
- surveyDataVizQuery,
- showSurveyAppWarning,
- } = useValues(surveyLogic)
+ const { survey, surveyLoading, surveyPlugin, showSurveyAppWarning } = useValues(surveyLogic)
// TODO: survey results logic
// const { surveyImpressionsCount, surveyStartedCount, surveyCompletedCount } = useValues(surveyResultsLogic)
const { editingSurvey, updateSurvey, launchSurvey, stopSurvey, archiveSurvey, resumeSurvey } =
@@ -134,48 +129,7 @@ export function SurveyView({ id }: { id: string }): JSX.Element {
? {
content: (
- {surveyMetricsQueries && (
-
- )}
- {survey.questions[0].type === SurveyQuestionType.Rating && (
-
-
-
- )}
- {(survey.questions[0].type === SurveyQuestionType.SingleChoice ||
- survey.questions[0].type === SurveyQuestionType.MultipleChoice) && (
-
- {survey.questions[0].type === SurveyQuestionType.SingleChoice ? (
-
- ) : (
-
- )}
-
- )}
- {surveyLoading ?
:
}
+
),
key: 'results',
@@ -305,7 +259,124 @@ export function SurveyView({ id }: { id: string }): JSX.Element {
)
}
+export function SurveyResult({ disableEventsTable }: { disableEventsTable?: boolean }): JSX.Element {
+ const {
+ survey,
+ dataTableQuery,
+ surveyLoading,
+ surveyMetricsQueries,
+ surveyRatingQuery,
+ surveyMultipleChoiceQuery,
+ } = useValues(surveyLogic)
+ const { featureFlags } = useValues(featureFlagLogic)
+
+ return (
+ <>
+ {surveyMetricsQueries && (
+
+ )}
+ {survey.questions[0].type === SurveyQuestionType.Rating && (
+
+
+ {featureFlags[FEATURE_FLAGS.SURVEY_NPS_RESULTS] && survey.questions[0].scale === 10 && (
+ <>
+
+
NPS Score
+
+ >
+ )}
+
+ )}
+ {(survey.questions[0].type === SurveyQuestionType.SingleChoice ||
+ survey.questions[0].type === SurveyQuestionType.MultipleChoice) && (
+
+
+
+ )}
+ {!disableEventsTable && (surveyLoading ? : )}
+ >
+ )
+}
+
const OPT_IN_SNIPPET = `posthog.init('YOUR_PROJECT_API_KEY', {
api_host: 'YOUR API HOST',
opt_in_site_apps: true // <--- Add this line
})`
+
+function SurveyNPSResults({ survey }: { survey: Survey }): JSX.Element {
+ return (
+
+ )
+}
diff --git a/frontend/src/scenes/surveys/Surveys.tsx b/frontend/src/scenes/surveys/Surveys.tsx
index 3d5fc423f40c7..3f18b2df4e154 100644
--- a/frontend/src/scenes/surveys/Surveys.tsx
+++ b/frontend/src/scenes/surveys/Surveys.tsx
@@ -112,17 +112,7 @@ export function Surveys(): JSX.Element {
title: 'Status',
width: 100,
render: function Render(_, survey: Survey) {
- const statusColors = {
- running: 'success',
- draft: 'default',
- complete: 'completion',
- } as Record
- const status = getSurveyStatus(survey)
- return (
-
- {status.toUpperCase()}
-
- )
+ return
},
},
{
@@ -243,3 +233,17 @@ export function Surveys(): JSX.Element {
)
}
+
+export function StatusTag({ survey }: { survey: Survey }): JSX.Element {
+ const statusColors = {
+ running: 'success',
+ draft: 'default',
+ complete: 'completion',
+ } as Record
+ const status = getSurveyStatus(survey)
+ return (
+
+ {status.toUpperCase()}
+
+ )
+}
diff --git a/frontend/src/scenes/surveys/surveyLogic.tsx b/frontend/src/scenes/surveys/surveyLogic.tsx
index 4e935af2022b6..9c4fe305a8b79 100644
--- a/frontend/src/scenes/surveys/surveyLogic.tsx
+++ b/frontend/src/scenes/surveys/surveyLogic.tsx
@@ -27,7 +27,6 @@ import { featureFlagLogic } from 'scenes/feature-flags/featureFlagLogic'
export interface NewSurvey
extends Pick<
Survey,
- | 'id'
| 'name'
| 'description'
| 'type'
@@ -40,6 +39,7 @@ export interface NewSurvey
| 'archived'
| 'appearance'
> {
+ id: 'new'
linked_flag_id: number | undefined
targeting_flag_filters: Pick<FeatureFlagFilters, 'groups'> | undefined
}
@@ -55,7 +55,7 @@ export const defaultSurveyAppearance = {
thankYouMessageHeader: 'Thank you for your feedback!',
}
-const NEW_SURVEY: NewSurvey = {
+export const NEW_SURVEY: NewSurvey = {
id: 'new',
name: '',
description: '',
@@ -76,73 +76,6 @@ export const surveyEventName = 'survey sent'
const SURVEY_RESPONSE_PROPERTY = '$survey_response'
-export const getSurveyDataQuery = (survey: Survey): DataTableNode => {
- const surveyDataQuery: DataTableNode = {
- kind: NodeKind.DataTableNode,
- source: {
- kind: NodeKind.EventsQuery,
- select: ['*', `properties.${SURVEY_RESPONSE_PROPERTY}`, 'timestamp', 'person'],
- orderBy: ['timestamp DESC'],
- where: [`event == 'survey sent' or event == '${survey.name} survey sent'`],
- after: survey.created_at,
- properties: [
- {
- type: PropertyFilterType.Event,
- key: '$survey_id',
- operator: PropertyOperator.Exact,
- value: survey.id,
- },
- ],
- },
- propertiesViaUrl: true,
- showExport: true,
- showReload: true,
- showEventFilter: true,
- showPropertyFilter: true,
- }
- return surveyDataQuery
-}
-
-export const getSurveyMetricsQueries = (surveyId: string): SurveyMetricsQueries => {
- const surveysShownHogqlQuery = `select count(distinct person.id) as 'survey shown' from events where event == 'survey shown' and properties.$survey_id == '${surveyId}'`
- const surveysDismissedHogqlQuery = `select count(distinct person.id) as 'survey dismissed' from events where event == 'survey dismissed' and properties.$survey_id == '${surveyId}'`
- return {
- surveysShown: {
- kind: NodeKind.DataTableNode,
- source: { kind: NodeKind.HogQLQuery, query: surveysShownHogqlQuery },
- },
- surveysDismissed: {
- kind: NodeKind.DataTableNode,
- source: { kind: NodeKind.HogQLQuery, query: surveysDismissedHogqlQuery },
- },
- }
-}
-
-export const getSurveyDataVizQuery = (survey: Survey): InsightVizNode => {
- return {
- kind: NodeKind.InsightVizNode,
- source: {
- kind: NodeKind.TrendsQuery,
- dateRange: {
- date_from: dayjs(survey.created_at).format('YYYY-MM-DD'),
- date_to: dayjs().format('YYYY-MM-DD'),
- },
- properties: [
- {
- type: PropertyFilterType.Event,
- key: '$survey_id',
- operator: PropertyOperator.Exact,
- value: survey.id,
- },
- ],
- series: [{ event: surveyEventName, kind: NodeKind.EventsNode }],
- trendsFilter: { display: ChartDisplayType.ActionsBarValue },
- breakdown: { breakdown: '$survey_response', breakdown_type: 'event' },
- },
- showTable: true,
- }
-}
-
export interface SurveyLogicProps {
id: string | 'new'
}
@@ -153,9 +86,9 @@ export interface SurveyMetricsQueries {
}
export const surveyLogic = kea<surveyLogicType>([
- path(['scenes', 'surveys', 'surveyLogic']),
props({} as SurveyLogicProps),
key(({ id }) => id),
+ path((key) => ['scenes', 'surveys', 'surveyLogic', key]),
connect(() => ({
actions: [
surveysLogic,
@@ -179,10 +112,6 @@ export const surveyLogic = kea([
stopSurvey: true,
archiveSurvey: true,
resumeSurvey: true,
- setDataTableQuery: (query: DataTableNode) => ({ query }),
- setSurveyMetricsQueries: (surveyMetricsQueries: SurveyMetricsQueries) => ({ surveyMetricsQueries }),
- setSurveyDataVizQuery: (surveyDataVizQuery: InsightVizNode) => ({ surveyDataVizQuery }),
- setHasTargetingFlag: (hasTargetingFlag: boolean) => ({ hasTargetingFlag }),
}),
loaders(({ props, actions }) => ({
survey: {
@@ -213,16 +142,6 @@ export const surveyLogic = kea([
},
})),
listeners(({ actions }) => ({
- loadSurveySuccess: ({ survey }) => {
- if (survey.start_date && survey.id !== 'new') {
- actions.setDataTableQuery(getSurveyDataQuery(survey as Survey))
- actions.setSurveyMetricsQueries(getSurveyMetricsQueries(survey.id))
- actions.setSurveyDataVizQuery(getSurveyDataVizQuery(survey as Survey))
- }
- if (survey.targeting_flag) {
- actions.setHasTargetingFlag(true)
- }
- },
createSurveySuccess: ({ survey }) => {
lemonToast.success(<>Survey {survey.name} created>)
actions.loadSurveys()
@@ -237,8 +156,6 @@ export const surveyLogic = kea([
},
launchSurveySuccess: ({ survey }) => {
lemonToast.success(<>Survey {survey.name} launched>)
- actions.setSurveyMetricsQueries(getSurveyMetricsQueries(survey.id))
- actions.setDataTableQuery(getSurveyDataQuery(survey))
actions.loadSurveys()
actions.reportSurveyLaunched(survey)
},
@@ -261,30 +178,6 @@ export const surveyLogic = kea([
editingSurvey: (_, { editing }) => editing,
},
],
- dataTableQuery: [
- null as DataTableNode | null,
- {
- setDataTableQuery: (_, { query }) => query,
- },
- ],
- surveyMetricsQueries: [
- null as SurveyMetricsQueries | null,
- {
- setSurveyMetricsQueries: (_, { surveyMetricsQueries }) => surveyMetricsQueries,
- },
- ],
- surveyDataVizQuery: [
- null as InsightVizNode | null,
- {
- setSurveyDataVizQuery: (_, { surveyDataVizQuery }) => surveyDataVizQuery,
- },
- ],
- hasTargetingFlag: [
- false,
- {
- setHasTargetingFlag: (_, { hasTargetingFlag }) => hasTargetingFlag,
- },
- ],
}),
selectors({
isSurveyRunning: [
@@ -320,6 +213,142 @@ export const surveyLogic = kea([
)
},
],
+ dataTableQuery: [
+ (s) => [s.survey],
+ (survey): DataTableNode | null => {
+ if (survey.id === 'new') {
+ return null
+ }
+ const createdAt = (survey as Survey).created_at
+
+ return {
+ kind: NodeKind.DataTableNode,
+ source: {
+ kind: NodeKind.EventsQuery,
+ select: ['*', `properties.${SURVEY_RESPONSE_PROPERTY}`, 'timestamp', 'person'],
+ orderBy: ['timestamp DESC'],
+ where: [`event == 'survey sent' or event == '${survey.name} survey sent'`],
+ after: createdAt,
+ properties: [
+ {
+ type: PropertyFilterType.Event,
+ key: '$survey_id',
+ operator: PropertyOperator.Exact,
+ value: survey.id,
+ },
+ ],
+ },
+ propertiesViaUrl: true,
+ showExport: true,
+ showReload: true,
+ showEventFilter: true,
+ showPropertyFilter: true,
+ showTimings: false,
+ }
+ },
+ ],
+ surveyMetricsQueries: [
+ (s) => [s.survey],
+ (survey): SurveyMetricsQueries | null => {
+ const surveyId = survey.id
+ if (surveyId === 'new') {
+ return null
+ }
+ const startDate = dayjs((survey as Survey).created_at).format('YYYY-MM-DD')
+ const endDate = survey.end_date
+ ? dayjs(survey.end_date).format('YYYY-MM-DD')
+ : dayjs().add(1, 'day').format('YYYY-MM-DD')
+
+ const surveysShownHogqlQuery = `select count(distinct person.id) as 'survey shown' from events where event == 'survey shown' and properties.$survey_id == '${surveyId}' and timestamp >= '${startDate}' and timestamp <= '${endDate}' `
+ const surveysDismissedHogqlQuery = `select count(distinct person.id) as 'survey dismissed' from events where event == 'survey dismissed' and properties.$survey_id == '${surveyId}' and timestamp >= '${startDate}' and timestamp <= '${endDate}'`
+ return {
+ surveysShown: {
+ kind: NodeKind.DataTableNode,
+ source: {
+ kind: NodeKind.HogQLQuery,
+ query: surveysShownHogqlQuery,
+ },
+ showTimings: false,
+ },
+ surveysDismissed: {
+ kind: NodeKind.DataTableNode,
+ source: {
+ kind: NodeKind.HogQLQuery,
+ query: surveysDismissedHogqlQuery,
+ },
+ showTimings: false,
+ },
+ }
+ },
+ ],
+ surveyRatingQuery: [
+ (s) => [s.survey],
+ (survey): InsightVizNode | null => {
+ if (survey.id === 'new') {
+ return null
+ }
+ const startDate = dayjs((survey as Survey).created_at).format('YYYY-MM-DD')
+ const endDate = survey.end_date
+ ? dayjs(survey.end_date).format('YYYY-MM-DD')
+ : dayjs().add(1, 'day').format('YYYY-MM-DD')
+
+ return {
+ kind: NodeKind.InsightVizNode,
+ source: {
+ kind: NodeKind.TrendsQuery,
+ dateRange: {
+ date_from: startDate,
+ date_to: endDate,
+ },
+ properties: [
+ {
+ type: PropertyFilterType.Event,
+ key: '$survey_id',
+ operator: PropertyOperator.Exact,
+ value: survey.id,
+ },
+ ],
+ series: [{ event: surveyEventName, kind: NodeKind.EventsNode }],
+ trendsFilter: { display: ChartDisplayType.ActionsBarValue },
+ breakdown: { breakdown: '$survey_response', breakdown_type: 'event' },
+ },
+ showTable: true,
+ }
+ },
+ ],
+ surveyMultipleChoiceQuery: [
+ (s) => [s.survey],
+ (survey): DataTableNode | null => {
+ if (survey.id === 'new') {
+ return null
+ }
+
+ const startDate = dayjs((survey as Survey).created_at).format('YYYY-MM-DD')
+ const endDate = survey.end_date
+ ? dayjs(survey.end_date).format('YYYY-MM-DD')
+ : dayjs().add(1, 'day').format('YYYY-MM-DD')
+
+ const singleChoiceQuery = `select count(), properties.$survey_response as choice from events where event == 'survey sent' and properties.$survey_id == '${survey.id}' and timestamp >= '${startDate}' and timestamp <= '${endDate}' group by choice order by count() desc`
+ const multipleChoiceQuery = `select count(), arrayJoin(JSONExtractArrayRaw(properties, '$survey_response')) as choice from events where event == 'survey sent' and properties.$survey_id == '${survey.id}' and timestamp >= '${startDate}' and timestamp <= '${endDate}' group by choice order by count() desc`
+ return {
+ kind: NodeKind.DataTableNode,
+ source: {
+ kind: NodeKind.HogQLQuery,
+ query:
+ survey.questions[0].type === SurveyQuestionType.SingleChoice
+ ? singleChoiceQuery
+ : multipleChoiceQuery,
+ },
+ showTimings: false,
+ }
+ },
+ ],
+ hasTargetingFlag: [
+ (s) => [s.survey],
+ (survey): boolean => {
+ return !!survey.targeting_flag || !!(survey.id === 'new' && survey.targeting_flag_filters)
+ },
+ ],
}),
forms(({ actions, props, values }) => ({
survey: {
diff --git a/frontend/src/scenes/urls.ts b/frontend/src/scenes/urls.ts
index 2450187f8303d..d933d06f4e196 100644
--- a/frontend/src/scenes/urls.ts
+++ b/frontend/src/scenes/urls.ts
@@ -185,5 +185,4 @@ export const urls = {
tab: 'notebooks',
}).url,
notebook: (shortId: string): string => `/notebooks/${shortId}`,
- notebookEdit: (shortId: string): string => `/notebooks/${shortId}/edit`,
}
diff --git a/frontend/src/styles/utilities.scss b/frontend/src/styles/utilities.scss
index 126d981427e89..745375f1c3f57 100644
--- a/frontend/src/styles/utilities.scss
+++ b/frontend/src/styles/utilities.scss
@@ -919,6 +919,13 @@ $decorations: underline, overline, line-through, no-underline;
}
}
+.list-inside {
+ list-style-position: inside;
+}
+.list-outside {
+ list-style-position: outside;
+}
+
.shadow {
box-shadow: var(--shadow-elevation);
}
diff --git a/frontend/src/toolbar/button/HedgehogButton.tsx b/frontend/src/toolbar/button/HedgehogButton.tsx
index 634ea3a38732e..44f1b20ae2d0b 100644
--- a/frontend/src/toolbar/button/HedgehogButton.tsx
+++ b/frontend/src/toolbar/button/HedgehogButton.tsx
@@ -45,6 +45,7 @@ export function HedgehogButton(): JSX.Element {
onPositionChange={(actor) => {
saveDragPosition(actor.x + SPRITE_SIZE * 0.5, -actor.y - SPRITE_SIZE * 0.5)
}}
+ isDarkModeOn={false}
/>
)}
>
diff --git a/frontend/src/types.ts b/frontend/src/types.ts
index cf091c4c88296..de8b30192d4a6 100644
--- a/frontend/src/types.ts
+++ b/frontend/src/types.ts
@@ -1793,7 +1793,6 @@ export enum RecordingWindowFilter {
export interface EditorFilterProps {
query: InsightQueryNode
- setQuery: (node: InsightQueryNode) => void
insightProps: InsightLogicProps
}
@@ -2055,6 +2054,7 @@ export interface InsightLogicProps {
doNotLoad?: boolean
/** query when used as ad-hoc insight */
query?: InsightVizNode
+ setQuery?: (node: InsightVizNode) => void
}
export interface SetInsightOptions {
@@ -2191,6 +2191,7 @@ export interface FeatureFlagType extends Omit
+ type: NotebookNodeType
+}
+
export enum NotebookTarget {
Popover = 'popover',
Auto = 'auto',
@@ -3093,6 +3097,8 @@ export type BatchExportDestinationS3 = {
aws_secret_access_key: string
exclude_events: string[]
compression: string | null
+ encryption: string | null
+ kms_key_id: string | null
}
}
diff --git a/latest_migrations.manifest b/latest_migrations.manifest
index 84d604bfc1357..233b3d446d5cb 100644
--- a/latest_migrations.manifest
+++ b/latest_migrations.manifest
@@ -5,7 +5,7 @@ contenttypes: 0002_remove_content_type_name
ee: 0015_add_verified_properties
otp_static: 0002_throttling
otp_totp: 0002_auto_20190420_0723
-posthog: 0347_add_bigquery_export_type
+posthog: 0350_add_notebook_text_content
sessions: 0001_initial
social_django: 0010_uid_db_index
two_factor: 0007_auto_20201201_1019
diff --git a/package.json b/package.json
index 1fee283b008d2..3f8131541f4c5 100644
--- a/package.json
+++ b/package.json
@@ -73,7 +73,7 @@
"@monaco-editor/react": "4.4.6",
"@posthog/plugin-scaffold": "^1.3.2",
"@react-hook/size": "^2.1.2",
- "@rrweb/types": "^2.0.0-alpha.9",
+ "@rrweb/types": "^2.0.0-alpha.11",
"@sentry/react": "7.22.0",
"@testing-library/dom": ">=7.21.4",
"@tiptap/core": "^2.1.0-rc.12",
@@ -125,7 +125,8 @@
"kea-window-values": "^3.0.0",
"md5": "^2.3.0",
"monaco-editor": "^0.39.0",
- "posthog-js": "1.78.1",
+ "papaparse": "^5.4.1",
+ "posthog-js": "1.78.5",
"posthog-js-lite": "2.0.0-alpha5",
"prettier": "^2.8.8",
"prop-types": "^15.7.2",
@@ -154,7 +155,7 @@
"react-virtualized": "^9.22.5",
"require-from-string": "^2.0.2",
"resize-observer-polyfill": "^1.5.1",
- "rrweb": "^2.0.0-alpha.9",
+ "rrweb": "^2.0.0-alpha.11",
"sass": "^1.26.2",
"use-debounce": "^9.0.3",
"use-resize-observer": "^8.0.0",
@@ -206,6 +207,7 @@
"@types/jest-image-snapshot": "^6.1.0",
"@types/md5": "^2.3.0",
"@types/node": "^18.11.9",
+ "@types/papaparse": "^5.3.8",
"@types/pixelmatch": "^5.2.4",
"@types/pngjs": "^6.0.1",
"@types/query-selector-shadow-dom": "^1.0.0",
diff --git a/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Hidden-Mobile-1-chromium-linux.png b/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Hidden-Mobile-1-chromium-linux.png
index 3b185216c6362..8b8203a70dcf5 100644
Binary files a/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Hidden-Mobile-1-chromium-linux.png and b/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Hidden-Mobile-1-chromium-linux.png differ
diff --git a/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Shown-Mobile-1-chromium-linux.png b/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Shown-Mobile-1-chromium-linux.png
index 2ce1d7971c1e1..8b8203a70dcf5 100644
Binary files a/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Shown-Mobile-1-chromium-linux.png and b/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Shown-Mobile-1-chromium-linux.png differ
diff --git a/plugin-server/functional_tests/webhooks.test.ts b/plugin-server/functional_tests/webhooks.test.ts
index 0fb7155790034..82f1bfe9bf186 100644
--- a/plugin-server/functional_tests/webhooks.test.ts
+++ b/plugin-server/functional_tests/webhooks.test.ts
@@ -199,6 +199,7 @@ test.concurrent(`webhooks: fires zapier REST webhook`, async () => {
properties: {
$creator_event_uuid: eventUuid,
$initial_current_url: 'http://localhost:8000',
+ $current_url: 'http://localhost:8000',
email: 't@t.com',
},
uuid: expect.any(String),
@@ -208,6 +209,7 @@ test.concurrent(`webhooks: fires zapier REST webhook`, async () => {
$sent_at: expect.any(String),
$set: {
email: 't@t.com',
+ $current_url: 'http://localhost:8000',
},
$set_once: {
$initial_current_url: 'http://localhost:8000',
diff --git a/plugin-server/package.json b/plugin-server/package.json
index be9bebdd9b2cc..e2d766f344bba 100644
--- a/plugin-server/package.json
+++ b/plugin-server/package.json
@@ -11,6 +11,7 @@
"start": "pnpm start:dist",
"start:dist": "BASE_DIR=.. node dist/index.js",
"start:dev": "NODE_ENV=dev BASE_DIR=.. nodemon --watch src/ --exec node -r @swc-node/register src/index.ts",
+ "start:devNoWatch": "NODE_ENV=dev BASE_DIR=.. node -r @swc-node/register src/index.ts",
"build": "pnpm clean && pnpm compile",
"clean": "rm -rf dist/*",
"typescript:compile": "tsc -b",
diff --git a/plugin-server/src/config/config.ts b/plugin-server/src/config/config.ts
index ef98937b81bff..f3245ce62232e 100644
--- a/plugin-server/src/config/config.ts
+++ b/plugin-server/src/config/config.ts
@@ -7,6 +7,8 @@ import {
KAFKA_EVENTS_PLUGIN_INGESTION_OVERFLOW,
} from './kafka-topics'
+export const DEFAULT_HTTP_SERVER_PORT = 6738
+
export const defaultConfig = overrideWithEnv(getDefaultConfig())
export function getDefaultConfig(): PluginsServerConfig {
@@ -44,6 +46,7 @@ export function getDefaultConfig(): PluginsServerConfig {
KAFKA_SASL_PASSWORD: undefined,
KAFKA_CLIENT_RACK: undefined,
KAFKA_CONSUMPTION_USE_RDKAFKA: false, // Transitional setting, ignored for consumers that only support one library
+ KAFKA_CONSUMPTION_RDKAFKA_COOPERATIVE_REBALANCE: true, // If true, use the cooperative rebalance strategy, otherwise uses the default ('range,roundrobin')
KAFKA_CONSUMPTION_MAX_BYTES: 10_485_760, // Default value for kafkajs
KAFKA_CONSUMPTION_MAX_BYTES_PER_PARTITION: 1_048_576, // Default value for kafkajs, must be bigger than message size
KAFKA_CONSUMPTION_MAX_WAIT_MS: 1_000, // Down from the 5s default for kafkajs
@@ -74,6 +77,7 @@ export function getDefaultConfig(): PluginsServerConfig {
SENTRY_DSN: null,
SENTRY_PLUGIN_SERVER_TRACING_SAMPLE_RATE: 0,
SENTRY_PLUGIN_SERVER_PROFILING_SAMPLE_RATE: 0,
+ HTTP_SERVER_PORT: DEFAULT_HTTP_SERVER_PORT,
STATSD_HOST: null,
STATSD_PORT: 8125,
STATSD_PREFIX: 'plugin-server.',
@@ -116,6 +120,7 @@ export function getDefaultConfig(): PluginsServerConfig {
OBJECT_STORAGE_SECRET_ACCESS_KEY: 'object_storage_root_password',
OBJECT_STORAGE_BUCKET: 'posthog',
PLUGIN_SERVER_MODE: null,
+ PLUGIN_LOAD_SEQUENTIALLY: false,
KAFKAJS_LOG_LEVEL: 'WARN',
HISTORICAL_EXPORTS_ENABLED: true,
HISTORICAL_EXPORTS_MAX_RETRY_COUNT: 15,
@@ -126,6 +131,12 @@ export function getDefaultConfig(): PluginsServerConfig {
USE_KAFKA_FOR_SCHEDULED_TASKS: true,
CLOUD_DEPLOYMENT: 'default', // Used as a Sentry tag
+ STARTUP_PROFILE_DURATION_SECONDS: 300, // 5 minutes
+ STARTUP_PROFILE_CPU: false,
+ STARTUP_PROFILE_HEAP: false,
+ STARTUP_PROFILE_HEAP_INTERVAL: 512 * 1024, // default v8 value
+ STARTUP_PROFILE_HEAP_DEPTH: 16, // default v8 value
+
SESSION_RECORDING_KAFKA_HOSTS: undefined,
SESSION_RECORDING_KAFKA_SECURITY_PROTOCOL: undefined,
SESSION_RECORDING_KAFKA_BATCH_SIZE: 500,
diff --git a/plugin-server/src/kafka/batch-consumer.ts b/plugin-server/src/kafka/batch-consumer.ts
index a82aed8861098..03c9e2de6db37 100644
--- a/plugin-server/src/kafka/batch-consumer.ts
+++ b/plugin-server/src/kafka/batch-consumer.ts
@@ -1,11 +1,12 @@
import { GlobalConfig, KafkaConsumer, Message } from 'node-rdkafka-acosom'
-import { exponentialBuckets, Histogram } from 'prom-client'
+import { exponentialBuckets, Gauge, Histogram } from 'prom-client'
import { status } from '../utils/status'
import { createAdminClient, ensureTopicExists } from './admin'
import {
commitOffsetsForMessages,
consumeMessages,
+ countPartitionsPerTopic,
createKafkaConsumer,
disconnectConsumer,
instrumentConsumerMetrics,
@@ -32,6 +33,7 @@ export const startBatchConsumer = async ({
topicCreationTimeoutMs,
eachBatch,
autoCommit = true,
+ cooperativeRebalance = true,
queuedMinMessages = 100000,
}: {
connectionConfig: GlobalConfig
@@ -47,6 +49,7 @@ export const startBatchConsumer = async ({
topicCreationTimeoutMs: number
eachBatch: (messages: Message[]) => Promise<void>
autoCommit?: boolean
+ cooperativeRebalance?: boolean
queuedMinMessages?: number
}): Promise<BatchConsumer> => {
// Starts consuming from `topic` in batches of `fetchBatchSize` messages,
@@ -112,12 +115,12 @@ export const startBatchConsumer = async ({
// https://www.confluent.io/en-gb/blog/incremental-cooperative-rebalancing-in-kafka/
// for details on the advantages of this rebalancing strategy as well as
// how it works.
- 'partition.assignment.strategy': 'cooperative-sticky',
+ 'partition.assignment.strategy': cooperativeRebalance ? 'cooperative-sticky' : 'range,roundrobin',
rebalance_cb: true,
offset_commit_cb: true,
})
- instrumentConsumerMetrics(consumer, groupId)
+ instrumentConsumerMetrics(consumer, groupId, cooperativeRebalance)
let isShuttingDown = false
let lastLoopTime = Date.now()
@@ -181,6 +184,10 @@ export const startBatchConsumer = async ({
continue
}
+ for (const [topic, count] of countPartitionsPerTopic(consumer.assignments())) {
+ kafkaAbsolutePartitionCount.labels({ topic }).set(count)
+ }
+
status.debug('🔁', 'main_loop_consumed', { messagesLength: messages.length })
if (!messages.length) {
status.debug('🔁', 'main_loop_empty_batch', { cause: 'empty' })
@@ -278,3 +285,9 @@ const consumedMessageSizeBytes = new Histogram({
labelNames: ['topic', 'groupId', 'messageType'],
buckets: exponentialBuckets(1, 8, 4).map((bucket) => bucket * 1024),
})
+
+const kafkaAbsolutePartitionCount = new Gauge({
+ name: 'kafka_absolute_partition_count',
+ help: 'Number of partitions assigned to this consumer. (Absolute value from the consumer state.)',
+ labelNames: ['topic'],
+})
diff --git a/plugin-server/src/kafka/consumer.ts b/plugin-server/src/kafka/consumer.ts
index f3b3a91d2be44..62b8e951ebc9f 100644
--- a/plugin-server/src/kafka/consumer.ts
+++ b/plugin-server/src/kafka/consumer.ts
@@ -1,4 +1,5 @@
import {
+ Assignment,
ClientMetrics,
CODES,
ConsumerGlobalConfig,
@@ -9,7 +10,7 @@ import {
TopicPartitionOffset,
} from 'node-rdkafka-acosom'
-import { latestOffsetTimestampGauge } from '../main/ingestion-queues/metrics'
+import { kafkaRebalancePartitionCount, latestOffsetTimestampGauge } from '../main/ingestion-queues/metrics'
import { status } from '../utils/status'
export const createKafkaConsumer = async (config: ConsumerGlobalConfig) => {
@@ -54,7 +55,24 @@ export const createKafkaConsumer = async (config: ConsumerGlobalConfig) => {
})
})
}
-export const instrumentConsumerMetrics = (consumer: RdKafkaConsumer, groupId: string) => {
+
+export function countPartitionsPerTopic(assignments: Assignment[]): Map<string, number> {
+ const partitionsPerTopic = new Map()
+ for (const assignment of assignments) {
+ if (partitionsPerTopic.has(assignment.topic)) {
+ partitionsPerTopic.set(assignment.topic, partitionsPerTopic.get(assignment.topic) + 1)
+ } else {
+ partitionsPerTopic.set(assignment.topic, 1)
+ }
+ }
+ return partitionsPerTopic
+}
+
+export const instrumentConsumerMetrics = (
+ consumer: RdKafkaConsumer,
+ groupId: string,
+ cooperativeRebalance: boolean
+) => {
// For each message consumed, we record the latest timestamp processed for
// each partition assigned to this consumer group member. This consumer
// should only provide metrics for the partitions that are assigned to it,
@@ -79,6 +97,7 @@ export const instrumentConsumerMetrics = (consumer: RdKafkaConsumer, groupId: st
//
// TODO: add other relevant metrics here
// TODO: expose the internal librdkafka metrics as well.
+ const strategyString = cooperativeRebalance ? 'cooperative' : 'eager'
consumer.on('rebalance', (error: LibrdKafkaError, assignments: TopicPartition[]) => {
/**
* see https://github.com/Blizzard/node-rdkafka#rebalancing errors are used to signal
@@ -88,9 +107,23 @@ export const instrumentConsumerMetrics = (consumer: RdKafkaConsumer, groupId: st
* And when the balancing is completed the new assignments are received with ERR__ASSIGN_PARTITIONS
*/
if (error.code === CODES.ERRORS.ERR__ASSIGN_PARTITIONS) {
- status.info('📝️', 'librdkafka rebalance, partitions assigned', { assignments })
+ status.info('📝️', `librdkafka ${strategyString} rebalance, partitions assigned`, { assignments })
+ for (const [topic, count] of countPartitionsPerTopic(assignments)) {
+ if (cooperativeRebalance) {
+ kafkaRebalancePartitionCount.labels({ topic: topic }).inc(count)
+ } else {
+ kafkaRebalancePartitionCount.labels({ topic: topic }).set(count)
+ }
+ }
} else if (error.code === CODES.ERRORS.ERR__REVOKE_PARTITIONS) {
- status.info('📝️', 'librdkafka rebalance started, partitions revoked', { assignments })
+ status.info('📝️', `librdkafka ${strategyString} rebalance started, partitions revoked`, { assignments })
+ for (const [topic, count] of countPartitionsPerTopic(assignments)) {
+ if (cooperativeRebalance) {
+ kafkaRebalancePartitionCount.labels({ topic: topic }).dec(count)
+ } else {
+ kafkaRebalancePartitionCount.labels({ topic: topic }).set(count)
+ }
+ }
} else {
// We had a "real" error
status.error('⚠️', 'rebalance_error', { error })
diff --git a/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-onevent.ts b/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-onevent.ts
index a97d034778ac4..4d12925f0ce6b 100644
--- a/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-onevent.ts
+++ b/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-onevent.ts
@@ -4,20 +4,18 @@ import { EachBatchPayload, KafkaMessage } from 'kafkajs'
import { RawClickHouseEvent } from '../../../types'
import { convertToIngestionEvent } from '../../../utils/event'
import { status } from '../../../utils/status'
-import { groupIntoBatches } from '../../../utils/utils'
import { runInstrumentedFunction } from '../../utils'
import { KafkaJSIngestionConsumer } from '../kafka-queue'
import { eventDroppedCounter, latestOffsetTimestampGauge } from '../metrics'
+import { eachBatchHandlerHelper } from './each-batch-webhooks'
// Must require as `tsc` strips unused `import` statements and just requiring this seems to init some globals
require('@sentry/tracing')
export async function eachMessageAppsOnEventHandlers(
- message: KafkaMessage,
+ clickHouseEvent: RawClickHouseEvent,
queue: KafkaJSIngestionConsumer
): Promise<void> {
- const clickHouseEvent = JSON.parse(message.value!.toString()) as RawClickHouseEvent
-
const pluginConfigs = queue.pluginsServer.pluginConfigsPerTeam.get(clickHouseEvent.team_id)
if (pluginConfigs) {
// Elements parsing can be extremely slow, so we skip it for some plugins
@@ -50,7 +48,14 @@ export async function eachBatchAppsOnEventHandlers(
payload: EachBatchPayload,
queue: KafkaJSIngestionConsumer
): Promise {
- await eachBatch(payload, queue, eachMessageAppsOnEventHandlers, groupIntoBatches, 'async_handlers_on_event')
+ await eachBatchHandlerHelper(
+ payload,
+ (teamId) => queue.pluginsServer.pluginConfigsPerTeam.has(teamId),
+ (event) => eachMessageAppsOnEventHandlers(event, queue),
+ queue.pluginsServer.statsd,
+ queue.pluginsServer.WORKER_CONCURRENCY * queue.pluginsServer.TASKS_PER_WORKER,
+ 'on_event'
+ )
}
export async function eachBatch(
diff --git a/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-webhooks.ts b/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-webhooks.ts
index 427297a613b1b..fb671f0cd9633 100644
--- a/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-webhooks.ts
+++ b/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-webhooks.ts
@@ -17,10 +17,10 @@ import { eventDroppedCounter, latestOffsetTimestampGauge } from '../metrics'
require('@sentry/tracing')
// exporting only for testing
-export function groupIntoBatchesWebhooks(
+export function groupIntoBatchesByUsage(
array: KafkaMessage[],
batchSize: number,
- actionMatcher: ActionMatcher
+ shouldProcess: (teamId: number) => boolean
): { eventBatch: RawClickHouseEvent[]; lastOffset: string; lastTimestamp: string }[] {
// Most events will not trigger a webhook call, so we want to filter them out as soon as possible
// to achieve the highest effective concurrency when executing the actual HTTP calls.
@@ -32,7 +32,7 @@ export function groupIntoBatchesWebhooks(
let currentCount = 0
array.forEach((message, index) => {
const clickHouseEvent = JSON.parse(message.value!.toString()) as RawClickHouseEvent
- if (actionMatcher.hasWebhooks(clickHouseEvent.team_id)) {
+ if (shouldProcess(clickHouseEvent.team_id)) {
currentBatch.push(clickHouseEvent)
currentCount++
} else {
@@ -58,18 +58,36 @@ export async function eachBatchWebhooksHandlers(
hookCannon: HookCommander,
statsd: StatsD | undefined,
concurrency: number
+): Promise<void> {
+ await eachBatchHandlerHelper(
+ payload,
+ (teamId) => actionMatcher.hasWebhooks(teamId),
+ (event) => eachMessageWebhooksHandlers(event, actionMatcher, hookCannon, statsd),
+ statsd,
+ concurrency,
+ 'webhooks'
+ )
+}
+
+export async function eachBatchHandlerHelper(
+ payload: EachBatchPayload,
+ shouldProcess: (teamId: number) => boolean,
+ eachMessageHandler: (event: RawClickHouseEvent) => Promise<void>,
+ statsd: StatsD | undefined,
+ concurrency: number,
+ stats_key: string
): Promise<void> {
// similar to eachBatch function in each-batch.ts, but without the dependency on the KafkaJSIngestionConsumer
// & handling the different batching return type
- const key = 'async_handlers_webhooks'
+ const key = `async_handlers_${stats_key}`
const batchStartTimer = new Date()
const loggingKey = `each_batch_${key}`
const { batch, resolveOffset, heartbeat, commitOffsetsIfNecessary, isRunning, isStale }: EachBatchPayload = payload
- const transaction = Sentry.startTransaction({ name: `eachBatchWebhooks` })
+ const transaction = Sentry.startTransaction({ name: `eachBatch${stats_key}` })
try {
- const batchesWithOffsets = groupIntoBatchesWebhooks(batch.messages, concurrency, actionMatcher)
+ const batchesWithOffsets = groupIntoBatchesByUsage(batch.messages, concurrency, shouldProcess)
statsd?.histogram('ingest_event_batching.input_length', batch.messages.length, { key: key })
statsd?.histogram('ingest_event_batching.batch_count', batchesWithOffsets.length, { key: key })
@@ -88,9 +106,7 @@ export async function eachBatchWebhooksHandlers(
}
await Promise.all(
- eventBatch.map((event: RawClickHouseEvent) =>
- eachMessageWebhooksHandlers(event, actionMatcher, hookCannon, statsd).finally(() => heartbeat())
- )
+ eventBatch.map((event: RawClickHouseEvent) => eachMessageHandler(event).finally(() => heartbeat()))
)
resolveOffset(lastOffset)
diff --git a/plugin-server/src/main/ingestion-queues/kafka-queue.ts b/plugin-server/src/main/ingestion-queues/kafka-queue.ts
index da51173e0507f..7989efd4b356a 100644
--- a/plugin-server/src/main/ingestion-queues/kafka-queue.ts
+++ b/plugin-server/src/main/ingestion-queues/kafka-queue.ts
@@ -255,6 +255,7 @@ export class IngestionConsumer {
consumerMaxWaitMs: this.pluginsServer.KAFKA_CONSUMPTION_MAX_WAIT_MS,
fetchBatchSize: 500,
topicCreationTimeoutMs: this.pluginsServer.KAFKA_TOPIC_CREATION_TIMEOUT_MS,
+ cooperativeRebalance: this.pluginsServer.KAFKA_CONSUMPTION_RDKAFKA_COOPERATIVE_REBALANCE,
eachBatch: (payload) => this.eachBatchConsumer(payload),
})
this.consumerReady = true
diff --git a/plugin-server/src/main/ingestion-queues/metrics.ts b/plugin-server/src/main/ingestion-queues/metrics.ts
index 97188247cbefa..099832e1ea14c 100644
--- a/plugin-server/src/main/ingestion-queues/metrics.ts
+++ b/plugin-server/src/main/ingestion-queues/metrics.ts
@@ -2,6 +2,12 @@
import { Counter, Gauge } from 'prom-client'
+export const kafkaRebalancePartitionCount = new Gauge({
+ name: 'kafka_rebalance_partition_count',
+ help: 'Number of partitions assigned to this consumer. (Calculated during rebalance events.)',
+ labelNames: ['topic'],
+})
+
export const latestOffsetTimestampGauge = new Gauge({
name: 'latest_processed_timestamp_ms',
help: 'Timestamp of the latest offset that has been committed.',
diff --git a/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts b/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts
index bf0a242496fd3..c9dacf1fabdef 100644
--- a/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts
+++ b/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts
@@ -105,16 +105,6 @@ export class ReplayEventsIngester {
return drop('producer_not_ready')
}
- if (event.replayIngestionConsumer !== 'v2') {
- eventDroppedCounter
- .labels({
- event_type: 'session_recordings_replay_events',
- drop_cause: 'not_target_consumer',
- })
- .inc()
- return
- }
-
if (
await this.offsetHighWaterMarker.isBelowHighWaterMark(
event.metadata,
diff --git a/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v2.ts b/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v2.ts
index 3a6e9b291c602..8e0473df357fe 100644
--- a/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v2.ts
+++ b/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v2.ts
@@ -279,7 +279,6 @@ export class SessionRecordingIngesterV2 {
session_id: event.properties?.$session_id,
window_id: event.properties?.$window_id,
events: event.properties.$snapshot_items,
- replayIngestionConsumer: event.properties?.$snapshot_consumer ?? 'v1',
}
return recordingMessage
diff --git a/plugin-server/src/main/ingestion-queues/session-recording/types.ts b/plugin-server/src/main/ingestion-queues/session-recording/types.ts
index c29c1ad81f1db..6bff13bbde468 100644
--- a/plugin-server/src/main/ingestion-queues/session-recording/types.ts
+++ b/plugin-server/src/main/ingestion-queues/session-recording/types.ts
@@ -14,8 +14,6 @@ export type IncomingRecordingMessage = {
session_id: string
window_id?: string
events: RRWebEvent[]
- // NOTE: This is only for migrating from one consumer to the other
- replayIngestionConsumer: 'v1' | 'v2'
}
// This is the incoming message from Kafka
diff --git a/plugin-server/src/main/pluginsServer.ts b/plugin-server/src/main/pluginsServer.ts
index a89355cb51c98..08fc4c6ed0e66 100644
--- a/plugin-server/src/main/pluginsServer.ts
+++ b/plugin-server/src/main/pluginsServer.ts
@@ -1,10 +1,12 @@
import * as Sentry from '@sentry/node'
+import fs from 'fs'
import { Server } from 'http'
import { CompressionCodecs, CompressionTypes, Consumer, KafkaJSProtocolError } from 'kafkajs'
// @ts-expect-error no type definitions
import SnappyCodec from 'kafkajs-snappy'
import * as schedule from 'node-schedule'
import { Counter } from 'prom-client'
+import v8Profiler from 'v8-profiler-next'
import { getPluginServerCapabilities } from '../capabilities'
import { defaultConfig, sessionRecordingConsumerConfig } from '../config/config'
@@ -63,6 +65,7 @@ export async function startPluginsServer(
status.updatePrompt(serverConfig.PLUGIN_SERVER_MODE)
status.info('ℹ️', `${serverConfig.WORKER_CONCURRENCY} workers, ${serverConfig.TASKS_PER_WORKER} tasks per worker`)
+ runStartupProfiles(serverConfig)
// Structure containing initialized clients for Postgres, Kafka, Redis, etc.
let hub: Hub | undefined
@@ -444,7 +447,7 @@ export async function startPluginsServer(
}
if (capabilities.http) {
- httpServer = createHttpServer(healthChecks, analyticsEventsIngestionConsumer)
+ httpServer = createHttpServer(serverConfig.HTTP_SERVER_PORT, healthChecks, analyticsEventsIngestionConsumer)
}
// If session recordings consumer is defined, then join it. If join
@@ -508,3 +511,26 @@ const kafkaProtocolErrors = new Counter({
help: 'Kafka protocol errors encountered, by type',
labelNames: ['type', 'code'],
})
+
+function runStartupProfiles(config: PluginsServerConfig) {
+ if (config.STARTUP_PROFILE_CPU) {
+ status.info('🩺', `Collecting cpu profile...`)
+ v8Profiler.setGenerateType(1)
+ v8Profiler.startProfiling('startup', true)
+ setTimeout(() => {
+ const profile = v8Profiler.stopProfiling('startup')
+ fs.writeFileSync('./startup.cpuprofile', JSON.stringify(profile))
+ status.info('🩺', `Wrote cpu profile to disk`)
+ profile.delete()
+ }, config.STARTUP_PROFILE_DURATION_SECONDS * 1000)
+ }
+ if (config.STARTUP_PROFILE_HEAP) {
+ status.info('🩺', `Collecting heap profile...`)
+ v8Profiler.startSamplingHeapProfiling(config.STARTUP_PROFILE_HEAP_INTERVAL, config.STARTUP_PROFILE_HEAP_DEPTH)
+ setTimeout(() => {
+ const profile = v8Profiler.stopSamplingHeapProfiling()
+ fs.writeFileSync('./startup.heapprofile', JSON.stringify(profile))
+ status.info('🩺', `Wrote heap profile to disk`)
+ }, config.STARTUP_PROFILE_DURATION_SECONDS * 1000)
+ }
+}
diff --git a/plugin-server/src/main/services/http-server.ts b/plugin-server/src/main/services/http-server.ts
index bccee47d21e2f..89716d23366eb 100644
--- a/plugin-server/src/main/services/http-server.ts
+++ b/plugin-server/src/main/services/http-server.ts
@@ -5,13 +5,12 @@ import * as prometheus from 'prom-client'
import { status } from '../../utils/status'
-export const HTTP_SERVER_PORT = 6738
-
prometheus.collectDefaultMetrics()
const v8Profiler = require('v8-profiler-next')
v8Profiler.setGenerateType(1)
export function createHttpServer(
+ port: number,
healthChecks: { [service: string]: () => Promise<boolean> | boolean },
analyticsEventsIngestionConsumer?: KafkaJSIngestionConsumer | IngestionConsumer
): Server {
@@ -47,7 +46,7 @@ export function createHttpServer(
// }
// }
const checkResults = await Promise.all(
- // Note that we do not ues `Promise.allSettled` here so we can
+ // Note that we do not use `Promise.allSettled` here so we can
// assume that all promises have resolved. If there was a
// rejected promise, the http server should catch it and return
// a 500 status code.
@@ -118,8 +117,8 @@ export function createHttpServer(
}
})
- server.listen(HTTP_SERVER_PORT, () => {
- status.info('🩺', `Status server listening on port ${HTTP_SERVER_PORT}`)
+ server.listen(port, () => {
+ status.info('🩺', `Status server listening on port ${port}`)
})
return server
@@ -155,8 +154,13 @@ function exportProfile(req: IncomingMessage, res: ServerResponse) {
}, durationSeconds * 1000)
break
case 'heap':
+ // Additional params for sampling heap profile, higher precision means bigger profile.
+ // Defaults are taken from https://v8.github.io/api/head/classv8_1_1HeapProfiler.html
+ const interval = url.searchParams.get('interval') ? parseInt(url.searchParams.get('interval')!) : 512 * 1024
+ const depth = url.searchParams.get('depth') ? parseInt(url.searchParams.get('depth')!) : 16
+
sendHeaders('heapprofile')
- v8Profiler.startSamplingHeapProfiling()
+ v8Profiler.startSamplingHeapProfiling(interval, depth)
setTimeout(() => {
outputProfileResult(res, type, v8Profiler.stopSamplingHeapProfiling())
}, durationSeconds * 1000)
diff --git a/plugin-server/src/types.ts b/plugin-server/src/types.ts
index 62463957ad249..9cc7fbfa21687 100644
--- a/plugin-server/src/types.ts
+++ b/plugin-server/src/types.ts
@@ -21,7 +21,7 @@ import { VM } from 'vm2'
import { ObjectStorage } from './main/services/object_storage'
import { DB } from './utils/db/db'
import { KafkaProducerWrapper } from './utils/db/kafka-producer-wrapper'
-import { PostgresRouter } from './utils/db/postgres' /** Re-export Element from scaffolding, for backwards compat. */
+import { PostgresRouter } from './utils/db/postgres'
import { UUID } from './utils/utils'
import { AppMetrics } from './worker/ingestion/app-metrics'
import { EventPipelineResult } from './worker/ingestion/event-pipeline/runner'
@@ -33,8 +33,7 @@ import { RootAccessManager } from './worker/vm/extensions/helpers/root-acess-man
import { LazyPluginVM } from './worker/vm/lazy'
import { PromiseManager } from './worker/vm/promise-manager'
-/** Re-export Element from scaffolding, for backwards compat. */
-export { Element } from '@posthog/plugin-scaffold'
+export { Element } from '@posthog/plugin-scaffold' // Re-export Element from scaffolding, for backwards compat.
type Brand<K, T> = K & { __brand: T }
@@ -130,6 +129,7 @@ export interface PluginsServerConfig {
KAFKA_SASL_PASSWORD: string | undefined
KAFKA_CLIENT_RACK: string | undefined
KAFKA_CONSUMPTION_USE_RDKAFKA: boolean
+ KAFKA_CONSUMPTION_RDKAFKA_COOPERATIVE_REBALANCE: boolean
KAFKA_CONSUMPTION_MAX_BYTES: number
KAFKA_CONSUMPTION_MAX_BYTES_PER_PARTITION: number
KAFKA_CONSUMPTION_MAX_WAIT_MS: number // fetch.wait.max.ms rdkafka parameter
@@ -151,6 +151,7 @@ export interface PluginsServerConfig {
SENTRY_DSN: string | null
SENTRY_PLUGIN_SERVER_TRACING_SAMPLE_RATE: number // Rate of tracing in plugin server (between 0 and 1)
SENTRY_PLUGIN_SERVER_PROFILING_SAMPLE_RATE: number // Rate of profiling in plugin server (between 0 and 1)
+ HTTP_SERVER_PORT: number
STATSD_HOST: string | null
STATSD_PORT: number
STATSD_PREFIX: string
@@ -189,6 +190,7 @@ export interface PluginsServerConfig {
OBJECT_STORAGE_SECRET_ACCESS_KEY: string
OBJECT_STORAGE_BUCKET: string // the object storage bucket name
PLUGIN_SERVER_MODE: PluginServerMode | null
+ PLUGIN_LOAD_SEQUENTIALLY: boolean // could help with reducing memory usage spikes on startup
KAFKAJS_LOG_LEVEL: 'NOTHING' | 'DEBUG' | 'INFO' | 'WARN' | 'ERROR'
HISTORICAL_EXPORTS_ENABLED: boolean // enables historical exports for export apps
HISTORICAL_EXPORTS_MAX_RETRY_COUNT: number
@@ -201,6 +203,13 @@ export interface PluginsServerConfig {
EVENT_OVERFLOW_BUCKET_REPLENISH_RATE: number
CLOUD_DEPLOYMENT: string
+ // dump profiles to disk, covering the first N seconds of runtime
+ STARTUP_PROFILE_DURATION_SECONDS: number
+ STARTUP_PROFILE_CPU: boolean
+ STARTUP_PROFILE_HEAP: boolean
+ STARTUP_PROFILE_HEAP_INTERVAL: number
+ STARTUP_PROFILE_HEAP_DEPTH: number
+
// local directory might be a volume mount or a directory on disk (e.g. in local dev)
SESSION_RECORDING_LOCAL_DIRECTORY: string
SESSION_RECORDING_MAX_BUFFER_AGE_SECONDS: number
diff --git a/plugin-server/src/utils/db/hub.ts b/plugin-server/src/utils/db/hub.ts
index 710a163752a6b..4e37d8a5cd715 100644
--- a/plugin-server/src/utils/db/hub.ts
+++ b/plugin-server/src/utils/db/hub.ts
@@ -91,7 +91,6 @@ export async function createHub(
: undefined,
rejectUnauthorized: serverConfig.CLICKHOUSE_CA ? false : undefined,
})
- await clickhouse.querying('SELECT 1') // test that the connection works
status.info('👍', `ClickHouse ready`)
status.info('🤔', `Connecting to Kafka...`)
diff --git a/plugin-server/src/utils/db/utils.ts b/plugin-server/src/utils/db/utils.ts
index 49db8914194f6..9e4eb0a3c11b7 100644
--- a/plugin-server/src/utils/db/utils.ts
+++ b/plugin-server/src/utils/db/utils.ts
@@ -39,7 +39,22 @@ export function timeoutGuard(
}, timeout)
}
-const campaignParams = new Set([
+const eventToPersonProperties = new Set([
+ // mobile params
+ '$app_build',
+ '$app_name',
+ '$app_namespace',
+ '$app_version',
+ // web params
+ '$browser',
+ '$browser_version',
+ '$device_type',
+ '$current_url',
+ '$pathname',
+ '$os',
+ '$referring_domain',
+ '$referrer',
+ // campaign params
'utm_source',
'utm_medium',
'utm_campaign',
@@ -50,31 +65,29 @@ const campaignParams = new Set([
'fbclid',
'msclkid',
])
-const initialParams = new Set([
- '$browser',
- '$browser_version',
- '$device_type',
- '$current_url',
- '$pathname',
- '$os',
- '$referring_domain',
- '$referrer',
-])
-const combinedParams = new Set([...campaignParams, ...initialParams])
/** If we get new UTM params, make sure we set those **/
export function personInitialAndUTMProperties(properties: Properties): Properties {
const propertiesCopy = { ...properties }
- const maybeSet = Object.entries(properties).filter(([key]) => campaignParams.has(key))
- const maybeSetInitial = Object.entries(properties)
- .filter(([key]) => combinedParams.has(key))
- .map(([key, value]) => [`$initial_${key.replace('$', '')}`, value])
- if (Object.keys(maybeSet).length > 0) {
+ const propertiesForPerson: [string, any][] = Object.entries(properties).filter(([key]) =>
+ eventToPersonProperties.has(key)
+ )
+
+ // all potential params are checked for $initial_ values and added to $set_once
+ const maybeSetOnce: [string, any][] = propertiesForPerson.map(([key, value]) => [
+ `$initial_${key.replace('$', '')}`,
+ value,
+ ])
+
+ // all found are also then added to $set
+ const maybeSet: [string, any][] = propertiesForPerson
+
+ if (maybeSet.length > 0) {
propertiesCopy.$set = { ...(properties.$set || {}), ...Object.fromEntries(maybeSet) }
}
- if (Object.keys(maybeSetInitial).length > 0) {
- propertiesCopy.$set_once = { ...(properties.$set_once || {}), ...Object.fromEntries(maybeSetInitial) }
+ if (maybeSetOnce.length > 0) {
+ propertiesCopy.$set_once = { ...(properties.$set_once || {}), ...Object.fromEntries(maybeSetOnce) }
}
return propertiesCopy
}
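
To make the behavioural change concrete, here is a small hedged example of what the rewritten personInitialAndUTMProperties is expected to return for matched properties, consistent with the regression test further down in this diff.

// Matched event properties are now mirrored into $set as well as $set_once.
personInitialAndUTMProperties({ $current_url: 'https://test.com', utm_source: 'newsletter' })
// => {
//      $current_url: 'https://test.com',
//      utm_source: 'newsletter',
//      $set: { $current_url: 'https://test.com', utm_source: 'newsletter' },
//      $set_once: { $initial_current_url: 'https://test.com', $initial_utm_source: 'newsletter' },
//    }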
diff --git a/plugin-server/src/utils/utils.ts b/plugin-server/src/utils/utils.ts
index 69c56640bf886..aace016721449 100644
--- a/plugin-server/src/utils/utils.ts
+++ b/plugin-server/src/utils/utils.ts
@@ -312,14 +312,6 @@ export function escapeClickHouseString(string: string): string {
return string.replace(/\\/g, '\\\\').replace(/'/g, "\\'")
}
-export function groupIntoBatches(array: T[], batchSize: number): T[][] {
- const batches = []
- for (let i = 0; i < array.length; i += batchSize) {
- batches.push(array.slice(i, i + batchSize))
- }
- return batches
-}
-
/** Standardize JS code used internally to form without extraneous indentation. Template literal function. */
export function code(strings: TemplateStringsArray): string {
const stringsConcat = strings.join('…')
diff --git a/plugin-server/src/worker/ingestion/person-state.ts b/plugin-server/src/worker/ingestion/person-state.ts
index e5f1327895cfa..72a82a07d0aec 100644
--- a/plugin-server/src/worker/ingestion/person-state.ts
+++ b/plugin-server/src/worker/ingestion/person-state.ts
@@ -17,9 +17,17 @@ import { castTimestampOrNow, UUIDT } from '../../utils/utils'
import { captureIngestionWarning } from './utils'
const MAX_FAILED_PERSON_MERGE_ATTEMPTS = 3
+
+export const mergeFinalFailuresCounter = new Counter({
+ name: 'person_merge_final_failure_total',
+ help: 'Number of person merge final failures.',
+})
+
+// used to prevent identify from being used with generic IDs
+// that we can safely assume stem from a bug or mistake
// used to prevent identify from being used with generic IDs
// that we can safely assume stem from a bug or mistake
-const CASE_INSENSITIVE_ILLEGAL_IDS = new Set([
+const BARE_CASE_INSENSITIVE_ILLEGAL_IDS = [
'anonymous',
'guest',
'distinctid',
@@ -30,17 +38,34 @@ const CASE_INSENSITIVE_ILLEGAL_IDS = new Set([
'undefined',
'true',
'false',
-])
-
-export const mergeFinalFailuresCounter = new Counter({
- name: 'person_merge_final_failure_total',
- help: 'Number of person merge final failures.',
-})
-
-const CASE_SENSITIVE_ILLEGAL_IDS = new Set(['[object Object]', 'NaN', 'None', 'none', 'null', '0', 'undefined'])
+]
+
+const BARE_CASE_SENSITIVE_ILLEGAL_IDS = ['[object Object]', 'NaN', 'None', 'none', 'null', '0', 'undefined']
+
+// we have seen illegal ids received but wrapped in double quotes
+// to protect ourselves from this we'll add the single- and double-quoted versions of the illegal ids
+const singleQuoteIds = (ids: string[]) => ids.map((id) => `'${id}'`)
+const doubleQuoteIds = (ids: string[]) => ids.map((id) => `"${id}"`)
+
+// some ids are illegal regardless of casing
+// while others are illegal only when cased
+// so, for example, we want to forbid `NaN` but not `nan`
+// but, we will forbid `uNdEfInEd` and `undefined`
+const CASE_INSENSITIVE_ILLEGAL_IDS = new Set(
+ BARE_CASE_INSENSITIVE_ILLEGAL_IDS.concat(singleQuoteIds(BARE_CASE_INSENSITIVE_ILLEGAL_IDS)).concat(
+ doubleQuoteIds(BARE_CASE_INSENSITIVE_ILLEGAL_IDS)
+ )
+)
+
+const CASE_SENSITIVE_ILLEGAL_IDS = new Set(
+ BARE_CASE_SENSITIVE_ILLEGAL_IDS.concat(singleQuoteIds(BARE_CASE_SENSITIVE_ILLEGAL_IDS)).concat(
+ doubleQuoteIds(BARE_CASE_SENSITIVE_ILLEGAL_IDS)
+ )
+)
const isDistinctIdIllegal = (id: string): boolean => {
- return id.trim() === '' || CASE_INSENSITIVE_ILLEGAL_IDS.has(id.toLowerCase()) || CASE_SENSITIVE_ILLEGAL_IDS.has(id)
+ const trimmed = id.trim()
+ return trimmed === '' || CASE_INSENSITIVE_ILLEGAL_IDS.has(id.toLowerCase()) || CASE_SENSITIVE_ILLEGAL_IDS.has(id)
}
// This class is responsible for creating/updating a single person through the process-event pipeline
@@ -245,7 +270,7 @@ export class PersonState {
this.teamId,
this.timestamp
)
- } else if (this.event.event === '$identify' && this.eventProperties['$anon_distinct_id']) {
+ } else if (this.event.event === '$identify' && '$anon_distinct_id' in this.eventProperties) {
return await this.merge(
String(this.eventProperties['$anon_distinct_id']),
this.distinctId,
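
A hedged illustration of what the expanded illegal-id check accepts and rejects, inferred from the constants above and the new test cases later in this diff:

isDistinctIdIllegal('undefined')      // true  - bare id, matched case-insensitively
isDistinctIdIllegal('"undefined"')    // true  - double-quoted variants are now also blocked
isDistinctIdIllegal("'NaN'")          // true  - single-quoted, case-sensitive variant
isDistinctIdIllegal('nan')            // false - 'NaN' is only illegal with that exact casing
isDistinctIdIllegal('   ')            // true  - whitespace-only ids are rejected after trimming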
diff --git a/plugin-server/src/worker/ingestion/process-event.ts b/plugin-server/src/worker/ingestion/process-event.ts
index 82de215adf5e2..44327a6a8bfd5 100644
--- a/plugin-server/src/worker/ingestion/process-event.ts
+++ b/plugin-server/src/worker/ingestion/process-event.ts
@@ -272,7 +272,7 @@ export interface SummarizedSessionRecordingEvent {
team_id: number
distinct_id: string
session_id: string
- first_url: string | undefined
+ first_url: string | null
click_count: number
keypress_count: number
mouse_activity_count: number
@@ -281,6 +281,8 @@ export interface SummarizedSessionRecordingEvent {
console_warn_count: number
console_error_count: number
size: number
+ event_count: number
+ message_count: number
}
export const createSessionReplayEvent = (
@@ -311,7 +313,7 @@ export const createSessionReplayEvent = (
let consoleLogCount = 0
let consoleWarnCount = 0
let consoleErrorCount = 0
- let url: string | undefined = undefined
+ let url: string | null = null
events.forEach((event) => {
if (event.type === 3) {
mouseActivity += 1
@@ -322,7 +324,7 @@ export const createSessionReplayEvent = (
keypressCount += 1
}
}
- if (!!event.data?.href?.trim().length && url === undefined) {
+ if (url === null && !!event.data?.href?.trim().length) {
url = event.data.href
}
if (event.type === 6 && event.data?.plugin === 'rrweb/console@1') {
@@ -339,22 +341,26 @@ export const createSessionReplayEvent = (
const activeTime = activeMilliseconds(events)
+ // NB forces types to be correct e.g. by truncating or rounding
+ // to ensure we don't send floats when we should send an integer
const data: SummarizedSessionRecordingEvent = {
uuid,
team_id: team_id,
- distinct_id: distinct_id,
+ distinct_id: String(distinct_id),
session_id: session_id,
first_timestamp: timestamps[0],
last_timestamp: timestamps[timestamps.length - 1],
- click_count: clickCount,
- keypress_count: keypressCount,
- mouse_activity_count: mouseActivity,
+ click_count: Math.trunc(clickCount),
+ keypress_count: Math.trunc(keypressCount),
+ mouse_activity_count: Math.trunc(mouseActivity),
first_url: url,
- active_milliseconds: activeTime,
- console_log_count: consoleLogCount,
- console_warn_count: consoleWarnCount,
- console_error_count: consoleErrorCount,
- size: Buffer.byteLength(JSON.stringify(events), 'utf8'),
+ active_milliseconds: Math.round(activeTime),
+ console_log_count: Math.trunc(consoleLogCount),
+ console_warn_count: Math.trunc(consoleWarnCount),
+ console_error_count: Math.trunc(consoleErrorCount),
+ size: Math.trunc(Buffer.byteLength(JSON.stringify(events), 'utf8')),
+ event_count: Math.trunc(events.length),
+ message_count: 1,
}
return data
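
A brief, hedged illustration of why the explicit coercion above matters when the destination columns expect integers and strings (the values here are made up for illustration):

Math.trunc(3.0)      // 3    - counts that may arrive as floats become plain integers
Math.round(1234.56)  // 1235 - active_milliseconds is rounded rather than truncated
String(42)           // '42' - distinct_id is always serialised as a string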
diff --git a/plugin-server/src/worker/plugins/setup.ts b/plugin-server/src/worker/plugins/setup.ts
index 2ff72c9a899aa..4d2d2e33e8807 100644
--- a/plugin-server/src/worker/plugins/setup.ts
+++ b/plugin-server/src/worker/plugins/setup.ts
@@ -26,8 +26,11 @@ export async function setupPlugins(hub: Hub): Promise<void> {
pluginConfig.vm = statelessVms[plugin.id]
} else {
pluginConfig.vm = new LazyPluginVM(hub, pluginConfig)
- pluginVMLoadPromises.push(loadPlugin(hub, pluginConfig))
-
+ if (hub.PLUGIN_LOAD_SEQUENTIALLY) {
+ await loadPlugin(hub, pluginConfig)
+ } else {
+ pluginVMLoadPromises.push(loadPlugin(hub, pluginConfig))
+ }
if (prevConfig) {
void teardownPlugins(hub, prevConfig)
}
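
A minimal sketch of the trade-off behind PLUGIN_LOAD_SEQUENTIALLY, assuming loadPlugin returns a promise as it does above; pluginConfigs here is just an illustrative collection name.

// Sequential: one plugin VM is set up at a time, so startup memory grows gradually.
for (const pluginConfig of pluginConfigs) {
    await loadPlugin(hub, pluginConfig)
}
// Parallel (default): all loads start at once, which is faster overall but spikes memory harder.
await Promise.all(pluginConfigs.map((pluginConfig) => loadPlugin(hub, pluginConfig)))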
diff --git a/plugin-server/tests/http-server.test.ts b/plugin-server/tests/http-server.test.ts
index eed0dd1907ffc..3900168cd2039 100644
--- a/plugin-server/tests/http-server.test.ts
+++ b/plugin-server/tests/http-server.test.ts
@@ -1,7 +1,7 @@
import http from 'http'
+import { DEFAULT_HTTP_SERVER_PORT } from '../src/config/config'
import { startPluginsServer } from '../src/main/pluginsServer'
-import { HTTP_SERVER_PORT } from '../src/main/services/http-server'
import { makePiscina } from '../src/worker/piscina'
import { resetTestDatabase } from './helpers/sql'
@@ -40,7 +40,7 @@ describe('http server', () => {
)
await new Promise((resolve) =>
- http.get(`http://localhost:${HTTP_SERVER_PORT}/_health`, (res) => {
+ http.get(`http://localhost:${DEFAULT_HTTP_SERVER_PORT}/_health`, (res) => {
const { statusCode } = res
expect(statusCode).toEqual(200)
resolve(null)
@@ -68,7 +68,7 @@ describe('http server', () => {
)
await new Promise((resolve) =>
- http.get(`http://localhost:${HTTP_SERVER_PORT}/_ready`, (res) => {
+ http.get(`http://localhost:${DEFAULT_HTTP_SERVER_PORT}/_ready`, (res) => {
const { statusCode } = res
expect(statusCode).toEqual(200)
resolve(null)
diff --git a/plugin-server/tests/main/ingestion-queues/each-batch.test.ts b/plugin-server/tests/main/ingestion-queues/each-batch.test.ts
index 617978884fe29..0580f53d2724b 100644
--- a/plugin-server/tests/main/ingestion-queues/each-batch.test.ts
+++ b/plugin-server/tests/main/ingestion-queues/each-batch.test.ts
@@ -9,13 +9,10 @@ import {
eachBatchLegacyIngestion,
splitKafkaJSIngestionBatch,
} from '../../../src/main/ingestion-queues/batch-processing/each-batch-ingestion-kafkajs'
-import {
- eachBatch,
- eachBatchAppsOnEventHandlers,
-} from '../../../src/main/ingestion-queues/batch-processing/each-batch-onevent'
+import { eachBatchAppsOnEventHandlers } from '../../../src/main/ingestion-queues/batch-processing/each-batch-onevent'
import {
eachBatchWebhooksHandlers,
- groupIntoBatchesWebhooks,
+ groupIntoBatchesByUsage,
} from '../../../src/main/ingestion-queues/batch-processing/each-batch-webhooks'
import {
ClickHouseTimestamp,
@@ -24,7 +21,6 @@ import {
PostIngestionEvent,
RawClickHouseEvent,
} from '../../../src/types'
-import { groupIntoBatches } from '../../../src/utils/utils'
import { ActionManager } from '../../../src/worker/ingestion/action-manager'
import { ActionMatcher } from '../../../src/worker/ingestion/action-matcher'
import { HookCommander } from '../../../src/worker/ingestion/hooks'
@@ -150,26 +146,6 @@ describe('eachBatchX', () => {
}
})
- describe('eachBatch', () => {
- it('calls eachMessage with the correct arguments', async () => {
- const eachMessage = jest.fn(() => Promise.resolve())
- const batch = createKafkaJSBatch(event)
- await eachBatch(batch, queue, eachMessage, groupIntoBatches, 'key')
-
- expect(eachMessage).toHaveBeenCalledWith({ value: JSON.stringify(event) }, queue)
- })
-
- it('tracks metrics based on the key', async () => {
- const eachMessage = jest.fn(() => Promise.resolve())
- await eachBatch(createKafkaJSBatch(event), queue, eachMessage, groupIntoBatches, 'my_key')
-
- expect(queue.pluginsServer.statsd.timing).toHaveBeenCalledWith(
- 'kafka_queue.each_batch_my_key',
- expect.any(Date)
- )
- })
- })
-
describe('eachBatchAppsOnEventHandlers', () => {
it('calls runAppsOnEventPipeline when useful', async () => {
queue.pluginsServer.pluginConfigsPerTeam.set(2, [pluginConfig39])
@@ -333,11 +309,9 @@ describe('eachBatchX', () => {
kafkaTimestamp: '2020-02-23 00:10:00.00' as ClickHouseTimestamp,
},
])
- const actionManager = new ActionManager(queue.pluginsServer.postgres)
- const actionMatcher = new ActionMatcher(queue.pluginsServer.postgres, actionManager)
- // mock hasWebhooks 10 calls, 1,3,10 should return false, others true
- actionMatcher.hasWebhooks = jest.fn((teamId) => teamId !== 1 && teamId !== 3 && teamId !== 10)
- const result = groupIntoBatchesWebhooks(batch.batch.messages, 5, actionMatcher)
+ // teamIDs 1,3,10 should return false, others true
+ const toProcess = jest.fn((teamId) => teamId !== 1 && teamId !== 3 && teamId !== 10)
+ const result = groupIntoBatchesByUsage(batch.batch.messages, 5, toProcess)
expect(result).toEqual([
{
eventBatch: expect.arrayContaining([
@@ -375,8 +349,7 @@ describe('eachBatchX', () => {
])
// make sure that if the last message would be a new batch and if it's going to be excluded we
// still get the last batch as empty with the right offset and timestamp
- actionMatcher.hasWebhooks = jest.fn((teamId) => teamId !== 1 && teamId !== 3 && teamId !== 10)
- const result2 = groupIntoBatchesWebhooks(batch.batch.messages, 7, actionMatcher)
+ const result2 = groupIntoBatchesByUsage(batch.batch.messages, 7, toProcess)
expect(result2).toEqual([
{
eventBatch: expect.arrayContaining([
diff --git a/plugin-server/tests/main/ingestion-queues/kafka-queue.test.ts b/plugin-server/tests/main/ingestion-queues/kafka-queue.test.ts
index c0912a2ca499b..31dc19d000f3b 100644
--- a/plugin-server/tests/main/ingestion-queues/kafka-queue.test.ts
+++ b/plugin-server/tests/main/ingestion-queues/kafka-queue.test.ts
@@ -1,4 +1,7 @@
+import { Assignment } from 'node-rdkafka-acosom'
+
import { KAFKA_EVENTS_PLUGIN_INGESTION } from '../../../src/config/kafka-topics'
+import { countPartitionsPerTopic } from '../../../src/kafka/consumer'
import { ServerInstance, startPluginsServer } from '../../../src/main/pluginsServer'
import { LogLevel, PluginsServerConfig } from '../../../src/types'
import { Hub } from '../../../src/types'
@@ -79,3 +82,22 @@ describe.skip('IngestionConsumer', () => {
expect(bufferCalls.length).toEqual(1)
})
})
+
+describe('countPartitionsPerTopic', () => {
+ it('should correctly count the number of partitions per topic', () => {
+ const assignments: Assignment[] = [
+ { topic: 'topic1', partition: 0 },
+ { topic: 'topic1', partition: 1 },
+ { topic: 'topic2', partition: 0 },
+ { topic: 'topic2', partition: 1 },
+ { topic: 'topic2', partition: 2 },
+ { topic: 'topic3', partition: 0 },
+ ]
+
+ const result = countPartitionsPerTopic(assignments)
+ expect(result.get('topic1')).toBe(2)
+ expect(result.get('topic2')).toBe(3)
+ expect(result.get('topic3')).toBe(1)
+ expect(result.size).toBe(3)
+ })
+})
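
The implementation under test lives in src/kafka/consumer.ts and is not part of this diff; a hedged sketch that would satisfy the expectations above could look like:

import { Assignment } from 'node-rdkafka-acosom'

// Count how many partitions of each topic are assigned to this consumer.
export function countPartitionsPerTopic(assignments: Assignment[]): Map<string, number> {
    const counts = new Map<string, number>()
    for (const assignment of assignments) {
        counts.set(assignment.topic, (counts.get(assignment.topic) ?? 0) + 1)
    }
    return counts
}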
diff --git a/plugin-server/tests/main/ingestion-queues/session-recording/fixtures.ts b/plugin-server/tests/main/ingestion-queues/session-recording/fixtures.ts
index 7bdb5d749e778..105ebf249e2ba 100644
--- a/plugin-server/tests/main/ingestion-queues/session-recording/fixtures.ts
+++ b/plugin-server/tests/main/ingestion-queues/session-recording/fixtures.ts
@@ -16,7 +16,6 @@ export function createIncomingRecordingMessage(
session_id: 'session_id_1',
window_id: 'window_id_1',
events: [{ ...jsonFullSnapshot }],
- replayIngestionConsumer: 'v2',
...partialIncomingMessage,
metadata: {
diff --git a/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer-v2.test.ts b/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer-v2.test.ts
index 87b66a7210fc1..c792c9b1947e5 100644
--- a/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer-v2.test.ts
+++ b/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer-v2.test.ts
@@ -211,7 +211,6 @@ describe('ingester', () => {
timestamp: 1,
topic: 'the_topic',
},
- replayIngestionConsumer: 'v2',
session_id: '018a47c2-2f4a-70a8-b480-5e51d8b8d070',
team_id: 1,
window_id: '018a47c2-2f4a-70a8-b480-5e52f5480448',
diff --git a/plugin-server/tests/main/process-event.test.ts b/plugin-server/tests/main/process-event.test.ts
index 25251874e2086..94505831b8452 100644
--- a/plugin-server/tests/main/process-event.test.ts
+++ b/plugin-server/tests/main/process-event.test.ts
@@ -315,7 +315,7 @@ test('capture new person', async () => {
let persons = await hub.db.fetchPersons()
expect(persons[0].version).toEqual(0)
expect(persons[0].created_at).toEqual(now)
- let expectedProps = {
+ let expectedProps: Record<string, any> = {
$creator_event_uuid: uuid,
$initial_browser: 'Chrome',
$initial_browser_version: '95',
@@ -329,6 +329,12 @@ test('capture new person', async () => {
msclkid: 'BING ADS ID',
$initial_referrer: 'https://google.com/?q=posthog',
$initial_referring_domain: 'https://google.com',
+ $browser: 'Chrome',
+ $browser_version: '95',
+ $current_url: 'https://test.com',
+ $os: 'Mac OS X',
+ $referrer: 'https://google.com/?q=posthog',
+ $referring_domain: 'https://google.com',
}
expect(persons[0].properties).toEqual(expectedProps)
@@ -343,7 +349,17 @@ test('capture new person', async () => {
expect(events[0].properties).toEqual({
$ip: '127.0.0.1',
$os: 'Mac OS X',
- $set: { utm_medium: 'twitter', gclid: 'GOOGLE ADS ID', msclkid: 'BING ADS ID' },
+ $set: {
+ utm_medium: 'twitter',
+ gclid: 'GOOGLE ADS ID',
+ msclkid: 'BING ADS ID',
+ $browser: 'Chrome',
+ $browser_version: '95',
+ $current_url: 'https://test.com',
+ $os: 'Mac OS X',
+ $referrer: 'https://google.com/?q=posthog',
+ $referring_domain: 'https://google.com',
+ },
token: 'THIS IS NOT A TOKEN FOR TEAM 2',
$browser: 'Chrome',
$set_once: {
@@ -412,6 +428,12 @@ test('capture new person', async () => {
msclkid: 'BING ADS ID',
$initial_referrer: 'https://google.com/?q=posthog',
$initial_referring_domain: 'https://google.com',
+ $browser: 'Firefox',
+ $browser_version: 80,
+ $current_url: 'https://test.com/pricing',
+ $os: 'Mac OS X',
+ $referrer: 'https://google.com/?q=posthog',
+ $referring_domain: 'https://google.com',
}
expect(persons[0].properties).toEqual(expectedProps)
@@ -425,6 +447,9 @@ test('capture new person', async () => {
expect(events[1].properties.$set).toEqual({
utm_medium: 'instagram',
+ $browser: 'Firefox',
+ $browser_version: 80,
+ $current_url: 'https://test.com/pricing',
})
expect(events[1].properties.$set_once).toEqual({
$initial_browser: 'Firefox',
@@ -481,6 +506,9 @@ test('capture new person', async () => {
expect(persons[0].version).toEqual(1)
expect(events[2].properties.$set).toEqual({
+ $browser: 'Firefox',
+ $current_url: 'https://test.com/pricing',
+
utm_medium: 'instagram',
})
expect(events[2].properties.$set_once).toEqual({
@@ -1236,6 +1264,8 @@ const sessionReplayEventTestCases: {
| 'console_warn_count'
| 'console_error_count'
| 'size'
+ | 'event_count'
+ | 'message_count'
>
}[] = [
{
@@ -1244,7 +1274,7 @@ const sessionReplayEventTestCases: {
click_count: 1,
keypress_count: 0,
mouse_activity_count: 1,
- first_url: undefined,
+ first_url: null,
first_timestamp: '2023-04-25 18:58:13.469',
last_timestamp: '2023-04-25 18:58:13.469',
active_milliseconds: 1, // one event, but it's active, so active time is 1ms not 0
@@ -1252,6 +1282,8 @@ const sessionReplayEventTestCases: {
console_warn_count: 0,
console_error_count: 0,
size: 73,
+ event_count: 1,
+ message_count: 1,
},
},
{
@@ -1260,7 +1292,7 @@ const sessionReplayEventTestCases: {
click_count: 0,
keypress_count: 1,
mouse_activity_count: 1,
- first_url: undefined,
+ first_url: null,
first_timestamp: '2023-04-25 18:58:13.469',
last_timestamp: '2023-04-25 18:58:13.469',
active_milliseconds: 1, // one event, but it's active, so active time is 1ms not 0
@@ -1268,6 +1300,8 @@ const sessionReplayEventTestCases: {
console_warn_count: 0,
console_error_count: 0,
size: 73,
+ event_count: 1,
+ message_count: 1,
},
},
{
@@ -1316,7 +1350,7 @@ const sessionReplayEventTestCases: {
click_count: 0,
keypress_count: 1,
mouse_activity_count: 1,
- first_url: undefined,
+ first_url: null,
first_timestamp: '2023-04-25 18:58:13.469',
last_timestamp: '2023-04-25 18:58:13.469',
active_milliseconds: 1, // one event, but it's active, so active time is 1ms not 0
@@ -1324,6 +1358,8 @@ const sessionReplayEventTestCases: {
console_warn_count: 3,
console_error_count: 1,
size: 762,
+ event_count: 7,
+ message_count: 1,
},
},
{
@@ -1362,6 +1398,8 @@ const sessionReplayEventTestCases: {
console_warn_count: 0,
console_error_count: 0,
size: 213,
+ event_count: 2,
+ message_count: 1,
},
},
{
@@ -1381,7 +1419,7 @@ const sessionReplayEventTestCases: {
click_count: 6,
keypress_count: 0,
mouse_activity_count: 6,
- first_url: undefined,
+ first_url: null,
first_timestamp: '2023-04-25 18:58:13.000',
last_timestamp: '2023-04-25 18:58:19.000',
active_milliseconds: 6000, // can sum up the activity across windows
@@ -1389,6 +1427,8 @@ const sessionReplayEventTestCases: {
console_warn_count: 0,
console_error_count: 0,
size: 433,
+ event_count: 6,
+ message_count: 1,
},
},
]
diff --git a/plugin-server/tests/utils/db/utils.test.ts b/plugin-server/tests/utils/db/utils.test.ts
index 5201b8e60b803..420c645472ff3 100644
--- a/plugin-server/tests/utils/db/utils.test.ts
+++ b/plugin-server/tests/utils/db/utils.test.ts
@@ -17,41 +17,74 @@ describe('personInitialAndUTMProperties()', () => {
{ tag_name: 'a', nth_child: 1, nth_of_type: 2, attr__class: 'btn btn-sm' },
{ tag_name: 'div', nth_child: 1, nth_of_type: 2, $el_text: '💻' },
],
+ $app_build: 2,
+ $app_name: 'my app',
+ $app_namespace: 'com.posthog.myapp',
+ $app_version: '1.2.3',
}
- expect(personInitialAndUTMProperties(properties)).toEqual({
- distinct_id: 2,
- $browser: 'Chrome',
- $current_url: 'https://test.com',
- $os: 'Mac OS X',
- $browser_version: '95',
- $referring_domain: 'https://google.com',
- $referrer: 'https://google.com/?q=posthog',
- utm_medium: 'twitter',
- gclid: 'GOOGLE ADS ID',
- msclkid: 'BING ADS ID',
- $elements: [
- {
- tag_name: 'a',
- nth_child: 1,
- nth_of_type: 2,
- attr__class: 'btn btn-sm',
+ expect(personInitialAndUTMProperties(properties)).toMatchInlineSnapshot(`
+ Object {
+ "$app_build": 2,
+ "$app_name": "my app",
+ "$app_namespace": "com.posthog.myapp",
+ "$app_version": "1.2.3",
+ "$browser": "Chrome",
+ "$browser_version": "95",
+ "$current_url": "https://test.com",
+ "$elements": Array [
+ Object {
+ "attr__class": "btn btn-sm",
+ "nth_child": 1,
+ "nth_of_type": 2,
+ "tag_name": "a",
},
- { tag_name: 'div', nth_child: 1, nth_of_type: 2, $el_text: '💻' },
- ],
- $set: { utm_medium: 'twitter', gclid: 'GOOGLE ADS ID', msclkid: 'BING ADS ID' },
- $set_once: {
- $initial_browser: 'Chrome',
- $initial_current_url: 'https://test.com',
- $initial_os: 'Mac OS X',
- $initial_browser_version: '95',
- $initial_utm_medium: 'twitter',
- $initial_gclid: 'GOOGLE ADS ID',
- $initial_msclkid: 'BING ADS ID',
- $initial_referring_domain: 'https://google.com',
- $initial_referrer: 'https://google.com/?q=posthog',
- },
- })
+ Object {
+ "$el_text": "💻",
+ "nth_child": 1,
+ "nth_of_type": 2,
+ "tag_name": "div",
+ },
+ ],
+ "$os": "Mac OS X",
+ "$referrer": "https://google.com/?q=posthog",
+ "$referring_domain": "https://google.com",
+ "$set": Object {
+ "$app_build": 2,
+ "$app_name": "my app",
+ "$app_namespace": "com.posthog.myapp",
+ "$app_version": "1.2.3",
+ "$browser": "Chrome",
+ "$browser_version": "95",
+ "$current_url": "https://test.com",
+ "$os": "Mac OS X",
+ "$referrer": "https://google.com/?q=posthog",
+ "$referring_domain": "https://google.com",
+ "gclid": "GOOGLE ADS ID",
+ "msclkid": "BING ADS ID",
+ "utm_medium": "twitter",
+ },
+ "$set_once": Object {
+ "$initial_app_build": 2,
+ "$initial_app_name": "my app",
+ "$initial_app_namespace": "com.posthog.myapp",
+ "$initial_app_version": "1.2.3",
+ "$initial_browser": "Chrome",
+ "$initial_browser_version": "95",
+ "$initial_current_url": "https://test.com",
+ "$initial_gclid": "GOOGLE ADS ID",
+ "$initial_msclkid": "BING ADS ID",
+ "$initial_os": "Mac OS X",
+ "$initial_referrer": "https://google.com/?q=posthog",
+ "$initial_referring_domain": "https://google.com",
+ "$initial_utm_medium": "twitter",
+ },
+ "distinct_id": 2,
+ "gclid": "GOOGLE ADS ID",
+ "msclkid": "BING ADS ID",
+ "utm_medium": "twitter",
+ }
+ `)
})
it('initial current domain regression test', () => {
@@ -62,6 +95,7 @@ describe('personInitialAndUTMProperties()', () => {
expect(personInitialAndUTMProperties(properties)).toEqual({
$current_url: 'https://test.com',
$set_once: { $initial_current_url: 'https://test.com' },
+ $set: { $current_url: 'https://test.com' },
})
})
})
diff --git a/plugin-server/tests/worker/ingestion/event-pipeline/event-pipeline-integration.test.ts b/plugin-server/tests/worker/ingestion/event-pipeline/event-pipeline-integration.test.ts
index 837079da765eb..343826d81a4f2 100644
--- a/plugin-server/tests/worker/ingestion/event-pipeline/event-pipeline-integration.test.ts
+++ b/plugin-server/tests/worker/ingestion/event-pipeline/event-pipeline-integration.test.ts
@@ -105,6 +105,7 @@ describe('Event Pipeline integration test', () => {
$set: {
personProp: 'value',
anotherValue: 2,
+ $browser: 'Chrome',
},
$set_once: {
$initial_browser: 'Chrome',
@@ -118,6 +119,7 @@ describe('Event Pipeline integration test', () => {
expect(persons[0].properties).toEqual({
$creator_event_uuid: event.uuid,
$initial_browser: 'Chrome',
+ $browser: 'Chrome',
personProp: 'value',
anotherValue: 2,
})
diff --git a/plugin-server/tests/worker/ingestion/event-pipeline/processPersonsStep.test.ts b/plugin-server/tests/worker/ingestion/event-pipeline/processPersonsStep.test.ts
index 71d495bcf9bce..d2ce3aa76e383 100644
--- a/plugin-server/tests/worker/ingestion/event-pipeline/processPersonsStep.test.ts
+++ b/plugin-server/tests/worker/ingestion/event-pipeline/processPersonsStep.test.ts
@@ -85,6 +85,7 @@ describe.each([[true], [false]])('processPersonsStep()', (poEEmbraceJoin) => {
$browser: 'Chrome',
$set: {
someProp: 'value',
+ $browser: 'Chrome',
},
$set_once: {
$initial_browser: 'Chrome',
@@ -95,7 +96,12 @@ describe.each([[true], [false]])('processPersonsStep()', (poEEmbraceJoin) => {
expect.objectContaining({
id: expect.any(Number),
uuid: expect.any(String),
- properties: { $initial_browser: 'Chrome', someProp: 'value', $creator_event_uuid: expect.any(String) },
+ properties: {
+ $initial_browser: 'Chrome',
+ someProp: 'value',
+ $creator_event_uuid: expect.any(String),
+ $browser: 'Chrome',
+ },
version: 0,
is_identified: false,
})
diff --git a/plugin-server/tests/worker/ingestion/person-state.test.ts b/plugin-server/tests/worker/ingestion/person-state.test.ts
index b44f60e8d2dda..66fa35976d274 100644
--- a/plugin-server/tests/worker/ingestion/person-state.test.ts
+++ b/plugin-server/tests/worker/ingestion/person-state.test.ts
@@ -25,17 +25,20 @@ describe('PersonState.update()', () => {
let uuid2: UUIDT
let teamId: number
let poEEmbraceJoin: boolean
+ let organizationId: string
beforeAll(async () => {
;[hub, closeHub] = await createHub({})
await hub.db.clickhouseQuery('SYSTEM STOP MERGES')
+
+ organizationId = await createOrganization(hub.db.postgres)
})
beforeEach(async () => {
poEEmbraceJoin = false
uuid = new UUIDT()
uuid2 = new UUIDT()
- const organizationId = await createOrganization(hub.db.postgres)
+
teamId = await createTeam(hub.db.postgres, organizationId)
jest.spyOn(hub.db, 'fetchPerson')
@@ -1078,10 +1081,11 @@ describe('PersonState.update()', () => {
hub.statsd = { increment: jest.fn() } as any
})
- it('stops $identify if current distinct_id is illegal', async () => {
+ const illegalIds = ['', ' ', 'null', 'undefined', '"undefined"', '[object Object]', '"[object Object]"']
+ it.each(illegalIds)('stops $identify if current distinct_id is illegal: `%s`', async (illegalId: string) => {
const person = await personState({
event: '$identify',
- distinct_id: '[object Object]',
+ distinct_id: illegalId,
properties: {
$anon_distinct_id: 'anonymous_id',
},
@@ -1092,16 +1096,16 @@ describe('PersonState.update()', () => {
expect(persons.length).toEqual(0)
expect(hub.statsd!.increment).toHaveBeenCalledWith('illegal_distinct_ids.total', {
- distinctId: '[object Object]',
+ distinctId: illegalId,
})
})
- it('stops $identify if $anon_distinct_id is illegal', async () => {
+ it.each(illegalIds)('stops $identify if $anon_distinct_id is illegal: `%s`', async (illegalId: string) => {
const person = await personState({
event: '$identify',
distinct_id: 'some_distinct_id',
properties: {
- $anon_distinct_id: 'undefined',
+ $anon_distinct_id: illegalId,
},
}).handleIdentifyOrAlias()
@@ -1110,7 +1114,7 @@ describe('PersonState.update()', () => {
expect(persons.length).toEqual(0)
expect(hub.statsd!.increment).toHaveBeenCalledWith('illegal_distinct_ids.total', {
- distinctId: 'undefined',
+ distinctId: illegalId,
})
})
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 52699fa9c3fb3..301601dff1e49 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -1,4 +1,4 @@
-lockfileVersion: '6.1'
+lockfileVersion: '6.0'
settings:
autoInstallPeers: true
@@ -39,8 +39,8 @@ dependencies:
specifier: ^2.1.2
version: 2.1.2(react@16.14.0)
'@rrweb/types':
- specifier: ^2.0.0-alpha.9
- version: 2.0.0-alpha.9
+ specifier: ^2.0.0-alpha.11
+ version: 2.0.0-alpha.11
'@sentry/react':
specifier: 7.22.0
version: 7.22.0(react@16.14.0)
@@ -194,9 +194,12 @@ dependencies:
monaco-editor:
specifier: ^0.39.0
version: 0.39.0
+ papaparse:
+ specifier: ^5.4.1
+ version: 5.4.1
posthog-js:
- specifier: 1.78.1
- version: 1.78.1
+ specifier: 1.78.5
+ version: 1.78.5
posthog-js-lite:
specifier: 2.0.0-alpha5
version: 2.0.0-alpha5
@@ -282,8 +285,8 @@ dependencies:
specifier: ^1.5.1
version: 1.5.1
rrweb:
- specifier: ^2.0.0-alpha.9
- version: 2.0.0-alpha.9
+ specifier: ^2.0.0-alpha.11
+ version: 2.0.0-alpha.11
sass:
specifier: ^1.26.2
version: 1.56.0
@@ -432,6 +435,9 @@ devDependencies:
'@types/node':
specifier: ^18.11.9
version: 18.11.9
+ '@types/papaparse':
+ specifier: ^5.3.8
+ version: 5.3.8
'@types/pixelmatch':
specifier: ^5.2.4
version: 5.2.4
@@ -608,7 +614,7 @@ devDependencies:
version: 7.3.1
storybook-addon-pseudo-states:
specifier: 2.1.0
- version: 2.1.0(@storybook/components@7.3.1)(@storybook/core-events@7.3.1)(@storybook/manager-api@7.4.0)(@storybook/preview-api@7.4.0)(@storybook/theming@7.3.1)(react-dom@16.14.0)(react@16.14.0)
+ version: 2.1.0(@storybook/components@7.3.1)(@storybook/core-events@7.3.1)(@storybook/manager-api@7.4.1)(@storybook/preview-api@7.4.1)(@storybook/theming@7.3.1)(react-dom@16.14.0)(react@16.14.0)
style-loader:
specifier: ^2.0.0
version: 2.0.0(webpack@5.88.2)
@@ -978,7 +984,7 @@ packages:
engines: {node: '>=6.0.0'}
hasBin: true
dependencies:
- '@babel/types': 7.22.15
+ '@babel/types': 7.22.17
dev: true
/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@7.22.5(@babel/core@7.22.10):
@@ -2083,8 +2089,8 @@ packages:
'@babel/helper-validator-identifier': 7.22.5
to-fast-properties: 2.0.0
- /@babel/types@7.22.15:
- resolution: {integrity: sha512-X+NLXr0N8XXmN5ZsaQdm9U2SSC3UbIYq/doL++sueHOTisgZHoKaQtZxGuV2cUPQHMfjKEfg/g6oy7Hm6SKFtA==}
+ /@babel/types@7.22.17:
+ resolution: {integrity: sha512-YSQPHLFtQNE5xN9tHuZnzu8vPr61wVTBZdfv1meex1NBosa4iT05k/Jw06ddJugi4bk7The/oSwQGFcksmEJQg==}
engines: {node: '>=6.9.0'}
dependencies:
'@babel/helper-string-parser': 7.22.5
@@ -3928,10 +3934,10 @@ packages:
type-fest: 2.19.0
dev: false
- /@rrweb/types@2.0.0-alpha.9:
- resolution: {integrity: sha512-yS2KghLSmSSxo6H7tHrJ6u+nWJA9zCXaKFyc79rUSX8RHHSImRqocTqJ8jz794kCIWA90rvaQayRONdHO+vB0Q==}
+ /@rrweb/types@2.0.0-alpha.11:
+ resolution: {integrity: sha512-8ccocIkT5J/bfNRQY85qR/g6p5YQFpgFO2cMt4+Ex7w31Lq0yqZBRaoYEsawQKpLrn5KOHkdn2UTUrna7WMQuA==}
dependencies:
- rrweb-snapshot: 2.0.0-alpha.9
+ rrweb-snapshot: 2.0.0-alpha.11
dev: false
/@sentry/browser@7.22.0:
@@ -4519,11 +4525,11 @@ packages:
tiny-invariant: 1.3.1
dev: true
- /@storybook/channels@7.4.0:
- resolution: {integrity: sha512-/1CU0s3npFumzVHLGeubSyPs21O3jNqtSppOjSB9iDTyV2GtQrjh5ntVwebfKpCkUSitx3x7TkCb9dylpEZ8+w==}
+ /@storybook/channels@7.4.1:
+ resolution: {integrity: sha512-gnE1mNrRF+9oCVRMq6MS/tLXJbYmf9P02PCC3KpMLcSsABdH5jcrACejzJVo/kE223knFH7NJc4BBj7+5h0uXA==}
dependencies:
- '@storybook/client-logger': 7.4.0
- '@storybook/core-events': 7.4.0
+ '@storybook/client-logger': 7.4.1
+ '@storybook/core-events': 7.4.1
'@storybook/global': 5.0.0
qs: 6.11.2
telejson: 7.2.0
@@ -4587,8 +4593,8 @@ packages:
'@storybook/global': 5.0.0
dev: true
- /@storybook/client-logger@7.4.0:
- resolution: {integrity: sha512-4pBnf7+df1wXEVcF1civqxbrtccGGHQkfWQkJo49s53RXvF7SRTcif6XTx0V3cQV0v7I1C5mmLm0LNlmjPRP1Q==}
+ /@storybook/client-logger@7.4.1:
+ resolution: {integrity: sha512-2j0DQlKlPNY8XAaEZv+mUYEUm4dOWg6/Q92UNbvYPRK5qbXUvbMiQco5nmvg4LvMT6y99LhRSW2xrwEx5xKAKw==}
dependencies:
'@storybook/global': 5.0.0
dev: true
@@ -4679,8 +4685,8 @@ packages:
resolution: {integrity: sha512-7Pkgwmj/9B7Z3NNSn2swnviBrg9L1VeYSFw6JJKxtQskt8QoY8LxAsPzVMlHjqRmO6sO7lHo9FgpzIFxdmFaAA==}
dev: true
- /@storybook/core-events@7.4.0:
- resolution: {integrity: sha512-JavEo4dw7TQdF5pSKjk4RtqLgsG2R/eWRI8vZ3ANKa0ploGAnQR/eMTfSxf6TUH3ElBWLJhi+lvUCkKXPQD+dw==}
+ /@storybook/core-events@7.4.1:
+ resolution: {integrity: sha512-F1tGb32XZ4FRfbtXdi4b+zdzWUjFz5rn3TF18mSuBGGXvxKU+4tywgjGQ3dKGdvuP754czn3poSdz2ZW08bLsQ==}
dependencies:
ts-dedent: 2.2.0
dev: true
@@ -4845,20 +4851,20 @@ packages:
ts-dedent: 2.2.0
dev: true
- /@storybook/manager-api@7.4.0(react-dom@16.14.0)(react@16.14.0):
- resolution: {integrity: sha512-sBfkkt0eZGTozeKrbzMtWLEOQrgqdk24OUJlkc2IDaucR1CBNjoCMjNeYg7cLDw0rXE8W3W3AdWtJnfsUbLMAQ==}
+ /@storybook/manager-api@7.4.1(react-dom@16.14.0)(react@16.14.0):
+ resolution: {integrity: sha512-nzYasETW20uDWpfST6JFf6c/GSFB/dj7xVtg5EpvAYF8GkErCk9TvNKdLNroRrIYm5VJxHWC2V+CJ07RuX3Glw==}
peerDependencies:
react: ^16.8.0 || ^17.0.0 || ^18.0.0
react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0
dependencies:
- '@storybook/channels': 7.4.0
- '@storybook/client-logger': 7.4.0
- '@storybook/core-events': 7.4.0
+ '@storybook/channels': 7.4.1
+ '@storybook/client-logger': 7.4.1
+ '@storybook/core-events': 7.4.1
'@storybook/csf': 0.1.1
'@storybook/global': 5.0.0
- '@storybook/router': 7.4.0(react-dom@16.14.0)(react@16.14.0)
- '@storybook/theming': 7.4.0(react-dom@16.14.0)(react@16.14.0)
- '@storybook/types': 7.4.0
+ '@storybook/router': 7.4.1(react-dom@16.14.0)(react@16.14.0)
+ '@storybook/theming': 7.4.1(react-dom@16.14.0)(react@16.14.0)
+ '@storybook/types': 7.4.1
dequal: 2.0.3
lodash: 4.17.21
memoizerific: 1.11.3
@@ -4954,15 +4960,15 @@ packages:
util-deprecate: 1.0.2
dev: true
- /@storybook/preview-api@7.4.0:
- resolution: {integrity: sha512-ndXO0Nx+eE7ktVE4EqHpQZ0guX7yYBdruDdJ7B739C0+OoPWsJN7jAzUqq0NXaBcYrdaU5gTy+KnWJUt8R+OyA==}
+ /@storybook/preview-api@7.4.1:
+ resolution: {integrity: sha512-swmosWK73lP0CXDKMOwYIaaId28+muPDYX2V/0JmIOA+45HFXimeXZs3XsgVgQMutVF51QqnDA0pfrNgRofHgQ==}
dependencies:
- '@storybook/channels': 7.4.0
- '@storybook/client-logger': 7.4.0
- '@storybook/core-events': 7.4.0
+ '@storybook/channels': 7.4.1
+ '@storybook/client-logger': 7.4.1
+ '@storybook/core-events': 7.4.1
'@storybook/csf': 0.1.1
'@storybook/global': 5.0.0
- '@storybook/types': 7.4.0
+ '@storybook/types': 7.4.1
'@types/qs': 6.9.8
dequal: 2.0.3
lodash: 4.17.21
@@ -5097,13 +5103,13 @@ packages:
react-dom: 16.14.0(react@16.14.0)
dev: true
- /@storybook/router@7.4.0(react-dom@16.14.0)(react@16.14.0):
- resolution: {integrity: sha512-IATdtFL5C3ryjNQSwaQfrmiOZiVFoVNMevMoBGDC++g0laSW40TGiNK6fUjUDBKuOgbuDt4Svfbl29k21GefEg==}
+ /@storybook/router@7.4.1(react-dom@16.14.0)(react@16.14.0):
+ resolution: {integrity: sha512-7tE1B18jb+5+ujXd3BHcub85QnytIVBNA0iAo+o8MNwArISyodqp12y2D3w+QpXkg0GtPhAp/CMhzpyxotPhRQ==}
peerDependencies:
react: ^16.8.0 || ^17.0.0 || ^18.0.0
react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0
dependencies:
- '@storybook/client-logger': 7.4.0
+ '@storybook/client-logger': 7.4.1
memoizerific: 1.11.3
qs: 6.11.2
react: 16.14.0
@@ -5196,14 +5202,14 @@ packages:
react-dom: 16.14.0(react@16.14.0)
dev: true
- /@storybook/theming@7.4.0(react-dom@16.14.0)(react@16.14.0):
- resolution: {integrity: sha512-eLjEf6G3cqlegfutF/iUrec9LrUjKDj7K4ZhGdACWrf7bQcODs99EK62e9/d8GNKr4b+QMSEuM6XNGaqdPnuzQ==}
+ /@storybook/theming@7.4.1(react-dom@16.14.0)(react@16.14.0):
+ resolution: {integrity: sha512-a4QajZbnYumq8ovtn7nW7BeNrk/TaWyKmUrIz4w08I6ghzESJA4aCWZ6394awbrruiIOzCCKOUq4mfWEsc8W6A==}
peerDependencies:
react: ^16.8.0 || ^17.0.0 || ^18.0.0
react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0
dependencies:
'@emotion/use-insertion-effect-with-fallbacks': 1.0.1(react@16.14.0)
- '@storybook/client-logger': 7.4.0
+ '@storybook/client-logger': 7.4.1
'@storybook/global': 5.0.0
memoizerific: 1.11.3
react: 16.14.0
@@ -5219,13 +5225,12 @@ packages:
file-system-cache: 2.3.0
dev: true
- /@storybook/types@7.4.0:
- resolution: {integrity: sha512-XyzYkmeklywxvElPrIWLczi/PWtEdgTL6ToT3++FVxptsC2LZKS3Ue+sBcQ9xRZhkRemw4HQHwed5EW3dO8yUg==}
+ /@storybook/types@7.4.1:
+ resolution: {integrity: sha512-bjt1YDG9AocFBhIFRvGGbYZPlD223p+qAFcFgYdezU16fFE4ZGFUzUuq2ERkOofL7a2+OzLTCQ/SKe1jFkXCxQ==}
dependencies:
- '@storybook/channels': 7.4.0
+ '@storybook/channels': 7.4.1
'@types/babel__core': 7.20.1
'@types/express': 4.17.17
- '@types/react': 16.14.34
file-system-cache: 2.3.0
dev: true
@@ -5721,7 +5726,7 @@ packages:
resolution: {integrity: sha512-aACu/U/omhdk15O4Nfb+fHgH/z3QsfQzpnvRZhYhThms83ZnAOZz7zZAWO7mn2yyNQaA4xTO8GLK3uqFU4bYYw==}
dependencies:
'@babel/parser': 7.22.16
- '@babel/types': 7.22.15
+ '@babel/types': 7.22.17
'@types/babel__generator': 7.6.4
'@types/babel__template': 7.4.1
'@types/babel__traverse': 7.20.1
@@ -5749,7 +5754,7 @@ packages:
/@types/babel__traverse@7.20.1:
resolution: {integrity: sha512-MitHFXnhtgwsGZWtT68URpOvLN4EREih1u3QtQiN4VdAxWKRVvGCSvw/Qth0M0Qq3pJpnGOu5JaM/ydK7OGbqg==}
dependencies:
- '@babel/types': 7.22.15
+ '@babel/types': 7.22.17
dev: true
/@types/body-parser@1.19.2:
@@ -6213,6 +6218,12 @@ packages:
resolution: {integrity: sha512-sn7L+qQ6RLPdXRoiaE7bZ/Ek+o4uICma/lBFPyJEKDTPTBP1W8u0c4baj3EiS4DiqLs+Hk+KUGvMVJtAw3ePJg==}
dev: false
+ /@types/papaparse@5.3.8:
+ resolution: {integrity: sha512-ArKIEOOWULbhi53wkAiRy1ze4wvrTfhpAj7Yfzva+EkmX2sV8PpFB+xqzJfzXNzK4me95FJH9QZt5NXFVGzOoQ==}
+ dependencies:
+ '@types/node': 18.11.9
+ dev: true
+
/@types/parse-json@4.0.0:
resolution: {integrity: sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==}
dev: true
@@ -12994,7 +13005,7 @@ packages:
dependencies:
universalify: 2.0.0
optionalDependencies:
- graceful-fs: 4.2.10
+ graceful-fs: 4.2.11
/jsprim@2.0.2:
resolution: {integrity: sha512-gqXddjPqQ6G40VdnI6T6yObEC+pDNvyP95wdQhkWkg7crHH3km5qP1FsOXEkzEQwnz6gz5qGTn1c2Y52wP3OyQ==}
@@ -14266,6 +14277,10 @@ packages:
resolution: {integrity: sha512-NUcwaKxUxWrZLpDG+z/xZaCgQITkA/Dv4V/T6bw7VON6l1Xz/VnrBqrYjZQ12TamKHzITTfOEIYUj48y2KXImA==}
dev: true
+ /papaparse@5.4.1:
+ resolution: {integrity: sha512-HipMsgJkZu8br23pW15uvo6sib6wne/4woLZPlFf3rpDyMe9ywEXUsuD7+6K9PRkJlVT51j/sCOYDKGGS3ZJrw==}
+ dev: false
+
/param-case@3.0.4:
resolution: {integrity: sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==}
dependencies:
@@ -14909,8 +14924,8 @@ packages:
resolution: {integrity: sha512-tlkBdypJuvK/s00n4EiQjwYVfuuZv6vt8BF3g1ooIQa2Gz9Vz80p8q3qsPLZ0V5ErGRy6i3Q4fWC9TDzR7GNRQ==}
dev: false
- /posthog-js@1.78.1:
- resolution: {integrity: sha512-5tJoF56gGg4B4CSlLbWHuTpi7Ch7wksjCkPonHlQAc61ZZRymTB63tRheCvkcf+Omf8PBkO+2NJ0XEgrkRHE0A==}
+ /posthog-js@1.78.5:
+ resolution: {integrity: sha512-UUipML52LEyks7Pbx/3dpBJc2iPJrW+Ss6Y0BiIygn+QZoBjIe1WjE4Ep+Fnz7+cX1axex/ZiYholBnW7E4Aug==}
dependencies:
fflate: 0.4.8
dev: false
@@ -16608,27 +16623,27 @@ packages:
resolution: {integrity: sha512-85aZYCxweiD5J8yTEbw+E6A27zSnLPNDL0WfPdw3YYodq7WjnTKo0q4dtyQ2gz23iPT8Q9CUyJtAaUNcTxRf5Q==}
dev: false
- /rrdom@2.0.0-alpha.9:
- resolution: {integrity: sha512-jfaZ8tHi098P4GpPEtkOwnkucyKA5eGanAVHGPklzCqAeEq1Yx+9/y8AeOtF3yiobqKKkW8lLvFH2KrBH1CZlQ==}
+ /rrdom@2.0.0-alpha.11:
+ resolution: {integrity: sha512-U37m0t4jTz63wnVRcOQ5qFzSTrI5RdNgeXnHAha2Fmh9+1K+XuCx421a8D1wZk3WcDc2sFz/04FVdM0OD2caHg==}
dependencies:
- rrweb-snapshot: 2.0.0-alpha.9
+ rrweb-snapshot: 2.0.0-alpha.11
dev: false
- /rrweb-snapshot@2.0.0-alpha.9:
- resolution: {integrity: sha512-mHg1uUE2iUf0MXLE//4r5cMynkbduwmaOEis4gC7EuqkUAC1pYoLpcYYVt9lD6dgYIF6BmK6dgLLzMpD/tTyyA==}
+ /rrweb-snapshot@2.0.0-alpha.11:
+ resolution: {integrity: sha512-N0dzeJA2VhrlSOadkKwCVmV/DuNOwBH+Lhx89hAf9PQK4lCS8AP4AaylhqUdZOYHqwVjqsYel/uZ4hN79vuLhw==}
dev: false
- /rrweb@2.0.0-alpha.9:
- resolution: {integrity: sha512-8E2yiLY7IrFjDcVUZ7AcQtdBNFuTIsBrlCMpbyLua6X64dGRhOZ+IUDXLnAbNj5oymZgFtZu2UERG9rmV2VAng==}
+ /rrweb@2.0.0-alpha.11:
+ resolution: {integrity: sha512-vJ2gNvF+pUG9C2aaau7iSNqhWBSc4BwtUO4FpegOtDObuH4PIaxNJOlgHz82+WxKr9XPm93ER0LqmNpy0KYdKg==}
dependencies:
- '@rrweb/types': 2.0.0-alpha.9
+ '@rrweb/types': 2.0.0-alpha.11
'@types/css-font-loading-module': 0.0.7
'@xstate/fsm': 1.6.5
base64-arraybuffer: 1.0.2
fflate: 0.4.8
mitt: 3.0.0
- rrdom: 2.0.0-alpha.9
- rrweb-snapshot: 2.0.0-alpha.9
+ rrdom: 2.0.0-alpha.11
+ rrweb-snapshot: 2.0.0-alpha.11
dev: false
/rtl-css-js@1.16.0:
@@ -17135,7 +17150,7 @@ packages:
resolution: {integrity: sha512-siT1RiqlfQnGqgT/YzXVUNsom9S0H1OX+dpdGN1xkyYATo4I6sep5NmsRD/40s3IIOvlCq6akxkqG82urIZW1w==}
dev: true
- /storybook-addon-pseudo-states@2.1.0(@storybook/components@7.3.1)(@storybook/core-events@7.3.1)(@storybook/manager-api@7.4.0)(@storybook/preview-api@7.4.0)(@storybook/theming@7.3.1)(react-dom@16.14.0)(react@16.14.0):
+ /storybook-addon-pseudo-states@2.1.0(@storybook/components@7.3.1)(@storybook/core-events@7.3.1)(@storybook/manager-api@7.4.1)(@storybook/preview-api@7.4.1)(@storybook/theming@7.3.1)(react-dom@16.14.0)(react@16.14.0):
resolution: {integrity: sha512-AwbCL1OiZ16aIeXSP/IOovkMwXy7NTZqmjkz+UM2guSGjvogHNA95NhuVyWoqieE+QWUpGO48+MrBGMeeJcHOQ==}
peerDependencies:
'@storybook/components': ^7.0.0
@@ -17153,8 +17168,8 @@ packages:
dependencies:
'@storybook/components': 7.3.1(@types/react-dom@16.9.17)(@types/react@16.14.34)(react-dom@16.14.0)(react@16.14.0)
'@storybook/core-events': 7.3.1
- '@storybook/manager-api': 7.4.0(react-dom@16.14.0)(react@16.14.0)
- '@storybook/preview-api': 7.4.0
+ '@storybook/manager-api': 7.4.1(react-dom@16.14.0)(react@16.14.0)
+ '@storybook/preview-api': 7.4.1
'@storybook/theming': 7.3.1(react-dom@16.14.0)(react@16.14.0)
react: 16.14.0
react-dom: 16.14.0(react@16.14.0)
diff --git a/posthog/api/feature_flag.py b/posthog/api/feature_flag.py
index bdd8ecf3ed555..f61543e14f5cb 100644
--- a/posthog/api/feature_flag.py
+++ b/posthog/api/feature_flag.py
@@ -1,9 +1,8 @@
import json
from typing import Any, Dict, List, Optional, cast
-from django.db.models import QuerySet
+from django.db.models import QuerySet, Q
from django.conf import settings
-from django.db.models.query_utils import Q
from rest_framework import authentication, exceptions, request, serializers, status, viewsets
from rest_framework.decorators import action
from rest_framework.permissions import SAFE_METHODS, BasePermission, IsAuthenticated
@@ -70,6 +69,7 @@ class FeatureFlagSerializer(TaggedItemSerializerMixin, serializers.HyperlinkedMo
rollout_percentage = serializers.SerializerMethodField()
experiment_set: serializers.PrimaryKeyRelatedField = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
+ surveys: serializers.SerializerMethodField = serializers.SerializerMethodField()
features: serializers.SerializerMethodField = serializers.SerializerMethodField()
usage_dashboard: serializers.PrimaryKeyRelatedField = serializers.PrimaryKeyRelatedField(read_only=True)
analytics_dashboards = serializers.PrimaryKeyRelatedField(
@@ -100,6 +100,7 @@ class Meta:
"rollout_percentage",
"ensure_experience_continuity",
"experiment_set",
+ "surveys",
"features",
"rollback_conditions",
"performed_rollback",
@@ -129,6 +130,12 @@ def get_features(self, feature_flag: FeatureFlag) -> Dict:
return MinimalEarlyAccessFeatureSerializer(feature_flag.features, many=True).data
+ def get_surveys(self, feature_flag: FeatureFlag) -> Dict:
+ from posthog.api.survey import SurveyAPISerializer
+
+ return SurveyAPISerializer(feature_flag.surveys_linked_flag, many=True).data # type: ignore
+ # ignoring type because mypy doesn't know about the surveys_linked_flag `related_name` relationship
+
def get_rollout_percentage(self, feature_flag: FeatureFlag) -> Optional[int]:
if self.get_is_simple_flag(feature_flag):
return feature_flag.conditions[0].get("rollout_percentage")
@@ -343,7 +350,9 @@ def get_queryset(self) -> QuerySet:
.prefetch_related("experiment_set")
.prefetch_related("features")
.prefetch_related("analytics_dashboards")
+ .prefetch_related("surveys_linked_flag")
)
+
survey_targeting_flags = Survey.objects.filter(team=self.team, targeting_flag__isnull=False).values_list(
"targeting_flag_id", flat=True
)
@@ -434,6 +443,7 @@ def my_flags(self, request: request.Request, **kwargs):
.prefetch_related("experiment_set")
.prefetch_related("features")
.prefetch_related("analytics_dashboards")
+ .prefetch_related("surveys_linked_flag")
.select_related("created_by")
.order_by("-created_at")
)
diff --git a/posthog/api/notebook.py b/posthog/api/notebook.py
index 7f3cfae9be957..5c25efe42815d 100644
--- a/posthog/api/notebook.py
+++ b/posthog/api/notebook.py
@@ -1,5 +1,5 @@
from typing import Dict, List, Optional, Any
-
+from django.db.models import Q
import structlog
from django.db import transaction
from django.db.models import QuerySet
@@ -74,6 +74,7 @@ class Meta:
"short_id",
"title",
"content",
+ "text_content",
"version",
"deleted",
"created_at",
@@ -250,8 +251,13 @@ def _filter_request(self, request: request.Request, queryset: QuerySet) -> Query
queryset = queryset.filter(
last_modified_at__lt=relative_date_parse(request.GET["date_to"], self.team.timezone_info)
)
- elif key == "s":
- queryset = queryset.filter(title__icontains=request.GET["s"])
+ elif key == "search":
+ queryset = queryset.filter(
+ # some notebooks have no text_content until next saved, so we need to check the title too
+ # TODO this can be removed once all/most notebooks have text_content
+ Q(title__search=request.GET["search"])
+ | Q(text_content__search=request.GET["search"])
+ )
elif key == "contains":
contains = request.GET["contains"]
match_pairs = contains.replace(",", " ").split(" ")
diff --git a/posthog/api/query.py b/posthog/api/query.py
index f6c9e871d0c6d..385f14d2f7905 100644
--- a/posthog/api/query.py
+++ b/posthog/api/query.py
@@ -25,6 +25,8 @@
from posthog.hogql.errors import HogQLException
from posthog.hogql.metadata import get_hogql_metadata
from posthog.hogql.query import execute_hogql_query
+
+from posthog.hogql_queries.lifecycle_query_runner import LifecycleQueryRunner
from posthog.models import Team
from posthog.models.event.events_query import run_events_query
from posthog.models.user import User
@@ -203,22 +205,25 @@ def process_query(team: Team, query_json: Dict, default_limit: Optional[int] = N
if query_kind == "EventsQuery":
events_query = EventsQuery.parse_obj(query_json)
- response = run_events_query(query=events_query, team=team, default_limit=default_limit)
- return _unwrap_pydantic_dict(response)
+ events_response = run_events_query(query=events_query, team=team, default_limit=default_limit)
+ return _unwrap_pydantic_dict(events_response)
elif query_kind == "HogQLQuery":
hogql_query = HogQLQuery.parse_obj(query_json)
- response = execute_hogql_query(
+ hogql_response = execute_hogql_query(
query_type="HogQLQuery",
query=hogql_query.query,
team=team,
filters=hogql_query.filters,
default_limit=default_limit,
)
- return _unwrap_pydantic_dict(response)
+ return _unwrap_pydantic_dict(hogql_response)
elif query_kind == "HogQLMetadata":
metadata_query = HogQLMetadata.parse_obj(query_json)
- response = get_hogql_metadata(query=metadata_query, team=team)
- return _unwrap_pydantic_dict(response)
+ metadata_response = get_hogql_metadata(query=metadata_query, team=team)
+ return _unwrap_pydantic_dict(metadata_response)
+ elif query_kind == "LifecycleQuery":
+ lifecycle_query_runner = LifecycleQueryRunner(query_json, team)
+ return _unwrap_pydantic_dict(lifecycle_query_runner.run())
elif query_kind == "DatabaseSchemaQuery":
database = create_hogql_database(team.pk)
return serialize_database(database)
diff --git a/posthog/api/test/__snapshots__/test_session_recordings.ambr b/posthog/api/test/__snapshots__/test_session_recordings.ambr
index 11f6e55469f01..e97965ce54b8f 100644
--- a/posthog/api/test/__snapshots__/test_session_recordings.ambr
+++ b/posthog/api/test/__snapshots__/test_session_recordings.ambr
@@ -83,72 +83,6 @@
'
---
# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.100
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.101
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.102
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.103
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.104
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.105
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.106
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -159,7 +93,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.107
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.101
'
SELECT "posthog_sessionrecording"."id",
"posthog_sessionrecording"."session_id",
@@ -193,7 +127,7 @@
GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.108
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.102
'
SELECT "posthog_sessionrecordingviewed"."session_id"
FROM "posthog_sessionrecordingviewed"
@@ -201,7 +135,7 @@
AND "posthog_sessionrecordingviewed"."user_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.109
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.103
'
SELECT "posthog_persondistinctid"."id",
"posthog_persondistinctid"."team_id",
@@ -228,18 +162,7 @@
AND "posthog_persondistinctid"."team_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.11
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.110
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.104
'
SELECT "posthog_persondistinctid"."id",
"posthog_persondistinctid"."team_id",
@@ -254,7 +177,7 @@
5 /* ... */) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.111
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.105
'
SELECT "posthog_team"."id",
"posthog_team"."uuid",
@@ -305,7 +228,7 @@
LIMIT 21
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.112
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.106
'
SELECT "posthog_user"."id",
"posthog_user"."password",
@@ -334,7 +257,7 @@
LIMIT 21 /**/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.113
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.107
'
SELECT "posthog_team"."id",
"posthog_team"."uuid",
@@ -378,7 +301,7 @@
LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.114
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.108
'
SELECT "posthog_organizationmembership"."id",
"posthog_organizationmembership"."organization_id",
@@ -407,7 +330,7 @@
WHERE "posthog_organizationmembership"."user_id" = 2 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.115
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.109
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -418,62 +341,62 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.116
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.11
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.117
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.110
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.118
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.111
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.119
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.112
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.12
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.113
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.120
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.114
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -484,7 +407,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.121
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.115
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -495,7 +418,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.122
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.116
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -506,7 +429,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.123
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.117
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -517,18 +440,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.124
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.125
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.118
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -539,7 +451,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.126
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.119
'
SELECT "posthog_sessionrecording"."id",
"posthog_sessionrecording"."session_id",
@@ -574,7 +486,18 @@
GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.127
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.12
+ '
+ SELECT "posthog_instancesetting"."id",
+ "posthog_instancesetting"."key",
+ "posthog_instancesetting"."raw_value"
+ FROM "posthog_instancesetting"
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS'
+ ORDER BY "posthog_instancesetting"."id" ASC
+ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
+ '
+---
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.120
'
SELECT "posthog_sessionrecordingviewed"."session_id"
FROM "posthog_sessionrecordingviewed"
@@ -582,7 +505,7 @@
AND "posthog_sessionrecordingviewed"."user_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.128
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.121
'
SELECT "posthog_persondistinctid"."id",
"posthog_persondistinctid"."team_id",
@@ -610,7 +533,7 @@
AND "posthog_persondistinctid"."team_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.129
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.122
'
SELECT "posthog_persondistinctid"."id",
"posthog_persondistinctid"."team_id",
@@ -625,18 +548,7 @@
5 /* ... */) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.13
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.130
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.123
'
SELECT "posthog_team"."id",
"posthog_team"."uuid",
@@ -687,7 +599,7 @@
LIMIT 21
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.131
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.124
'
SELECT "posthog_user"."id",
"posthog_user"."password",
@@ -716,7 +628,7 @@
LIMIT 21 /**/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.132
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.125
'
SELECT "posthog_team"."id",
"posthog_team"."uuid",
@@ -760,7 +672,7 @@
LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.133
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.126
'
SELECT "posthog_organizationmembership"."id",
"posthog_organizationmembership"."organization_id",
@@ -789,7 +701,7 @@
WHERE "posthog_organizationmembership"."user_id" = 2 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.134
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.127
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -800,7 +712,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.135
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.128
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -811,7 +723,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.136
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.129
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -822,51 +734,51 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.137
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.13
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.138
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.130
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.139
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.131
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.14
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.132
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.140
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.133
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -877,7 +789,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.141
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.134
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -888,7 +800,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.142
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.135
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -899,18 +811,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.143
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.144
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.136
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -921,7 +822,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.145
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.137
'
SELECT "posthog_sessionrecording"."id",
"posthog_sessionrecording"."session_id",
@@ -957,7 +858,7 @@
GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.146
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.138
'
SELECT "posthog_sessionrecordingviewed"."session_id"
FROM "posthog_sessionrecordingviewed"
@@ -965,7 +866,7 @@
AND "posthog_sessionrecordingviewed"."user_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.147
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.139
'
SELECT "posthog_persondistinctid"."id",
"posthog_persondistinctid"."team_id",
@@ -994,7 +895,15 @@
AND "posthog_persondistinctid"."team_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.148
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.14
+ '
+ SELECT "posthog_sessionrecordingviewed"."session_id"
+ FROM "posthog_sessionrecordingviewed"
+ WHERE ("posthog_sessionrecordingviewed"."team_id" = 2
+ AND "posthog_sessionrecordingviewed"."user_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
+ '
+---
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.140
'
SELECT "posthog_persondistinctid"."id",
"posthog_persondistinctid"."team_id",
@@ -1009,7 +918,7 @@
5 /* ... */) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.149
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.141
'
SELECT "posthog_team"."id",
"posthog_team"."uuid",
@@ -1060,15 +969,7 @@
LIMIT 21
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.15
- '
- SELECT "posthog_sessionrecordingviewed"."session_id"
- FROM "posthog_sessionrecordingviewed"
- WHERE ("posthog_sessionrecordingviewed"."team_id" = 2
- AND "posthog_sessionrecordingviewed"."user_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.150
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.142
'
SELECT "posthog_user"."id",
"posthog_user"."password",
@@ -1097,7 +998,7 @@
LIMIT 21 /**/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.151
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.143
'
SELECT "posthog_team"."id",
"posthog_team"."uuid",
@@ -1141,7 +1042,7 @@
LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.152
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.144
'
SELECT "posthog_organizationmembership"."id",
"posthog_organizationmembership"."organization_id",
@@ -1170,7 +1071,7 @@
WHERE "posthog_organizationmembership"."user_id" = 2 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.153
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.145
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -1181,7 +1082,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.154
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.146
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -1192,7 +1093,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.155
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.147
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -1203,7 +1104,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.156
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.148
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -1214,7 +1115,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.157
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.149
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -1225,29 +1126,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.158
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.159
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.16
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.15
'
SELECT "posthog_team"."id",
"posthog_team"."uuid",
@@ -1298,18 +1177,18 @@
LIMIT 21
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.160
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.150
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.161
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.151
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -1320,7 +1199,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.162
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.152
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -1331,7 +1210,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.163
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.153
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -1342,7 +1221,18 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.164
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.154
+ '
+ SELECT "posthog_instancesetting"."id",
+ "posthog_instancesetting"."key",
+ "posthog_instancesetting"."raw_value"
+ FROM "posthog_instancesetting"
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS'
+ ORDER BY "posthog_instancesetting"."id" ASC
+ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
+ '
+---
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.155
'
SELECT "posthog_sessionrecording"."id",
"posthog_sessionrecording"."session_id",
@@ -1379,7 +1269,7 @@
GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.165
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.156
'
SELECT "posthog_sessionrecordingviewed"."session_id"
FROM "posthog_sessionrecordingviewed"
@@ -1387,7 +1277,7 @@
AND "posthog_sessionrecordingviewed"."user_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.166
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.157
'
SELECT "posthog_persondistinctid"."id",
"posthog_persondistinctid"."team_id",
@@ -1417,7 +1307,7 @@
AND "posthog_persondistinctid"."team_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.167
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.158
'
SELECT "posthog_persondistinctid"."id",
"posthog_persondistinctid"."team_id",
@@ -1432,7 +1322,7 @@
5 /* ... */) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.168
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.159
'
SELECT "posthog_team"."id",
"posthog_team"."uuid",
@@ -1483,7 +1373,7 @@
LIMIT 21
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.169
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.16
'
SELECT "posthog_user"."id",
"posthog_user"."password",
@@ -1512,7 +1402,7 @@
LIMIT 21 /**/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.17
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.160
'
SELECT "posthog_user"."id",
"posthog_user"."password",
@@ -1541,7 +1431,7 @@
LIMIT 21 /**/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.170
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.161
'
SELECT "posthog_team"."id",
"posthog_team"."uuid",
@@ -1585,7 +1475,7 @@
LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.171
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.162
'
SELECT "posthog_organizationmembership"."id",
"posthog_organizationmembership"."organization_id",
@@ -1614,7 +1504,7 @@
WHERE "posthog_organizationmembership"."user_id" = 2 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.172
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.163
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -1625,7 +1515,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.173
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.164
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -1636,7 +1526,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.174
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.165
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -1647,7 +1537,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.175
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.166
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -1658,7 +1548,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.176
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.167
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -1669,7 +1559,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.177
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.168
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -1680,7 +1570,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.178
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.169
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -1691,18 +1581,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.179
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.18
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.17
'
SELECT "posthog_team"."id",
"posthog_team"."uuid",
@@ -1746,29 +1625,29 @@
LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.180
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.170
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.181
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.171
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.182
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.172
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -1779,7 +1658,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.183
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.173
'
SELECT "posthog_sessionrecording"."id",
"posthog_sessionrecording"."session_id",
@@ -1817,7 +1696,7 @@
GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.184
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.174
'
SELECT "posthog_sessionrecordingviewed"."session_id"
FROM "posthog_sessionrecordingviewed"
@@ -1825,7 +1704,7 @@
AND "posthog_sessionrecordingviewed"."user_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.185
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.175
'
SELECT "posthog_persondistinctid"."id",
"posthog_persondistinctid"."team_id",
@@ -1856,7 +1735,7 @@
AND "posthog_persondistinctid"."team_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.186
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.176
'
SELECT "posthog_persondistinctid"."id",
"posthog_persondistinctid"."team_id",
@@ -1871,7 +1750,7 @@
5 /* ... */) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.187
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.177
'
SELECT "posthog_team"."id",
"posthog_team"."uuid",
@@ -1922,7 +1801,7 @@
LIMIT 21
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.188
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.178
'
SELECT "posthog_user"."id",
"posthog_user"."password",
@@ -1951,7 +1830,7 @@
LIMIT 21 /**/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.189
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.179
'
SELECT "posthog_team"."id",
"posthog_team"."uuid",
@@ -1995,7 +1874,7 @@
LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.19
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.18
'
SELECT "posthog_organizationmembership"."id",
"posthog_organizationmembership"."organization_id",
@@ -2024,7 +1903,7 @@
WHERE "posthog_organizationmembership"."user_id" = 2 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.190
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.180
'
SELECT "posthog_organizationmembership"."id",
"posthog_organizationmembership"."organization_id",
@@ -2053,7 +1932,7 @@
WHERE "posthog_organizationmembership"."user_id" = 2 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.191
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.181
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -2064,7 +1943,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.192
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.182
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -2075,7 +1954,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.193
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.183
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -2086,7 +1965,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.194
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.184
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -2097,6 +1976,177 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.185
+ '
+ SELECT "posthog_instancesetting"."id",
+ "posthog_instancesetting"."key",
+ "posthog_instancesetting"."raw_value"
+ FROM "posthog_instancesetting"
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED'
+ ORDER BY "posthog_instancesetting"."id" ASC
+ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
+ '
+---
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.186
+ '
+ SELECT "posthog_instancesetting"."id",
+ "posthog_instancesetting"."key",
+ "posthog_instancesetting"."raw_value"
+ FROM "posthog_instancesetting"
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
+ ORDER BY "posthog_instancesetting"."id" ASC
+ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
+ '
+---
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.187
+ '
+ SELECT "posthog_instancesetting"."id",
+ "posthog_instancesetting"."key",
+ "posthog_instancesetting"."raw_value"
+ FROM "posthog_instancesetting"
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS'
+ ORDER BY "posthog_instancesetting"."id" ASC
+ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
+ '
+---
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.188
+ '
+ SELECT "posthog_instancesetting"."id",
+ "posthog_instancesetting"."key",
+ "posthog_instancesetting"."raw_value"
+ FROM "posthog_instancesetting"
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS'
+ ORDER BY "posthog_instancesetting"."id" ASC
+ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
+ '
+---
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.189
+ '
+ SELECT "posthog_instancesetting"."id",
+ "posthog_instancesetting"."key",
+ "posthog_instancesetting"."raw_value"
+ FROM "posthog_instancesetting"
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS'
+ ORDER BY "posthog_instancesetting"."id" ASC
+ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
+ '
+---
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.19
+ '
+ SELECT "posthog_instancesetting"."id",
+ "posthog_instancesetting"."key",
+ "posthog_instancesetting"."raw_value"
+ FROM "posthog_instancesetting"
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED'
+ ORDER BY "posthog_instancesetting"."id" ASC
+ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
+ '
+---
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.190
+ '
+ SELECT "posthog_instancesetting"."id",
+ "posthog_instancesetting"."key",
+ "posthog_instancesetting"."raw_value"
+ FROM "posthog_instancesetting"
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS'
+ ORDER BY "posthog_instancesetting"."id" ASC
+ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
+ '
+---
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.191
+ '
+ SELECT "posthog_sessionrecording"."id",
+ "posthog_sessionrecording"."session_id",
+ "posthog_sessionrecording"."team_id",
+ "posthog_sessionrecording"."created_at",
+ "posthog_sessionrecording"."deleted",
+ "posthog_sessionrecording"."object_storage_path",
+ "posthog_sessionrecording"."distinct_id",
+ "posthog_sessionrecording"."duration",
+ "posthog_sessionrecording"."active_seconds",
+ "posthog_sessionrecording"."inactive_seconds",
+ "posthog_sessionrecording"."start_time",
+ "posthog_sessionrecording"."end_time",
+ "posthog_sessionrecording"."click_count",
+ "posthog_sessionrecording"."keypress_count",
+ "posthog_sessionrecording"."mouse_activity_count",
+ "posthog_sessionrecording"."console_log_count",
+ "posthog_sessionrecording"."console_warn_count",
+ "posthog_sessionrecording"."console_error_count",
+ "posthog_sessionrecording"."start_url",
+ "posthog_sessionrecording"."storage_version",
+ COUNT("posthog_sessionrecordingplaylistitem"."id") AS "pinned_count"
+ FROM "posthog_sessionrecording"
+ LEFT OUTER JOIN "posthog_sessionrecordingplaylistitem" ON ("posthog_sessionrecording"."session_id" = "posthog_sessionrecordingplaylistitem"."recording_id")
+ WHERE ("posthog_sessionrecording"."session_id" IN ('5',
+ '2',
+ '3',
+ '4',
+ '7',
+ '10',
+ '6',
+ '1',
+ '8',
+ '9')
+ AND "posthog_sessionrecording"."team_id" = 2)
+ GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
+ '
+---
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.192
+ '
+ SELECT "posthog_sessionrecordingviewed"."session_id"
+ FROM "posthog_sessionrecordingviewed"
+ WHERE ("posthog_sessionrecordingviewed"."team_id" = 2
+ AND "posthog_sessionrecordingviewed"."user_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
+ '
+---
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.193
+ '
+ SELECT "posthog_persondistinctid"."id",
+ "posthog_persondistinctid"."team_id",
+ "posthog_persondistinctid"."person_id",
+ "posthog_persondistinctid"."distinct_id",
+ "posthog_persondistinctid"."version",
+ "posthog_person"."id",
+ "posthog_person"."created_at",
+ "posthog_person"."properties_last_updated_at",
+ "posthog_person"."properties_last_operation",
+ "posthog_person"."team_id",
+ "posthog_person"."properties",
+ "posthog_person"."is_user_id",
+ "posthog_person"."is_identified",
+ "posthog_person"."uuid",
+ "posthog_person"."version"
+ FROM "posthog_persondistinctid"
+ INNER JOIN "posthog_person" ON ("posthog_persondistinctid"."person_id" = "posthog_person"."id")
+ WHERE ("posthog_persondistinctid"."distinct_id" IN ('user1',
+ 'user10',
+ 'user2',
+ 'user3',
+ 'user4',
+ 'user5',
+ 'user6',
+ 'user7',
+ 'user8',
+ 'user9')
+ AND "posthog_persondistinctid"."team_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
+ '
+---
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.194
+ '
+ SELECT "posthog_persondistinctid"."id",
+ "posthog_persondistinctid"."team_id",
+ "posthog_persondistinctid"."person_id",
+ "posthog_persondistinctid"."distinct_id",
+ "posthog_persondistinctid"."version"
+ FROM "posthog_persondistinctid"
+ WHERE "posthog_persondistinctid"."person_id" IN (1,
+ 2,
+ 3,
+ 4,
+ 5 /* ... */) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
+ '
+---
# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.195
'
SELECT "posthog_instancesetting"."id",
@@ -2187,7 +2237,7 @@
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
@@ -2314,7 +2364,7 @@
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
@@ -2325,7 +2375,7 @@
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
@@ -2336,7 +2386,7 @@
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
@@ -2347,7 +2397,7 @@
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
@@ -2358,7 +2408,7 @@
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
@@ -2369,7 +2419,7 @@
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
@@ -2380,7 +2430,7 @@
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
@@ -2397,39 +2447,6 @@
'
---
# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.29
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.3
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:RATE_LIMIT_ENABLED'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.30
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.31
'
SELECT "posthog_sessionrecording"."id",
"posthog_sessionrecording"."session_id",
@@ -2459,7 +2476,18 @@
GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.32
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.3
+ '
+ SELECT "posthog_instancesetting"."id",
+ "posthog_instancesetting"."key",
+ "posthog_instancesetting"."raw_value"
+ FROM "posthog_instancesetting"
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:RATE_LIMIT_ENABLED'
+ ORDER BY "posthog_instancesetting"."id" ASC
+ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
+ '
+---
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.30
'
SELECT "posthog_sessionrecordingviewed"."session_id"
FROM "posthog_sessionrecordingviewed"
@@ -2467,7 +2495,7 @@
AND "posthog_sessionrecordingviewed"."user_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.33
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.31
'
SELECT "posthog_persondistinctid"."id",
"posthog_persondistinctid"."team_id",
@@ -2490,7 +2518,7 @@
AND "posthog_persondistinctid"."team_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.34
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.32
'
SELECT "posthog_persondistinctid"."id",
"posthog_persondistinctid"."team_id",
@@ -2505,7 +2533,7 @@
5 /* ... */) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.35
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.33
'
SELECT "posthog_team"."id",
"posthog_team"."uuid",
@@ -2556,7 +2584,7 @@
LIMIT 21
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.36
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.34
'
SELECT "posthog_user"."id",
"posthog_user"."password",
@@ -2585,7 +2613,7 @@
LIMIT 21 /**/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.37
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.35
'
SELECT "posthog_team"."id",
"posthog_team"."uuid",
@@ -2629,7 +2657,7 @@
LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.38
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.36
'
SELECT "posthog_organizationmembership"."id",
"posthog_organizationmembership"."organization_id",
@@ -2658,18 +2686,7 @@
WHERE "posthog_organizationmembership"."user_id" = 2 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.39
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.4
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.37
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -2680,7 +2697,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.40
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.38
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -2691,7 +2708,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.41
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.39
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -2702,18 +2719,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.42
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.43
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.4
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -2724,7 +2730,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.44
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.40
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -2735,29 +2741,29 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.45
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.41
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.46
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.42
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.47
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.43
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -2768,7 +2774,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.48
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.44
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -2779,7 +2785,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.49
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.45
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -2790,18 +2796,18 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.5
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.46
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.50
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.47
'
SELECT "posthog_sessionrecording"."id",
"posthog_sessionrecording"."session_id",
@@ -2832,7 +2838,7 @@
GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.51
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.48
'
SELECT "posthog_sessionrecordingviewed"."session_id"
FROM "posthog_sessionrecordingviewed"
@@ -2840,7 +2846,7 @@
AND "posthog_sessionrecordingviewed"."user_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.52
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.49
'
SELECT "posthog_persondistinctid"."id",
"posthog_persondistinctid"."team_id",
@@ -2864,7 +2870,18 @@
AND "posthog_persondistinctid"."team_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.53
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.5
+ '
+ SELECT "posthog_instancesetting"."id",
+ "posthog_instancesetting"."key",
+ "posthog_instancesetting"."raw_value"
+ FROM "posthog_instancesetting"
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
+ ORDER BY "posthog_instancesetting"."id" ASC
+ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
+ '
+---
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.50
'
SELECT "posthog_persondistinctid"."id",
"posthog_persondistinctid"."team_id",
@@ -2879,7 +2896,7 @@
5 /* ... */) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.54
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.51
'
SELECT "posthog_team"."id",
"posthog_team"."uuid",
@@ -2930,7 +2947,7 @@
LIMIT 21
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.55
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.52
'
SELECT "posthog_user"."id",
"posthog_user"."password",
@@ -2959,7 +2976,7 @@
LIMIT 21 /**/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.56
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.53
'
SELECT "posthog_team"."id",
"posthog_team"."uuid",
@@ -3003,7 +3020,7 @@
LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.57
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.54
'
SELECT "posthog_organizationmembership"."id",
"posthog_organizationmembership"."organization_id",
@@ -3032,7 +3049,7 @@
WHERE "posthog_organizationmembership"."user_id" = 2 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.58
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.55
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -3043,7 +3060,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.59
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.56
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -3054,7 +3071,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.6
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.57
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -3065,29 +3082,29 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.60
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.58
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.61
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.59
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.62
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.6
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -3098,7 +3115,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.63
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.60
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -3109,7 +3126,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.64
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.61
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -3120,7 +3137,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.65
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.62
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -3131,7 +3148,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.66
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.63
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -3142,18 +3159,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.67
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.68
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.64
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -3164,7 +3170,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.69
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.65
'
SELECT "posthog_sessionrecording"."id",
"posthog_sessionrecording"."session_id",
@@ -3196,18 +3202,7 @@
GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.7
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.70
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.66
'
SELECT "posthog_sessionrecordingviewed"."session_id"
FROM "posthog_sessionrecordingviewed"
@@ -3215,7 +3210,7 @@
AND "posthog_sessionrecordingviewed"."user_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.71
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.67
'
SELECT "posthog_persondistinctid"."id",
"posthog_persondistinctid"."team_id",
@@ -3240,7 +3235,7 @@
AND "posthog_persondistinctid"."team_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.72
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.68
'
SELECT "posthog_persondistinctid"."id",
"posthog_persondistinctid"."team_id",
@@ -3255,7 +3250,7 @@
5 /* ... */) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.73
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.69
'
SELECT "posthog_team"."id",
"posthog_team"."uuid",
@@ -3306,7 +3301,18 @@
LIMIT 21
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.74
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.7
+ '
+ SELECT "posthog_instancesetting"."id",
+ "posthog_instancesetting"."key",
+ "posthog_instancesetting"."raw_value"
+ FROM "posthog_instancesetting"
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
+ ORDER BY "posthog_instancesetting"."id" ASC
+ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
+ '
+---
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.70
'
SELECT "posthog_user"."id",
"posthog_user"."password",
@@ -3335,7 +3341,7 @@
LIMIT 21 /**/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.75
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.71
'
SELECT "posthog_team"."id",
"posthog_team"."uuid",
@@ -3379,7 +3385,7 @@
LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.76
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.72
'
SELECT "posthog_organizationmembership"."id",
"posthog_organizationmembership"."organization_id",
@@ -3408,7 +3414,7 @@
WHERE "posthog_organizationmembership"."user_id" = 2 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.77
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.73
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -3419,7 +3425,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.78
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.74
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -3430,18 +3436,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.79
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.8
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.75
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -3452,7 +3447,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.80
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.76
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -3463,7 +3458,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.81
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.77
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -3474,7 +3469,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.82
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.78
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -3485,7 +3480,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.83
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.79
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -3496,40 +3491,40 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.84
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.8
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.85
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.80
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.86
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.81
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
"posthog_instancesetting"."raw_value"
FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS'
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS'
ORDER BY "posthog_instancesetting"."id" ASC
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.87
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.82
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -3540,7 +3535,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.88
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.83
'
SELECT "posthog_sessionrecording"."id",
"posthog_sessionrecording"."session_id",
@@ -3573,7 +3568,7 @@
GROUP BY "posthog_sessionrecording"."id" /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.89
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.84
'
SELECT "posthog_sessionrecordingviewed"."session_id"
FROM "posthog_sessionrecordingviewed"
@@ -3581,18 +3576,7 @@
AND "posthog_sessionrecordingviewed"."user_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.9
- '
- SELECT "posthog_instancesetting"."id",
- "posthog_instancesetting"."key",
- "posthog_instancesetting"."raw_value"
- FROM "posthog_instancesetting"
- WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
- ORDER BY "posthog_instancesetting"."id" ASC
- LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
- '
----
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.90
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.85
'
SELECT "posthog_persondistinctid"."id",
"posthog_persondistinctid"."team_id",
@@ -3618,7 +3602,7 @@
AND "posthog_persondistinctid"."team_id" = 2) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.91
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.86
'
SELECT "posthog_persondistinctid"."id",
"posthog_persondistinctid"."team_id",
@@ -3633,7 +3617,7 @@
5 /* ... */) /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.92
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.87
'
SELECT "posthog_team"."id",
"posthog_team"."uuid",
@@ -3684,7 +3668,7 @@
LIMIT 21
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.93
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.88
'
SELECT "posthog_user"."id",
"posthog_user"."password",
@@ -3713,7 +3697,7 @@
LIMIT 21 /**/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.94
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.89
'
SELECT "posthog_team"."id",
"posthog_team"."uuid",
@@ -3757,7 +3741,18 @@
LIMIT 21 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.95
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.9
+ '
+ SELECT "posthog_instancesetting"."id",
+ "posthog_instancesetting"."key",
+ "posthog_instancesetting"."raw_value"
+ FROM "posthog_instancesetting"
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
+ ORDER BY "posthog_instancesetting"."id" ASC
+ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
+ '
+---
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.90
'
SELECT "posthog_organizationmembership"."id",
"posthog_organizationmembership"."organization_id",
@@ -3786,7 +3781,7 @@
WHERE "posthog_organizationmembership"."user_id" = 2 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.96
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.91
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -3797,7 +3792,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.97
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.92
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -3808,7 +3803,7 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.98
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.93
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -3819,7 +3814,29 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
-# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.99
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.94
+ '
+ SELECT "posthog_instancesetting"."id",
+ "posthog_instancesetting"."key",
+ "posthog_instancesetting"."raw_value"
+ FROM "posthog_instancesetting"
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
+ ORDER BY "posthog_instancesetting"."id" ASC
+ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
+ '
+---
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.95
+ '
+ SELECT "posthog_instancesetting"."id",
+ "posthog_instancesetting"."key",
+ "posthog_instancesetting"."raw_value"
+ FROM "posthog_instancesetting"
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_V2_ENABLED'
+ ORDER BY "posthog_instancesetting"."id" ASC
+ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
+ '
+---
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.96
'
SELECT "posthog_instancesetting"."id",
"posthog_instancesetting"."key",
@@ -3830,3 +3847,36 @@
LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
'
---
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.97
+ '
+ SELECT "posthog_instancesetting"."id",
+ "posthog_instancesetting"."key",
+ "posthog_instancesetting"."raw_value"
+ FROM "posthog_instancesetting"
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS'
+ ORDER BY "posthog_instancesetting"."id" ASC
+ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
+ '
+---
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.98
+ '
+ SELECT "posthog_instancesetting"."id",
+ "posthog_instancesetting"."key",
+ "posthog_instancesetting"."raw_value"
+ FROM "posthog_instancesetting"
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:RECORDINGS_TTL_WEEKS'
+ ORDER BY "posthog_instancesetting"."id" ASC
+ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
+ '
+---
+# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.99
+ '
+ SELECT "posthog_instancesetting"."id",
+ "posthog_instancesetting"."key",
+ "posthog_instancesetting"."raw_value"
+ FROM "posthog_instancesetting"
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:AGGREGATE_BY_DISTINCT_IDS_TEAMS'
+ ORDER BY "posthog_instancesetting"."id" ASC
+ LIMIT 1 /*controller='project_session_recordings-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/session_recordings/%3F%24'*/
+ '
+---
diff --git a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr
index af0efd4023fe7..245b0ceb08720 100644
--- a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr
+++ b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr
@@ -40,6 +40,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -51,6 +52,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -157,6 +159,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -168,6 +171,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -267,6 +271,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -278,6 +283,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -468,6 +474,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -479,6 +486,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -626,6 +634,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -637,6 +646,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -795,6 +805,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -806,6 +817,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -951,6 +963,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -962,6 +975,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -1181,6 +1195,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -1192,6 +1207,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -1230,6 +1246,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -1241,6 +1258,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -1376,6 +1394,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -1387,6 +1406,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -1478,6 +1498,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -1489,6 +1510,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -1527,6 +1549,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -1538,6 +1561,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -1671,6 +1695,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -1682,6 +1707,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -1789,6 +1815,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -1800,6 +1827,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -2040,6 +2068,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -2051,6 +2080,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -2272,6 +2302,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -2283,6 +2314,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -2390,6 +2422,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -2401,6 +2434,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -2501,6 +2535,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -2512,6 +2547,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -2612,6 +2648,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -2623,6 +2660,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -2703,6 +2741,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -2714,6 +2753,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -2845,6 +2885,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -2856,6 +2897,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -2933,6 +2975,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -2944,6 +2987,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -3048,6 +3092,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -3059,6 +3104,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -3163,6 +3209,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -3174,6 +3221,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -3289,6 +3337,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -3300,6 +3349,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -3600,6 +3650,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -3611,6 +3662,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -3750,6 +3802,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -3761,6 +3814,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -3874,6 +3928,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -3885,6 +3940,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -3951,6 +4007,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -3962,6 +4019,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -4104,6 +4162,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -4115,6 +4174,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -4153,6 +4213,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -4164,6 +4225,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -4268,6 +4330,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -4279,6 +4342,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -4409,6 +4473,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -4420,6 +4485,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -4825,6 +4891,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -4836,6 +4903,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -4956,6 +5024,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -4967,6 +5036,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -5033,6 +5103,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -5044,6 +5115,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -5148,6 +5220,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -5159,6 +5232,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -5224,6 +5298,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -5235,6 +5310,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -5273,6 +5349,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -5284,6 +5361,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -5388,6 +5466,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -5399,6 +5478,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -5520,6 +5600,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -5531,6 +5612,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -5674,6 +5756,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -5685,6 +5768,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -6072,6 +6156,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -6083,6 +6168,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -6211,6 +6297,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -6222,6 +6309,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -6383,6 +6471,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -6394,6 +6483,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -6541,6 +6631,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -6552,6 +6643,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -6671,6 +6763,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -6682,6 +6775,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -6752,6 +6846,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -6763,6 +6858,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -6908,6 +7004,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -6919,6 +7016,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -7538,6 +7636,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -7549,6 +7648,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -7780,6 +7880,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -7791,6 +7892,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -7933,6 +8035,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -7944,6 +8047,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -7982,6 +8086,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -7993,6 +8098,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -8097,6 +8203,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -8108,6 +8215,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -8238,6 +8346,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -8249,6 +8358,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -8353,6 +8463,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -8364,6 +8475,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -8480,6 +8592,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -8491,6 +8604,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -8612,6 +8726,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -8623,6 +8738,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -8912,6 +9028,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -8923,6 +9040,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -9058,6 +9176,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -9069,6 +9188,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -9153,6 +9273,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -9164,6 +9285,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -9274,6 +9396,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -9285,6 +9408,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -9392,6 +9516,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -9403,6 +9528,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -9513,6 +9639,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -9524,6 +9651,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -9685,6 +9813,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -9696,6 +9825,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -9834,6 +9964,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -9845,6 +9976,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -9929,6 +10061,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -9940,6 +10073,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -10081,6 +10215,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -10092,6 +10227,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -10248,6 +10384,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -10259,6 +10396,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -10350,6 +10488,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -10361,6 +10500,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -10502,6 +10642,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -10513,6 +10654,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -10631,6 +10773,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -10642,6 +10785,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -10831,6 +10975,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -10842,6 +10987,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
diff --git a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr
index 396f5103c7ec3..299074ec3d44b 100644
--- a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr
+++ b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr
@@ -40,6 +40,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -51,6 +52,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -76,6 +78,7 @@
"posthog_notebook"."team_id",
"posthog_notebook"."title",
"posthog_notebook"."content",
+ "posthog_notebook"."text_content",
"posthog_notebook"."deleted",
"posthog_notebook"."version",
"posthog_notebook"."created_at",
@@ -94,6 +97,7 @@
"posthog_notebook"."team_id",
"posthog_notebook"."title",
"posthog_notebook"."content",
+ "posthog_notebook"."text_content",
"posthog_notebook"."deleted",
"posthog_notebook"."version",
"posthog_notebook"."created_at",
@@ -120,6 +124,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -131,6 +136,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -169,6 +175,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -180,6 +187,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -277,6 +285,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -288,6 +297,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -479,6 +489,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -490,6 +501,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -555,6 +567,7 @@
"posthog_notebook"."team_id",
"posthog_notebook"."title",
"posthog_notebook"."content",
+ "posthog_notebook"."text_content",
"posthog_notebook"."deleted",
"posthog_notebook"."version",
"posthog_notebook"."created_at",
@@ -572,6 +585,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -583,6 +597,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -671,6 +686,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -682,6 +698,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
diff --git a/posthog/api/test/notebooks/test_notebook.py b/posthog/api/test/notebooks/test_notebook.py
index 3f49024d708e9..1b7f36ae54ce3 100644
--- a/posthog/api/test/notebooks/test_notebook.py
+++ b/posthog/api/test/notebooks/test_notebook.py
@@ -1,4 +1,4 @@
-from typing import List, Dict, Optional
+from typing import List, Dict
from unittest import mock
from freezegun import freeze_time
@@ -67,17 +67,20 @@ def test_cannot_list_deleted_notebook(self) -> None:
@parameterized.expand(
[
- ("without_content", None),
- ("with_content", {"some": "kind", "of": "tip", "tap": "content"}),
+ ("without_content", None, None),
+ ("with_content", {"some": "kind", "of": "tip", "tap": "content"}, "some kind of tip tap content"),
]
)
- def test_create_a_notebook(self, _, content: Optional[Dict]) -> None:
- response = self.client.post(f"/api/projects/{self.team.id}/notebooks", data={"content": content})
+ def test_create_a_notebook(self, _, content: Dict | None, text_content: str | None) -> None:
+ response = self.client.post(
+ f"/api/projects/{self.team.id}/notebooks", data={"content": content, "text_content": text_content}
+ )
assert response.status_code == status.HTTP_201_CREATED
assert response.json() == {
"id": response.json()["id"],
"short_id": response.json()["short_id"],
"content": content,
+ "text_content": text_content,
"title": None,
"version": 0,
"created_at": mock.ANY,
diff --git a/posthog/api/test/notebooks/test_notebook_filtering.py b/posthog/api/test/notebooks/test_notebook_filtering.py
index 4e9f9370c178d..5f634de548fc7 100644
--- a/posthog/api/test/notebooks/test_notebook_filtering.py
+++ b/posthog/api/test/notebooks/test_notebook_filtering.py
@@ -42,7 +42,7 @@
},
}
-BASIC_TEXT = lambda text: {"type": "paragraph", "content": [{"text": text, "type": "text"}]}
+BASIC_TEXT = lambda text: {"type": "paragraph", "content": [{"text": text, "type": "text"}], "text_content": text}
class TestNotebooksFiltering(APIBaseTest, QueryMatchingTest):
@@ -62,20 +62,22 @@ def _create_notebook_with_content(self, inner_content: List[Dict[str, Any]], tit
@parameterized.expand(
[
- ["some text", [0]],
- ["other text", [1]],
- ["text", [0, 1]],
+ ["i ride", [0]],
+ ["pony", [0]],
+ ["ponies", [0]],
+ ["my hobby", [1]],
+ ["around", [0, 1]],
["random", []],
]
)
def test_filters_based_on_title(self, search_text: str, expected_match_indexes: List[int]) -> None:
notebook_ids = [
- self._create_notebook_with_content([BASIC_TEXT("my important notes")], title="some text"),
- self._create_notebook_with_content([BASIC_TEXT("my important notes")], title="other text"),
+ self._create_notebook_with_content([BASIC_TEXT("my important notes")], title="i ride around on a pony"),
+ self._create_notebook_with_content([BASIC_TEXT("my important notes")], title="my hobby is to fish around"),
]
response = self.client.get(
- f"/api/projects/{self.team.id}/notebooks?s={search_text}",
+ f"/api/projects/{self.team.id}/notebooks?search={search_text}",
)
assert response.status_code == status.HTTP_200_OK
@@ -83,6 +85,32 @@ def test_filters_based_on_title(self, search_text: str, expected_match_indexes:
assert len(results) == len(expected_match_indexes)
assert sorted([r["id"] for r in results]) == sorted([notebook_ids[i] for i in expected_match_indexes])
+ @parameterized.expand(
+ [
+ ["pony", [0]],
+ ["pOnY", [0]],
+ ["ponies", [0]],
+ ["goat", [1]],
+ ["ride", [0, 1]],
+ ["neither", []],
+ ]
+ )
+ def test_filters_based_on_text_content(self, search_text: str, expected_match_indexes: List[int]) -> None:
+ notebook_ids = [
+ # will match both pony and ponies
+ self._create_notebook_with_content([BASIC_TEXT("you may ride a pony")], title="never matches"),
+ self._create_notebook_with_content([BASIC_TEXT("but may not ride a goat")], title="never matches"),
+ ]
+
+ response = self.client.get(
+ f"/api/projects/{self.team.id}/notebooks?search={search_text}",
+ )
+ assert response.status_code == status.HTTP_200_OK
+
+ results = response.json()["results"]
+ assert len(results) == len(expected_match_indexes)
+ assert sorted([r["id"] for r in results]) == sorted([notebook_ids[i] for i in expected_match_indexes])
+
def test_filters_based_on_params(self) -> None:
other_user = User.objects.create_and_join(self.organization, "other@posthog.com", "password")
notebook_one = Notebook.objects.create(team=self.team, created_by=self.user)
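The filtering tests above now search both the notebook title and the new `text_content` field through a single `?search=` parameter (renamed from `?s=`). How the plain text is derived from the rich `content` JSON is not shown in this part of the diff, but a small recursive walk over the TipTap-style tree is enough; the helper below is a hypothetical sketch, not the PR's implementation:

```python
from typing import Any, Dict, List, Optional


def extract_text_content(node: Optional[Dict[str, Any]]) -> str:
    """Concatenate the 'text' leaves of a rich-content JSON tree (illustrative helper)."""
    if not node:
        return ""
    parts: List[str] = []
    if node.get("type") == "text" and node.get("text"):
        parts.append(node["text"])
    for child in node.get("content") or []:
        parts.append(extract_text_content(child))
    return " ".join(part for part in parts if part)


# Matches the shape produced by the BASIC_TEXT fixture in test_notebook_filtering.py:
doc = {"type": "doc", "content": [{"type": "paragraph", "content": [{"type": "text", "text": "you may ride a pony"}]}]}
assert extract_text_content(doc) == "you may ride a pony"
```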
diff --git a/posthog/api/test/test_annotation.py b/posthog/api/test/test_annotation.py
index 82089a5c7ea6b..c559411f607d5 100644
--- a/posthog/api/test/test_annotation.py
+++ b/posthog/api/test/test_annotation.py
@@ -1,7 +1,7 @@
from datetime import datetime
from unittest.mock import patch
-import pytz
+from zoneinfo import ZoneInfo
from django.utils.timezone import now
from rest_framework import status
@@ -111,7 +111,7 @@ def test_creating_annotation(self, mock_capture):
"team": team2.pk, # make sure this is set automatically
},
)
- date_marker: datetime = datetime(2020, 1, 1, 0, 0, 0).replace(tzinfo=pytz.UTC)
+ date_marker: datetime = datetime(2020, 1, 1, 0, 0, 0).replace(tzinfo=ZoneInfo("UTC"))
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
instance = Annotation.objects.get(pk=response.json()["id"])
self.assertEqual(instance.content, "Marketing campaign")
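The pytz-to-zoneinfo change repeated across these test files is mechanical: for UTC the two libraries produce identical aware datetimes, so the existing `replace(tzinfo=...)` and `astimezone(...)` call sites keep working unchanged. A standard-library-only sketch of the equivalence:

```python
from datetime import datetime, timezone
from zoneinfo import ZoneInfo  # stdlib as of Python 3.9, replacing the pytz dependency

aware = datetime(2020, 1, 1).replace(tzinfo=ZoneInfo("UTC"))
assert aware.utcoffset() == timezone.utc.utcoffset(None)   # both are +00:00
assert aware == datetime(2020, 1, 1, tzinfo=timezone.utc)  # same instant

# Unlike pytz, zoneinfo also handles non-UTC zones correctly with plain
# replace()/astimezone(), with no localize()/normalize() dance.
```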
diff --git a/posthog/api/test/test_event.py b/posthog/api/test/test_event.py
index 8679485030bf6..b7f746c84a473 100644
--- a/posthog/api/test/test_event.py
+++ b/posthog/api/test/test_event.py
@@ -3,7 +3,7 @@
from unittest.mock import patch
from urllib.parse import unquote, urlencode
-import pytz
+from zoneinfo import ZoneInfo
from dateutil import parser
from dateutil.relativedelta import relativedelta
from django.utils import timezone
@@ -168,7 +168,6 @@ def test_custom_event_values(self):
@also_test_with_materialized_columns(["random_prop"])
@snapshot_clickhouse_queries
def test_event_property_values(self):
-
with freeze_time("2020-01-10"):
_create_event(
distinct_id="bla",
@@ -346,8 +345,8 @@ def test_pagination_bounded_date_range(self):
with freeze_time("2021-10-10T12:03:03.829294Z"):
_create_person(team=self.team, distinct_ids=["1"])
now = timezone.now() - relativedelta(months=11)
- after = (now).astimezone(pytz.utc).isoformat()
- before = (now + relativedelta(days=23)).astimezone(pytz.utc).isoformat()
+ after = (now).astimezone(ZoneInfo("UTC")).isoformat()
+ before = (now + relativedelta(days=23)).astimezone(ZoneInfo("UTC")).isoformat()
params = {"distinct_id": "1", "after": after, "before": before, "limit": 10}
params_string = urlencode(params)
for idx in range(0, 25):
diff --git a/posthog/api/test/test_feature_flag.py b/posthog/api/test/test_feature_flag.py
index 9efebf97b878b..b0d6f73c87ebb 100644
--- a/posthog/api/test/test_feature_flag.py
+++ b/posthog/api/test/test_feature_flag.py
@@ -939,7 +939,7 @@ def test_my_flags_is_not_nplus1(self) -> None:
format="json",
).json()
- with self.assertNumQueries(9):
+ with self.assertNumQueries(10):
response = self.client.get(f"/api/projects/{self.team.id}/feature_flags/my_flags")
self.assertEqual(response.status_code, status.HTTP_200_OK)
@@ -950,7 +950,7 @@ def test_my_flags_is_not_nplus1(self) -> None:
format="json",
).json()
- with self.assertNumQueries(9):
+ with self.assertNumQueries(10):
response = self.client.get(f"/api/projects/{self.team.id}/feature_flags/my_flags")
self.assertEqual(response.status_code, status.HTTP_200_OK)
diff --git a/posthog/api/test/test_insight.py b/posthog/api/test/test_insight.py
index a9697252901f4..8becdf0ac7e60 100644
--- a/posthog/api/test/test_insight.py
+++ b/posthog/api/test/test_insight.py
@@ -5,7 +5,7 @@
from unittest.case import skip
from unittest.mock import patch
-import pytz
+from zoneinfo import ZoneInfo
from django.test import override_settings
from django.utils import timezone
from freezegun import freeze_time
@@ -1860,7 +1860,7 @@ def test_create_insight_viewed(self) -> None:
self.assertEqual(created_insight_viewed.user, self.user)
self.assertEqual(
created_insight_viewed.last_viewed_at,
- datetime(2022, 3, 22, 0, 0, tzinfo=pytz.UTC),
+ datetime(2022, 3, 22, 0, 0, tzinfo=ZoneInfo("UTC")),
)
def test_update_insight_viewed(self) -> None:
@@ -1882,7 +1882,7 @@ def test_update_insight_viewed(self) -> None:
updated_insight_viewed = InsightViewed.objects.all()[0]
self.assertEqual(
updated_insight_viewed.last_viewed_at,
- datetime(2022, 3, 23, 0, 0, tzinfo=pytz.UTC),
+ datetime(2022, 3, 23, 0, 0, tzinfo=ZoneInfo("UTC")),
)
def test_cant_view_insight_viewed_for_insight_in_another_team(self) -> None:
diff --git a/posthog/api/test/test_organization_domain.py b/posthog/api/test/test_organization_domain.py
index fe4a4e5afa12a..2615880cbff20 100644
--- a/posthog/api/test/test_organization_domain.py
+++ b/posthog/api/test/test_organization_domain.py
@@ -4,7 +4,7 @@
import dns.resolver
import dns.rrset
import pytest
-import pytz
+from zoneinfo import ZoneInfo
from django.utils import timezone
from freezegun import freeze_time
from rest_framework import status
@@ -133,7 +133,7 @@ def test_creating_domain_on_self_hosted_is_automatically_verified(self):
instance = OrganizationDomain.objects.get(id=response_data["id"])
self.assertEqual(instance.domain, "the.posthog.com")
- self.assertEqual(instance.verified_at, datetime.datetime(2021, 8, 8, 20, 20, 8, tzinfo=pytz.UTC))
+ self.assertEqual(instance.verified_at, datetime.datetime(2021, 8, 8, 20, 20, 8, tzinfo=ZoneInfo("UTC")))
self.assertEqual(instance.last_verification_retry, None)
self.assertEqual(instance.sso_enforcement, "")
@@ -200,7 +200,7 @@ def test_can_request_verification_for_unverified_domains(self, mock_dns_query):
self.assertEqual(response_data["verified_at"], self.domain.verified_at.strftime("%Y-%m-%dT%H:%M:%SZ"))
self.assertEqual(response_data["is_verified"], True)
- self.assertEqual(self.domain.verified_at, datetime.datetime(2021, 8, 8, 20, 20, 8, tzinfo=pytz.UTC))
+ self.assertEqual(self.domain.verified_at, datetime.datetime(2021, 8, 8, 20, 20, 8, tzinfo=ZoneInfo("UTC")))
self.assertEqual(self.domain.is_verified, True)
@patch("posthog.models.organization_domain.dns.resolver.resolve")
@@ -220,7 +220,7 @@ def test_domain_is_not_verified_with_missing_challenge(self, mock_dns_query):
self.assertEqual(response_data["verified_at"], None)
self.assertEqual(self.domain.verified_at, None)
self.assertEqual(
- self.domain.last_verification_retry, datetime.datetime(2021, 10, 10, 10, 10, 10, tzinfo=pytz.UTC)
+ self.domain.last_verification_retry, datetime.datetime(2021, 10, 10, 10, 10, 10, tzinfo=ZoneInfo("UTC"))
)
@patch("posthog.models.organization_domain.dns.resolver.resolve")
@@ -240,7 +240,7 @@ def test_domain_is_not_verified_with_missing_domain(self, mock_dns_query):
self.assertEqual(response_data["verified_at"], None)
self.assertEqual(self.domain.verified_at, None)
self.assertEqual(
- self.domain.last_verification_retry, datetime.datetime(2021, 10, 10, 10, 10, 10, tzinfo=pytz.UTC)
+ self.domain.last_verification_retry, datetime.datetime(2021, 10, 10, 10, 10, 10, tzinfo=ZoneInfo("UTC"))
)
@patch("posthog.models.organization_domain.dns.resolver.resolve")
@@ -262,7 +262,7 @@ def test_domain_is_not_verified_with_incorrect_challenge(self, mock_dns_query):
self.assertEqual(response_data["verified_at"], None)
self.assertEqual(self.domain.verified_at, None)
self.assertEqual(
- self.domain.last_verification_retry, datetime.datetime(2021, 10, 10, 10, 10, 10, tzinfo=pytz.UTC)
+ self.domain.last_verification_retry, datetime.datetime(2021, 10, 10, 10, 10, 10, tzinfo=ZoneInfo("UTC"))
)
def test_cannot_request_verification_for_verified_domains(self):
diff --git a/posthog/api/test/test_plugin.py b/posthog/api/test/test_plugin.py
index d393b00910ab3..c9ae3b26c359a 100644
--- a/posthog/api/test/test_plugin.py
+++ b/posthog/api/test/test_plugin.py
@@ -5,7 +5,7 @@
from unittest import mock
from unittest.mock import ANY, patch
-import pytz
+from zoneinfo import ZoneInfo
from django.core.files.uploadedfile import SimpleUploadedFile
from freezegun import freeze_time
from rest_framework import status
@@ -269,7 +269,7 @@ def test_update_plugin_auth(self, mock_sync_from_plugin_archive, mock_get, mock_
plugin = Plugin.objects.get(id=response.json()["id"])
- fake_date = datetime(2022, 1, 1, 0, 0).replace(tzinfo=pytz.UTC)
+ fake_date = datetime(2022, 1, 1, 0, 0).replace(tzinfo=ZoneInfo("UTC"))
self.assertNotEqual(plugin.updated_at, fake_date)
with freeze_time(fake_date.isoformat()):
@@ -715,7 +715,7 @@ def test_install_plugin_on_multiple_orgs(self, mock_get, mock_reload):
name="FooBar2", plugins_access_level=Organization.PluginsAccessLevel.INSTALL
)
- fake_date = datetime(2022, 1, 1, 0, 0).replace(tzinfo=pytz.UTC)
+ fake_date = datetime(2022, 1, 1, 0, 0).replace(tzinfo=ZoneInfo("UTC"))
with freeze_time(fake_date.isoformat()):
response = self.client.post(
f"/api/organizations/{my_org.id}/plugins/", {"url": "https://github.com/PostHog/helloworldplugin"}
@@ -1281,7 +1281,7 @@ def test_check_for_updates_plugins_reload_not_called(self, _, mock_reload):
plugin_id = response.json()["id"]
plugin = Plugin.objects.get(id=plugin_id)
- fake_date = datetime(2022, 1, 1, 0, 0).replace(tzinfo=pytz.UTC)
+ fake_date = datetime(2022, 1, 1, 0, 0).replace(tzinfo=ZoneInfo("UTC"))
self.assertNotEqual(plugin.latest_tag_checked_at, fake_date)
with freeze_time(fake_date.isoformat()):
diff --git a/posthog/api/test/test_signup.py b/posthog/api/test/test_signup.py
index c4439a8913b31..82e89c7805c5c 100644
--- a/posthog/api/test/test_signup.py
+++ b/posthog/api/test/test_signup.py
@@ -5,7 +5,7 @@
from unittest.mock import ANY, patch
import pytest
-import pytz
+from zoneinfo import ZoneInfo
from django.core import mail
from django.urls.base import reverse
from django.utils import timezone
@@ -733,7 +733,7 @@ def test_api_invite_sign_up_prevalidate_expired_invite(self):
invite: OrganizationInvite = OrganizationInvite.objects.create(
target_email="test+59@posthog.com", organization=self.organization
)
- invite.created_at = datetime.datetime(2020, 12, 1, tzinfo=pytz.UTC)
+ invite.created_at = datetime.datetime(2020, 12, 1, tzinfo=ZoneInfo("UTC"))
invite.save()
response = self.client.get(f"/api/signup/{invite.id}/")
@@ -1132,7 +1132,7 @@ def test_cant_claim_expired_invite(self):
invite: OrganizationInvite = OrganizationInvite.objects.create(
target_email="test+799@posthog.com", organization=self.organization
)
- invite.created_at = datetime.datetime(2020, 3, 3, tzinfo=pytz.UTC)
+ invite.created_at = datetime.datetime(2020, 3, 3, tzinfo=ZoneInfo("UTC"))
invite.save()
response = self.client.post(f"/api/signup/{invite.id}/", {"first_name": "Charlie", "password": "test_password"})
diff --git a/posthog/api/test/test_survey.py b/posthog/api/test/test_survey.py
index 45e13024c1a0b..f393e5cec4379 100644
--- a/posthog/api/test/test_survey.py
+++ b/posthog/api/test/test_survey.py
@@ -77,6 +77,80 @@ def test_can_create_survey_with_linked_flag_and_targeting(self):
{"type": "open", "question": "What would you want to improve from notebooks?"}
]
+ def test_used_in_survey_is_populated_correctly_for_feature_flag_list(self) -> None:
+ self.maxDiff = None
+
+ ff_key = "notebooks"
+ notebooks_flag = FeatureFlag.objects.create(team=self.team, key=ff_key, created_by=self.user)
+
+ response = self.client.post(
+ f"/api/projects/{self.team.id}/surveys/",
+ data={
+ "name": "Notebooks power users survey",
+ "type": "popover",
+ "questions": [{"type": "open", "question": "What would you want to improve from notebooks?"}],
+ "linked_flag_id": notebooks_flag.id,
+ "targeting_flag_filters": {
+ "groups": [
+ {
+ "variant": None,
+ "rollout_percentage": None,
+ "properties": [
+ {"key": "billing_plan", "value": ["cloud"], "operator": "exact", "type": "person"}
+ ],
+ }
+ ]
+ },
+ "conditions": {"url": "https://app.posthog.com/notebooks"},
+ },
+ format="json",
+ )
+
+ response_data = response.json()
+ assert response.status_code == status.HTTP_201_CREATED, response_data
+ assert response_data["linked_flag"]["id"] == notebooks_flag.id
+ assert FeatureFlag.objects.filter(id=response_data["targeting_flag"]["id"]).exists()
+
+ created_survey1 = response.json()["id"]
+
+ response = self.client.post(
+ f"/api/projects/{self.team.id}/surveys/",
+ data={
+ "name": "Notebooks random survey",
+ "type": "popover",
+ "questions": [{"type": "open", "question": "What would you want to improve from notebooks?"}],
+ "linked_flag_id": notebooks_flag.id,
+ "conditions": {"url": "https://app.posthog.com/notebooks"},
+ },
+ format="json",
+ )
+
+ response_data = response.json()
+ assert response.status_code == status.HTTP_201_CREATED, response_data
+ assert response_data["linked_flag"]["id"] == notebooks_flag.id
+ assert response_data["targeting_flag"] is None
+
+ created_survey2 = response.json()["id"]
+
+ # add another random feature flag
+ self.client.post(
+ f"/api/projects/{self.team.id}/feature_flags/",
+ data={"name": f"flag", "key": f"flag_0", "filters": {"groups": [{"rollout_percentage": 5}]}},
+ format="json",
+ ).json()
+
+ with self.assertNumQueries(12):
+ response = self.client.get(f"/api/projects/{self.team.id}/feature_flags")
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ result = response.json()
+
+ self.assertEqual(result["count"], 2)
+
+ self.assertEqual(
+ [(res["key"], [survey["id"] for survey in res["surveys"]]) for res in result["results"]],
+ [("flag_0", []), (ff_key, [created_survey1, created_survey2])],
+ )
+
def test_updating_survey_with_targeting_creates_or_updates_targeting_flag(self):
survey_with_targeting = self.client.post(
f"/api/projects/{self.team.id}/surveys/",
diff --git a/posthog/batch_exports/service.py b/posthog/batch_exports/service.py
index 5aa0fa7d18e22..b5eb182e68a70 100644
--- a/posthog/batch_exports/service.py
+++ b/posthog/batch_exports/service.py
@@ -52,6 +52,8 @@ class S3BatchExportInputs:
data_interval_end: str | None = None
compression: str | None = None
exclude_events: list[str] | None = None
+ encryption: str | None = None
+ kms_key_id: str | None = None
@dataclass
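The two fields added to `S3BatchExportInputs` mirror S3's server-side encryption options. This hunk only threads them through the inputs dataclass; downstream they would typically be forwarded to the S3 client's upload call. A hedged sketch with boto3 (the helper and call site are assumptions, only the boto3 argument names are standard):

```python
import boto3


def encryption_kwargs(encryption: str | None, kms_key_id: str | None) -> dict:
    """Translate the batch export inputs into boto3 upload kwargs (illustrative only)."""
    kwargs: dict = {}
    if encryption:  # e.g. "AES256" or "aws:kms"
        kwargs["ServerSideEncryption"] = encryption
    if kms_key_id:  # only meaningful together with "aws:kms"
        kwargs["SSEKMSKeyId"] = kms_key_id
    return kwargs


s3 = boto3.client("s3")
s3.put_object(
    Bucket="example-exports-bucket",  # placeholder, not from this PR
    Key="batch-exports/2023-08-01.jsonl",
    Body=b"...",
    **encryption_kwargs("aws:kms", "alias/example-key"),
)
```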
diff --git a/posthog/caching/test/test_should_refresh_insight.py b/posthog/caching/test/test_should_refresh_insight.py
index 26fcfaf01531a..12fb385ef2926 100644
--- a/posthog/caching/test/test_should_refresh_insight.py
+++ b/posthog/caching/test/test_should_refresh_insight.py
@@ -1,9 +1,9 @@
from datetime import datetime, timedelta
from time import sleep
from unittest.mock import patch
+from zoneinfo import ZoneInfo
from django.http import HttpRequest
-import pytz
from freezegun import freeze_time
from rest_framework.request import Request
from posthog.caching.calculate_results import CLICKHOUSE_MAX_EXECUTION_TIME
@@ -25,7 +25,7 @@ def __init__(self, *args, **kwargs) -> None:
def test_should_return_true_if_refresh_not_requested(self):
insight, _, _ = _create_insight(self.team, {"events": [{"id": "$autocapture"}], "interval": "month"}, {})
InsightCachingState.objects.filter(team=self.team, insight_id=insight.pk).update(
- last_refresh=datetime.now(tz=pytz.timezone("UTC")) - timedelta(days=1)
+ last_refresh=datetime.now(tz=ZoneInfo("UTC")) - timedelta(days=1)
)
# .GET["refresh"] is absent in the request below!
@@ -47,7 +47,7 @@ def test_should_return_true_if_refresh_not_requested(self):
def test_should_return_true_if_refresh_requested(self):
insight, _, _ = _create_insight(self.team, {"events": [{"id": "$autocapture"}], "interval": "month"}, {})
InsightCachingState.objects.filter(team=self.team, insight_id=insight.pk).update(
- last_refresh=datetime.now(tz=pytz.timezone("UTC")) - timedelta(days=1)
+ last_refresh=datetime.now(tz=ZoneInfo("UTC")) - timedelta(days=1)
)
should_refresh_now, refresh_frequency = should_refresh_insight(insight, None, request=self.refresh_request)
@@ -67,7 +67,7 @@ def test_should_return_true_if_insight_does_not_have_last_refresh(self):
def test_shared_insights_can_be_refreshed_less_often(self):
insight, _, _ = _create_insight(self.team, {"events": [{"id": "$autocapture"}], "interval": "month"}, {})
InsightCachingState.objects.filter(team=self.team, insight_id=insight.pk).update(
- last_refresh=datetime.now(tz=pytz.timezone("UTC")) - timedelta(days=1)
+ last_refresh=datetime.now(tz=ZoneInfo("UTC")) - timedelta(days=1)
)
should_refresh_now, refresh_frequency = should_refresh_insight(
@@ -130,7 +130,7 @@ def test_dashboard_filters_should_override_insight_filters_when_deciding_on_refr
def test_should_return_true_if_was_recently_refreshed(self):
insight, _, _ = _create_insight(self.team, {"events": [{"id": "$autocapture"}], "interval": "month"}, {})
InsightCachingState.objects.filter(team=self.team, insight_id=insight.pk).update(
- last_refresh=datetime.now(tz=pytz.timezone("UTC"))
+ last_refresh=datetime.now(tz=ZoneInfo("UTC"))
)
request = HttpRequest()
@@ -143,10 +143,10 @@ def test_should_return_true_if_was_recently_refreshed(self):
def test_should_return_true_if_refresh_just_about_to_time_out_elsewhere(self, mock_sleep):
insight, _, _ = _create_insight(self.team, {"events": [{"id": "$autocapture"}], "interval": "month"}, {})
InsightCachingState.objects.filter(team=self.team, insight_id=insight.pk).update(
- last_refresh=datetime.now(tz=pytz.timezone("UTC")) - timedelta(days=1),
+ last_refresh=datetime.now(tz=ZoneInfo("UTC")) - timedelta(days=1),
# This insight is being calculated _somewhere_, since it was last refreshed
# earlier than the recent refresh has been queued
- last_refresh_queued_at=datetime.now(tz=pytz.timezone("UTC"))
+ last_refresh_queued_at=datetime.now(tz=ZoneInfo("UTC"))
- timedelta(seconds=CLICKHOUSE_MAX_EXECUTION_TIME - 0.5), # Half a second before timeout
)
@@ -161,10 +161,10 @@ def test_should_return_true_if_refresh_just_about_to_time_out_elsewhere(self, mo
def test_should_return_true_if_refresh_timed_out_elsewhere_before(self):
insight, _, _ = _create_insight(self.team, {"events": [{"id": "$autocapture"}], "interval": "month"}, {})
InsightCachingState.objects.filter(team=self.team, insight_id=insight.pk).update(
- last_refresh=datetime.now(tz=pytz.timezone("UTC")) - timedelta(days=1),
+ last_refresh=datetime.now(tz=ZoneInfo("UTC")) - timedelta(days=1),
# last_refresh is earlier than last_refresh_queued_at BUT last_refresh_queued_at is more than
# CLICKHOUSE_MAX_EXECUTION_TIME seconds ago. This means the query CANNOT be running at this time.
- last_refresh_queued_at=datetime.now(tz=pytz.timezone("UTC")) - timedelta(seconds=500),
+ last_refresh_queued_at=datetime.now(tz=ZoneInfo("UTC")) - timedelta(seconds=500),
)
should_refresh_now, _ = should_refresh_insight(insight, None, request=self.refresh_request)
diff --git a/posthog/clickhouse/migrations/0048_session_replay_events_count.py b/posthog/clickhouse/migrations/0048_session_replay_events_count.py
new file mode 100644
index 0000000000000..d4676e2794884
--- /dev/null
+++ b/posthog/clickhouse/migrations/0048_session_replay_events_count.py
@@ -0,0 +1,26 @@
+from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions
+from posthog.models.session_replay_event.migrations_sql import (
+ DROP_SESSION_REPLAY_EVENTS_TABLE_MV_SQL,
+ DROP_KAFKA_SESSION_REPLAY_EVENTS_TABLE_SQL,
+ ADD_EVENT_COUNT_WRITABLE_SESSION_REPLAY_EVENTS_TABLE_SQL,
+ ADD_EVENT_COUNT_DISTRIBUTED_SESSION_REPLAY_EVENTS_TABLE_SQL,
+ ADD_EVENT_COUNT_SESSION_REPLAY_EVENTS_TABLE_SQL,
+)
+from posthog.models.session_replay_event.sql import (
+ SESSION_REPLAY_EVENTS_TABLE_MV_SQL,
+ KAFKA_SESSION_REPLAY_EVENTS_TABLE_SQL,
+)
+
+operations = [
+ # we have to drop the materialized view first so that we're no longer pulling from kafka
+ # then we drop the kafka table
+ run_sql_with_exceptions(DROP_SESSION_REPLAY_EVENTS_TABLE_MV_SQL()),
+ run_sql_with_exceptions(DROP_KAFKA_SESSION_REPLAY_EVENTS_TABLE_SQL()),
+ # now we can alter the target tables
+ run_sql_with_exceptions(ADD_EVENT_COUNT_WRITABLE_SESSION_REPLAY_EVENTS_TABLE_SQL()),
+ run_sql_with_exceptions(ADD_EVENT_COUNT_DISTRIBUTED_SESSION_REPLAY_EVENTS_TABLE_SQL()),
+ run_sql_with_exceptions(ADD_EVENT_COUNT_SESSION_REPLAY_EVENTS_TABLE_SQL()),
+ # and then recreate the materialized views and kafka tables
+ run_sql_with_exceptions(KAFKA_SESSION_REPLAY_EVENTS_TABLE_SQL()),
+ run_sql_with_exceptions(SESSION_REPLAY_EVENTS_TABLE_MV_SQL()),
+]
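The drop/alter/recreate ordering above is required because the Kafka engine table and its materialized view cannot be altered in place while they are consuming; the target tables are only safe to change once both are gone. The imported ALTER helpers are defined in `posthog/models/session_replay_event/migrations_sql.py` elsewhere in this PR; as a rough illustration only (exact table, cluster, and column list are assumptions inferred from the schema snapshot below), each one reduces to an `ALTER TABLE ... ADD COLUMN` statement along these lines:

```python
# Illustrative sketch, not the PR's migrations_sql.py contents.
def ADD_EVENT_COUNT_SESSION_REPLAY_EVENTS_TABLE_SQL() -> str:
    return """
        ALTER TABLE sharded_session_replay_events
        ON CLUSTER 'posthog'
        ADD COLUMN IF NOT EXISTS message_count SimpleAggregateFunction(sum, Int64),
        ADD COLUMN IF NOT EXISTS event_count SimpleAggregateFunction(sum, Int64),
        ADD COLUMN IF NOT EXISTS _timestamp SimpleAggregateFunction(max, DateTime)
    """
```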
diff --git a/posthog/clickhouse/system_status.py b/posthog/clickhouse/system_status.py
index 2f0924b080319..417525330a96c 100644
--- a/posthog/clickhouse/system_status.py
+++ b/posthog/clickhouse/system_status.py
@@ -1,7 +1,7 @@
from datetime import timedelta
from os.path import abspath, dirname, join
from typing import Dict, Generator, List, Tuple
-import pytz
+from zoneinfo import ZoneInfo
from dateutil.relativedelta import relativedelta
from django.utils import timezone
@@ -103,7 +103,7 @@ def system_status() -> Generator[SystemStatusRow, None, None]:
last_event_ingested_timestamp = sync_execute("SELECT max(_timestamp) FROM events")[0][0]
# Therefore we can confidently apply the UTC timezone
- last_event_ingested_timestamp_utc = last_event_ingested_timestamp.replace(tzinfo=pytz.UTC)
+ last_event_ingested_timestamp_utc = last_event_ingested_timestamp.replace(tzinfo=ZoneInfo("UTC"))
yield {
"key": "last_event_ingested_timestamp",
diff --git a/posthog/clickhouse/test/__snapshots__/test_schema.ambr b/posthog/clickhouse/test/__snapshots__/test_schema.ambr
index 36ab529259c77..ac21b1ac5989f 100644
--- a/posthog/clickhouse/test/__snapshots__/test_schema.ambr
+++ b/posthog/clickhouse/test/__snapshots__/test_schema.ambr
@@ -336,7 +336,9 @@
console_log_count Int64,
console_warn_count Int64,
console_error_count Int64,
- size Int64
+ size Int64,
+ event_count Int64,
+ message_count Int64
) ENGINE = Kafka('test.kafka.broker:9092', 'clickhouse_session_replay_events_test', 'group1', 'JSONEachRow')
'
@@ -922,7 +924,9 @@
console_log_count Int64,
console_warn_count Int64,
console_error_count Int64,
- size Int64
+ size Int64,
+ event_count Int64,
+ message_count Int64
) ENGINE = Kafka('kafka:9092', 'clickhouse_session_replay_events_test', 'group1', 'JSONEachRow')
'
@@ -1344,7 +1348,15 @@
console_warn_count SimpleAggregateFunction(sum, Int64),
console_error_count SimpleAggregateFunction(sum, Int64),
-- this column allows us to estimate the amount of data that is being ingested
- size SimpleAggregateFunction(sum, Int64)
+ size SimpleAggregateFunction(sum, Int64),
+ -- this allows us to count the number of messages received in a session
+ -- often very useful in incidents or debugging
+ message_count SimpleAggregateFunction(sum, Int64),
+ -- this allows us to count the number of snapshot events received in a session
+ -- often very useful in incidents or debugging
+ -- because we batch events we expect message_count to be lower than event_count
+ event_count SimpleAggregateFunction(sum, Int64),
+ _timestamp SimpleAggregateFunction(max, DateTime)
) ENGINE = Distributed('posthog', 'posthog_test', 'sharded_session_replay_events', sipHash64(distinct_id))
'
@@ -1377,7 +1389,11 @@
sum(console_log_count) as console_log_count,
sum(console_warn_count) as console_warn_count,
sum(console_error_count) as console_error_count,
- sum(size) as size
+ sum(size) as size,
+ -- we can count the number of kafka messages instead of sending it explicitly
+ sum(message_count) as message_count,
+ sum(event_count) as event_count,
+ max(_timestamp) as _timestamp
FROM posthog_test.kafka_session_replay_events
group by session_id, team_id
@@ -1608,7 +1624,15 @@
console_warn_count SimpleAggregateFunction(sum, Int64),
console_error_count SimpleAggregateFunction(sum, Int64),
-- this column allows us to estimate the amount of data that is being ingested
- size SimpleAggregateFunction(sum, Int64)
+ size SimpleAggregateFunction(sum, Int64),
+ -- this allows us to count the number of messages received in a session
+ -- often very useful in incidents or debugging
+ message_count SimpleAggregateFunction(sum, Int64),
+ -- this allows us to count the number of snapshot events received in a session
+ -- often very useful in incidents or debugging
+ -- because we batch events we expect message_count to be lower than event_count
+ event_count SimpleAggregateFunction(sum, Int64),
+ _timestamp SimpleAggregateFunction(max, DateTime)
) ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/77f1df52-4b43-11e9-910f-b8ca3a9b9f3e_{shard}/posthog.session_replay_events', '{replica}')
PARTITION BY toYYYYMM(min_first_timestamp)
@@ -2226,7 +2250,15 @@
console_warn_count SimpleAggregateFunction(sum, Int64),
console_error_count SimpleAggregateFunction(sum, Int64),
-- this column allows us to estimate the amount of data that is being ingested
- size SimpleAggregateFunction(sum, Int64)
+ size SimpleAggregateFunction(sum, Int64),
+ -- this allows us to count the number of messages received in a session
+ -- often very useful in incidents or debugging
+ message_count SimpleAggregateFunction(sum, Int64),
+ -- this allows us to count the number of snapshot events received in a session
+ -- often very useful in incidents or debugging
+ -- because we batch events we expect message_count to be lower than event_count
+ event_count SimpleAggregateFunction(sum, Int64),
+ _timestamp SimpleAggregateFunction(max, DateTime)
) ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/77f1df52-4b43-11e9-910f-b8ca3a9b9f3e_{shard}/posthog.session_replay_events', '{replica}')
PARTITION BY toYYYYMM(min_first_timestamp)
diff --git a/posthog/clickhouse/test/test_person_overrides.py b/posthog/clickhouse/test/test_person_overrides.py
index dd337d487aba7..f0d33c7d617f4 100644
--- a/posthog/clickhouse/test/test_person_overrides.py
+++ b/posthog/clickhouse/test/test_person_overrides.py
@@ -5,7 +5,7 @@
from uuid import UUID, uuid4
import pytest
-import pytz
+from zoneinfo import ZoneInfo
from kafka import KafkaProducer
from posthog.clickhouse.client import sync_execute
@@ -35,9 +35,9 @@ def test_can_insert_person_overrides():
old_person_id = uuid4()
override_person_id = uuid4()
oldest_event_string = "2020-01-01 00:00:00"
- oldest_event = datetime.fromisoformat(oldest_event_string).replace(tzinfo=pytz.UTC)
+ oldest_event = datetime.fromisoformat(oldest_event_string).replace(tzinfo=ZoneInfo("UTC"))
merged_at_string = "2020-01-02 00:00:00"
- merged_at = datetime.fromisoformat(merged_at_string).replace(tzinfo=pytz.UTC)
+ merged_at = datetime.fromisoformat(merged_at_string).replace(tzinfo=ZoneInfo("UTC"))
message = {
"team_id": 1,
"old_person_id": str(old_person_id),
@@ -82,7 +82,7 @@ def test_can_insert_person_overrides():
[result] = results
created_at, *the_rest = result
assert the_rest == [1, old_person_id, override_person_id, oldest_event, merged_at, 2]
- assert created_at > datetime.now(tz=pytz.UTC) - timedelta(seconds=10)
+ assert created_at > datetime.now(tz=ZoneInfo("UTC")) - timedelta(seconds=10)
finally:
producer.close()
diff --git a/posthog/datetime.py b/posthog/datetime.py
index 8dc6b6975fb8d..b8c4910e8b374 100644
--- a/posthog/datetime.py
+++ b/posthog/datetime.py
@@ -1,13 +1,22 @@
from datetime import datetime, timedelta
-def end_of_day(reference_date: datetime):
- return datetime(
- year=reference_date.year, month=reference_date.month, day=reference_date.day, tzinfo=reference_date.tzinfo
- ) + timedelta(days=1, microseconds=-1)
+def start_of_hour(dt: datetime) -> datetime:
+ return datetime(year=dt.year, month=dt.month, day=dt.day, hour=dt.hour, tzinfo=dt.tzinfo)
-def start_of_day(reference_date: datetime):
- return datetime(
- year=reference_date.year, month=reference_date.month, day=reference_date.day, tzinfo=reference_date.tzinfo
- )
+def start_of_day(dt: datetime):
+ return datetime(year=dt.year, month=dt.month, day=dt.day, tzinfo=dt.tzinfo)
+
+
+def end_of_day(dt: datetime):
+ return datetime(year=dt.year, month=dt.month, day=dt.day, tzinfo=dt.tzinfo) + timedelta(days=1, microseconds=-1)
+
+
+def start_of_week(dt: datetime) -> datetime:
+    # weeks start on Sunday, so truncate to the preceding Sunday
+ return datetime(year=dt.year, month=dt.month, day=dt.day, tzinfo=dt.tzinfo) - timedelta(days=(dt.weekday() + 1) % 7)
+
+
+def start_of_month(dt: datetime) -> datetime:
+ return datetime(year=dt.year, month=dt.month, day=1, tzinfo=dt.tzinfo)
diff --git a/posthog/decorators.py b/posthog/decorators.py
index 2cefc1bb23f53..19b1bc33f98ae 100644
--- a/posthog/decorators.py
+++ b/posthog/decorators.py
@@ -1,7 +1,10 @@
+from datetime import datetime
from enum import Enum
from functools import wraps
from typing import Any, Callable, Dict, List, TypeVar, Union, cast
+from zoneinfo import ZoneInfo
+import posthoganalytics
from django.urls import resolve
from django.utils.timezone import now
from rest_framework.request import Request
@@ -9,8 +12,15 @@
from statshog.defaults.django import statsd
from posthog.clickhouse.query_tagging import tag_queries
+from posthog.cloud_utils import is_cloud
+from posthog.datetime import start_of_day, start_of_hour, start_of_month, start_of_week
from posthog.models import User
+from posthog.models.filters.filter import Filter
+from posthog.models.filters.path_filter import PathFilter
+from posthog.models.filters.retention_filter import RetentionFilter
+from posthog.models.filters.stickiness_filter import StickinessFilter
from posthog.models.filters.utils import get_filter
+from posthog.models.team.team import Team
from posthog.utils import refresh_requested_by_client
from .utils import generate_cache_key, get_safe_cache
@@ -74,9 +84,12 @@ def wrapper(self, request) -> T:
route = "unknown"
if cached_result_package and cached_result_package.get("result"):
- cached_result_package["is_cached"] = True
- statsd.incr("posthog_cached_function_cache_hit", tags={"route": route})
- return cached_result_package
+ if not is_stale(team, filter, cached_result_package):
+ cached_result_package["is_cached"] = True
+ statsd.incr("posthog_cached_function_cache_hit", tags={"route": route})
+ return cached_result_package
+ else:
+ statsd.incr("posthog_cached_function_cache_stale", tags={"route": route})
else:
statsd.incr("posthog_cached_function_cache_miss", tags={"route": route})
@@ -93,3 +106,49 @@ def wrapper(self, request) -> T:
return fresh_result_package
return wrapper
+
+
+def stale_cache_invalidation_disabled(team: Team) -> bool:
+    """Can be disabled temporarily to help in cases of service degradation."""
+ if is_cloud(): # on PostHog Cloud, use the feature flag
+ return not posthoganalytics.feature_enabled(
+ "stale-cache-invalidation-enabled",
+ str(team.uuid),
+ groups={"organization": str(team.organization.id)},
+ group_properties={
+ "organization": {"id": str(team.organization.id), "created_at": team.organization.created_at}
+ },
+ only_evaluate_locally=True,
+ send_feature_flag_events=False,
+ )
+ else:
+ return False
+
+
+def is_stale(team: Team, filter: Filter | RetentionFilter | StickinessFilter | PathFilter, cached_result: Any) -> bool:
+    """Indicates whether a cache item is obviously outdated based on its filters,
+    i.e. whether the next time interval has started since the last computation. For
+    example, an insight with a -7d date range that was last computed yesterday is stale.
+    The same insight refreshed today wouldn't be marked as stale.
+ """
+
+ if stale_cache_invalidation_disabled(team):
+ return False
+
+ last_refresh = cached_result.get("last_refresh", None)
+ date_to = min([filter.date_to, datetime.now(tz=ZoneInfo("UTC"))]) # can't be later than now
+ interval = filter.period.lower() if isinstance(filter, RetentionFilter) else filter.interval
+
+ if last_refresh is None:
+ raise Exception("Cached results require a last_refresh")
+
+ if interval == "hour":
+ return start_of_hour(date_to) > start_of_hour(last_refresh)
+ elif interval == "day":
+ return start_of_day(date_to) > start_of_day(last_refresh)
+ elif interval == "week":
+ return start_of_week(date_to) > start_of_week(last_refresh)
+ elif interval == "month":
+ return start_of_month(date_to) > start_of_month(last_refresh)
+ else:
+ return False
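A minimal sketch of the staleness rule above, using the start_of_day helper added in this PR; the timestamps are illustrative assumptions, not values from this diff.

from datetime import datetime
from zoneinfo import ZoneInfo

from posthog.datetime import start_of_day

# a day-interval insight cached late yesterday, requested again this morning
last_refresh = datetime(2023, 8, 1, 23, 0, tzinfo=ZoneInfo("UTC"))
date_to = datetime(2023, 8, 2, 9, 0, tzinfo=ZoneInfo("UTC"))

# a new day has started since the cache was written, so is_stale() returns True
# and the decorator recomputes the insight instead of serving the cached package
assert start_of_day(date_to) > start_of_day(last_refresh)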
diff --git a/posthog/demo/products/hedgebox/models.py b/posthog/demo/products/hedgebox/models.py
index 68272413aa2ba..132f3d6ac5f32 100644
--- a/posthog/demo/products/hedgebox/models.py
+++ b/posthog/demo/products/hedgebox/models.py
@@ -14,6 +14,7 @@
)
import pytz
+from zoneinfo import ZoneInfo
from posthog.demo.matrix.models import Effect, SimPerson, SimSessionIntent
@@ -673,7 +674,7 @@ def upgrade_plan(self):
if not self.account.was_billing_scheduled:
self.account.was_billing_scheduled = True
future_months = math.ceil(
- (self.cluster.end.astimezone(pytz.timezone(self.timezone)) - self.cluster.simulation_time).days / 30
+ (self.cluster.end.astimezone(ZoneInfo(self.timezone)) - self.cluster.simulation_time).days / 30
)
for i in range(future_months):
bill_timestamp = self.cluster.simulation_time + dt.timedelta(days=30 * i)
diff --git a/posthog/demo/test/test_matrix_manager.py b/posthog/demo/test/test_matrix_manager.py
index 27463e1bd692d..99f0451c5485d 100644
--- a/posthog/demo/test/test_matrix_manager.py
+++ b/posthog/demo/test/test_matrix_manager.py
@@ -2,7 +2,7 @@
from enum import auto
from typing import Optional
-import pytz
+from zoneinfo import ZoneInfo
from posthog.client import sync_execute
from posthog.demo.matrix.manager import MatrixManager
@@ -54,7 +54,9 @@ class TestMatrixManager(ClickhouseDestroyTablesMixin):
@classmethod
def setUpTestData(cls):
super().setUpTestData()
- cls.matrix = DummyMatrix(n_clusters=3, now=dt.datetime(2020, 1, 1, 0, 0, 0, 0, tzinfo=pytz.UTC), days_future=0)
+ cls.matrix = DummyMatrix(
+ n_clusters=3, now=dt.datetime(2020, 1, 1, 0, 0, 0, 0, tzinfo=ZoneInfo("UTC")), days_future=0
+ )
cls.matrix.simulate()
def test_reset_master(self):
diff --git a/posthog/errors.py b/posthog/errors.py
index 5cd3342f7a3fa..b2d34ed858448 100644
--- a/posthog/errors.py
+++ b/posthog/errors.py
@@ -206,7 +206,7 @@ def look_up_error_code_meta(error: ServerException) -> ErrorCodeMeta:
131: ErrorCodeMeta("TOO_LARGE_STRING_SIZE"),
133: ErrorCodeMeta("AGGREGATE_FUNCTION_DOESNT_ALLOW_PARAMETERS"),
134: ErrorCodeMeta("PARAMETERS_TO_AGGREGATE_FUNCTIONS_MUST_BE_LITERALS"),
- 135: ErrorCodeMeta("ZERO_ARRAY_OR_TUPLE_INDEX"),
+ 135: ErrorCodeMeta("ZERO_ARRAY_OR_TUPLE_INDEX", user_safe=True),
137: ErrorCodeMeta("UNKNOWN_ELEMENT_IN_CONFIG"),
138: ErrorCodeMeta("EXCESSIVE_ELEMENT_IN_CONFIG"),
139: ErrorCodeMeta("NO_ELEMENTS_IN_CONFIG"),
diff --git a/posthog/hogql/database/schema/session_replay_events.py b/posthog/hogql/database/schema/session_replay_events.py
index c4f1980df5491..b8d79e86d9780 100644
--- a/posthog/hogql/database/schema/session_replay_events.py
+++ b/posthog/hogql/database/schema/session_replay_events.py
@@ -31,6 +31,8 @@
"console_warn_count": IntegerDatabaseField(name="console_warn_count"),
"console_error_count": IntegerDatabaseField(name="console_error_count"),
"size": IntegerDatabaseField(name="size"),
+ "event_count": IntegerDatabaseField(name="event_count"),
+ "message_count": IntegerDatabaseField(name="message_count"),
"pdi": LazyJoin(
from_field="distinct_id",
join_table=PersonDistinctIdsTable(),
@@ -77,6 +79,8 @@ def select_from_session_replay_events_table(requested_fields: Dict[str, List[str
"console_error_count": ast.Call(name="sum", args=[ast.Field(chain=[table_name, "console_error_count"])]),
"distinct_id": ast.Call(name="any", args=[ast.Field(chain=[table_name, "distinct_id"])]),
"size": ast.Call(name="sum", args=[ast.Field(chain=[table_name, "size"])]),
+ "event_count": ast.Call(name="sum", args=[ast.Field(chain=[table_name, "event_count"])]),
+ "message_count": ast.Call(name="sum", args=[ast.Field(chain=[table_name, "message_count"])]),
}
select_fields: List[ast.Expr] = []
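A hedged example of selecting the two new fields through HogQL; the query text is illustrative and team is assumed to be an existing Team instance.

from posthog.hogql.query import execute_hogql_query

# event_count and message_count can now be selected like any other session_replay_events column
response = execute_hogql_query(
    "SELECT session_id, event_count, message_count FROM session_replay_events LIMIT 10",
    team=team,  # assumption: an existing posthog.models.Team
)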
diff --git a/posthog/hogql/database/test/__snapshots__/test_database.ambr b/posthog/hogql/database/test/__snapshots__/test_database.ambr
index 166391d344856..9e1413d84a0bf 100644
--- a/posthog/hogql/database/test/__snapshots__/test_database.ambr
+++ b/posthog/hogql/database/test/__snapshots__/test_database.ambr
@@ -276,6 +276,14 @@
"key": "size",
"type": "integer"
},
+ {
+ "key": "event_count",
+ "type": "integer"
+ },
+ {
+ "key": "message_count",
+ "type": "integer"
+ },
{
"key": "pdi",
"type": "lazy_table",
@@ -405,6 +413,14 @@
"key": "size",
"type": "integer"
},
+ {
+ "key": "event_count",
+ "type": "integer"
+ },
+ {
+ "key": "message_count",
+ "type": "integer"
+ },
{
"key": "pdi",
"type": "lazy_table",
@@ -849,6 +865,14 @@
"key": "size",
"type": "integer"
},
+ {
+ "key": "event_count",
+ "type": "integer"
+ },
+ {
+ "key": "message_count",
+ "type": "integer"
+ },
{
"key": "pdi",
"type": "lazy_table",
@@ -978,6 +1002,14 @@
"key": "size",
"type": "integer"
},
+ {
+ "key": "event_count",
+ "type": "integer"
+ },
+ {
+ "key": "message_count",
+ "type": "integer"
+ },
{
"key": "pdi",
"type": "lazy_table",
diff --git a/posthog/hogql/placeholders.py b/posthog/hogql/placeholders.py
index 670b98cfd45e5..bd63ce32754c0 100644
--- a/posthog/hogql/placeholders.py
+++ b/posthog/hogql/placeholders.py
@@ -32,7 +32,7 @@ def __init__(self, placeholders: Optional[Dict[str, ast.Expr]]):
def visit_placeholder(self, node):
if not self.placeholders:
raise HogQLException(f"Placeholders, such as {{{node.field}}}, are not supported in this context")
- if node.field in self.placeholders:
+ if node.field in self.placeholders and self.placeholders[node.field] is not None:
new_node = self.placeholders[node.field]
new_node.start = node.start
new_node.end = node.end
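For context on the guard above, a small sketch: a placeholder key that maps to None is now treated the same as a key that was never provided, rather than being substituted into the tree. The expression is illustrative.

from posthog.hogql import ast
from posthog.hogql.parser import parse_expr

# {date_from} is replaced because a real expression is supplied; with this change,
# passing {"date_from": None} behaves as if the placeholder had not been provided at all
expr = parse_expr("timestamp >= {date_from}", {"date_from": ast.Constant(value="2023-01-01")})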
diff --git a/posthog/hogql/property.py b/posthog/hogql/property.py
index 3caa10d51f8f6..81efafc225a1f 100644
--- a/posthog/hogql/property.py
+++ b/posthog/hogql/property.py
@@ -15,7 +15,7 @@
from posthog.models.property import PropertyGroup
from posthog.models.property.util import build_selector_regex
from posthog.models.property_definition import PropertyType
-from posthog.schema import PropertyOperator
+from posthog.schema import PropertyOperator, PropertyGroupFilter, PropertyGroupFilterValue, FilterLogicalOperator
def has_aggregation(expr: AST) -> bool:
@@ -59,16 +59,30 @@ def property_to_expr(property: Union[BaseModel, PropertyGroup, Property, dict, l
return ast.And(exprs=properties)
elif isinstance(property, Property):
pass
- elif isinstance(property, PropertyGroup):
- if property.type != PropertyOperatorType.AND and property.type != PropertyOperatorType.OR:
+ elif (
+ isinstance(property, PropertyGroup)
+ or isinstance(property, PropertyGroupFilter)
+ or isinstance(property, PropertyGroupFilterValue)
+ ):
+ if (
+ isinstance(property, PropertyGroup)
+ and property.type != PropertyOperatorType.AND
+ and property.type != PropertyOperatorType.OR
+ ):
raise NotImplementedException(f'PropertyGroup of unknown type "{property.type}"')
+ if (
+ (isinstance(property, PropertyGroupFilter) or isinstance(property, PropertyGroupFilterValue))
+ and property.type != FilterLogicalOperator.AND
+ and property.type != FilterLogicalOperator.OR
+ ):
+ raise NotImplementedException(f'PropertyGroupFilter of unknown type "{property.type}"')
if len(property.values) == 0:
return ast.Constant(value=True)
if len(property.values) == 1:
return property_to_expr(property.values[0], team)
- if property.type == PropertyOperatorType.AND:
+ if property.type == PropertyOperatorType.AND or property.type == FilterLogicalOperator.AND:
return ast.And(exprs=[property_to_expr(p, team) for p in property.values])
else:
return ast.Or(exprs=[property_to_expr(p, team) for p in property.values])
diff --git a/posthog/hogql/test/test_query.py b/posthog/hogql/test/test_query.py
index e939d9ce8aef6..ed84eeaf4af6d 100644
--- a/posthog/hogql/test/test_query.py
+++ b/posthog/hogql/test/test_query.py
@@ -1,6 +1,6 @@
from uuid import UUID
-import pytz
+from zoneinfo import ZoneInfo
from django.test import override_settings
from django.utils import timezone
from freezegun import freeze_time
@@ -817,21 +817,21 @@ def test_window_functions_simple(self):
expected += [
(
f"person_{person}_{random_uuid}",
- datetime.datetime(2020, 1, 10, 00, 00, 00, tzinfo=pytz.UTC),
+ datetime.datetime(2020, 1, 10, 00, 00, 00, tzinfo=ZoneInfo("UTC")),
"random event",
[],
["random bla", "random boo"],
),
(
f"person_{person}_{random_uuid}",
- datetime.datetime(2020, 1, 10, 00, 10, 00, tzinfo=pytz.UTC),
+ datetime.datetime(2020, 1, 10, 00, 10, 00, tzinfo=ZoneInfo("UTC")),
"random bla",
["random event"],
["random boo"],
),
(
f"person_{person}_{random_uuid}",
- datetime.datetime(2020, 1, 10, 00, 20, 00, tzinfo=pytz.UTC),
+ datetime.datetime(2020, 1, 10, 00, 20, 00, tzinfo=ZoneInfo("UTC")),
"random boo",
["random event", "random bla"],
[],
@@ -902,7 +902,7 @@ def test_window_functions_with_window(self):
expected += [
(
f"person_{person}_{random_uuid}",
- datetime.datetime(2020, 1, 10, 00, 00, 00, tzinfo=pytz.UTC),
+ datetime.datetime(2020, 1, 10, 00, 00, 00, tzinfo=ZoneInfo("UTC")),
"random event",
[],
["random bla", "random boo"],
@@ -917,7 +917,7 @@ def test_window_functions_with_window(self):
),
(
f"person_{person}_{random_uuid}",
- datetime.datetime(2020, 1, 10, 00, 10, 00, tzinfo=pytz.UTC),
+ datetime.datetime(2020, 1, 10, 00, 10, 00, tzinfo=ZoneInfo("UTC")),
"random bla",
["random event"],
["random boo"],
@@ -932,7 +932,7 @@ def test_window_functions_with_window(self):
),
(
f"person_{person}_{random_uuid}",
- datetime.datetime(2020, 1, 10, 00, 20, 00, tzinfo=pytz.UTC),
+ datetime.datetime(2020, 1, 10, 00, 20, 00, tzinfo=ZoneInfo("UTC")),
"random boo",
["random event", "random bla"],
[],
@@ -1226,7 +1226,7 @@ def test_null_equality(self):
("null", "!~*", "null", 0),
]
- for (a, op, b, res) in expected:
+ for a, op, b, res in expected:
# works when selecting directly
query = f"select {a} {op} {b}"
response = execute_hogql_query(query, team=self.team)
diff --git a/posthog/hogql_queries/lifecycle_hogql_query.py b/posthog/hogql_queries/lifecycle_hogql_query.py
deleted file mode 100644
index 2df71a976d1a9..0000000000000
--- a/posthog/hogql_queries/lifecycle_hogql_query.py
+++ /dev/null
@@ -1,176 +0,0 @@
-from typing import Dict, Any
-
-from django.utils.timezone import datetime
-
-from posthog.hogql import ast
-from posthog.hogql.parser import parse_expr, parse_select
-from posthog.hogql.query import execute_hogql_query
-from posthog.hogql_queries.query_date_range import QueryDateRange
-from posthog.models import Team
-from posthog.schema import LifecycleQuery
-
-
-def create_time_filter(date_range: QueryDateRange) -> ast.Expr:
- # don't need timezone here, as HogQL will use the project timezone automatically
- # :TRICKY: We fetch all data even for the period before the graph starts up until the end of the last period
- time_filter = parse_expr(
- """
- (timestamp >= dateTrunc({interval}, {date_from}) - {one_interval_period})
- AND
- (timestamp < dateTrunc({interval}, {date_to}) + {one_interval_period})
- """,
- placeholders={
- "date_from": date_range.date_from_as_hogql,
- "date_to": date_range.date_to_as_hogql,
- "one_interval_period": date_range.one_interval_period_as_hogql,
- "interval": date_range.interval_period_string_as_hogql,
- },
- )
-
- return time_filter
-
-
-def create_events_query(date_range: QueryDateRange, event_filter: ast.Expr):
- if not event_filter:
- event_filter = ast.Constant(value=True)
-
- placeholders = {
- "event_filter": event_filter,
- "interval": date_range.interval_period_string_as_hogql,
- "one_interval_period": date_range.one_interval_period_as_hogql,
- }
-
- events_query = parse_select(
- """
- SELECT
- events.person.id as person_id,
- min(events.person.created_at) AS created_at,
- arraySort(groupUniqArray(dateTrunc({interval}, events.timestamp))) AS all_activity,
- arrayPopBack(arrayPushFront(all_activity, dateTrunc({interval}, created_at))) as previous_activity,
- arrayPopFront(arrayPushBack(all_activity, dateTrunc({interval}, toDateTime('1970-01-01 00:00:00')))) as following_activity,
- arrayMap((previous, current, index) -> (previous = current ? 'new' : ((current - {one_interval_period}) = previous AND index != 1) ? 'returning' : 'resurrecting'), previous_activity, all_activity, arrayEnumerate(all_activity)) as initial_status,
- arrayMap((current, next) -> (current + {one_interval_period} = next ? '' : 'dormant'), all_activity, following_activity) as dormant_status,
- arrayMap(x -> x + {one_interval_period}, arrayFilter((current, is_dormant) -> is_dormant = 'dormant', all_activity, dormant_status)) as dormant_periods,
- arrayMap(x -> 'dormant', dormant_periods) as dormant_label,
- arrayConcat(arrayZip(all_activity, initial_status), arrayZip(dormant_periods, dormant_label)) as temp_concat,
- arrayJoin(temp_concat) as period_status_pairs,
- period_status_pairs.1 as start_of_period,
- period_status_pairs.2 as status
- FROM events
- WHERE {event_filter}
- GROUP BY person_id
- """,
- placeholders=placeholders,
- )
- return events_query
-
-
-def run_lifecycle_query(
- team: Team,
- query: LifecycleQuery,
-) -> Dict[str, Any]:
- now_dt = datetime.now()
-
- query_date_range = QueryDateRange(date_range=query.dateRange, team=team, interval=query.interval, now=now_dt)
-
- interval = query_date_range.interval.name
- one_interval_period = query_date_range.one_interval_period_as_hogql
- number_interval_period = query_date_range.interval_periods_as_hogql("number")
-
- time_filter = create_time_filter(query_date_range)
- event_filter = time_filter # TODO: add all other filters
-
- placeholders = {
- "interval": ast.Constant(value=interval),
- "one_interval_period": one_interval_period,
- "number_interval_period": number_interval_period,
- "event_filter": event_filter,
- "date_from": query_date_range.date_from_as_hogql,
- "date_to": query_date_range.date_to_as_hogql,
- }
-
- events_query = create_events_query(date_range=query_date_range, event_filter=event_filter)
-
- periods = parse_select(
- """
- SELECT (
- dateTrunc({interval}, {date_to}) - {number_interval_period}
- ) AS start_of_period
- FROM numbers(
- dateDiff(
- {interval},
- dateTrunc({interval}, {date_from}),
- dateTrunc({interval}, {date_to} + {one_interval_period})
- )
- )
- """,
- placeholders=placeholders,
- )
-
- lifecycle_sql = parse_select(
- """
- SELECT groupArray(start_of_period) AS date,
- groupArray(counts) AS total,
- status
- FROM (
- SELECT
- status = 'dormant' ? negate(sum(counts)) : negate(negate(sum(counts))) as counts,
- start_of_period,
- status
- FROM (
- SELECT
- periods.start_of_period as start_of_period,
- 0 AS counts,
- status
- FROM {periods} as periods
- CROSS JOIN (
- SELECT status
- FROM (SELECT 1)
- ARRAY JOIN ['new', 'returning', 'resurrecting', 'dormant'] as status
- ) as sec
- ORDER BY status, start_of_period
- UNION ALL
- SELECT
- start_of_period, count(DISTINCT person_id) AS counts, status
- FROM {events_query}
- GROUP BY start_of_period, status
- )
- WHERE start_of_period <= dateTrunc({interval}, {date_to})
- AND start_of_period >= dateTrunc({interval}, {date_from})
- GROUP BY start_of_period, status
- ORDER BY start_of_period ASC
- )
- GROUP BY status
- """,
- {**placeholders, "periods": periods, "events_query": events_query},
- )
-
- response = execute_hogql_query(
- team=team,
- query=lifecycle_sql,
- query_type="LifecycleQuery",
- )
-
- # ensure that the items are in a deterministic order
- order = {"new": 1, "returning": 2, "resurrecting": 3, "dormant": 4}
- results = sorted(response.results, key=lambda result: order.get(result[2], result[2]))
-
- res = []
- for val in results:
- counts = val[1]
- labels = [item.strftime("%-d-%b-%Y{}".format(" %H:%M" if interval == "hour" else "")) for item in val[0]]
- days = [item.strftime("%Y-%m-%d{}".format(" %H:%M:%S" if interval == "hour" else "")) for item in val[0]]
-
- label = "{} - {}".format("", val[2]) # entity.name
- additional_values = {"label": label, "status": val[2]}
- res.append(
- {
- "data": [float(c) for c in counts],
- "count": float(sum(counts)),
- "labels": labels,
- "days": days,
- **additional_values,
- }
- )
-
- return {"result": res}
diff --git a/posthog/hogql_queries/lifecycle_query_runner.py b/posthog/hogql_queries/lifecycle_query_runner.py
new file mode 100644
index 0000000000000..2b970bb95156c
--- /dev/null
+++ b/posthog/hogql_queries/lifecycle_query_runner.py
@@ -0,0 +1,252 @@
+from typing import Optional, Any, Dict, List
+
+from django.utils.timezone import datetime
+
+from posthog.hogql import ast
+from posthog.hogql.parser import parse_expr, parse_select
+from posthog.hogql.property import property_to_expr, action_to_expr
+from posthog.hogql.query import execute_hogql_query
+from posthog.hogql.timings import HogQLTimings
+from posthog.hogql_queries.query_runner import QueryRunner
+from posthog.models import Team, Action
+from posthog.hogql_queries.utils.query_date_range import QueryDateRange
+from posthog.models.filters.mixins.utils import cached_property
+from posthog.schema import LifecycleQuery, ActionsNode, EventsNode, LifecycleQueryResponse
+
+
+class LifecycleQueryRunner(QueryRunner):
+ query: LifecycleQuery
+
+ def __init__(self, query: LifecycleQuery | Dict[str, Any], team: Team, timings: Optional[HogQLTimings] = None):
+ super().__init__(team, timings)
+ if isinstance(query, LifecycleQuery):
+ self.query = query
+ else:
+ self.query = LifecycleQuery.parse_obj(query)
+
+ def to_query(self) -> ast.SelectQuery:
+ placeholders = {
+ **self.query_date_range.to_placeholders(),
+ "events_query": self.events_query,
+ "periods_query": self.periods_query,
+ }
+ with self.timings.measure("lifecycle_query"):
+ lifecycle_query = parse_select(
+ """
+ SELECT groupArray(start_of_period) AS date,
+ groupArray(counts) AS total,
+ status
+ FROM (
+ SELECT
+ status = 'dormant' ? negate(sum(counts)) : negate(negate(sum(counts))) as counts,
+ start_of_period,
+ status
+ FROM (
+ SELECT
+ periods.start_of_period as start_of_period,
+ 0 AS counts,
+ status
+ FROM {periods_query} as periods
+ CROSS JOIN (
+ SELECT status
+ FROM (SELECT 1)
+ ARRAY JOIN ['new', 'returning', 'resurrecting', 'dormant'] as status
+ ) as sec
+ ORDER BY status, start_of_period
+ UNION ALL
+ SELECT
+ start_of_period, count(DISTINCT person_id) AS counts, status
+ FROM {events_query}
+ GROUP BY start_of_period, status
+ )
+ WHERE start_of_period <= dateTrunc({interval}, {date_to})
+ AND start_of_period >= dateTrunc({interval}, {date_from})
+ GROUP BY start_of_period, status
+ ORDER BY start_of_period ASC
+ )
+ GROUP BY status
+ """,
+ placeholders,
+ timings=self.timings,
+ )
+ return lifecycle_query
+
+ def to_persons_query(self) -> str:
+ # TODO: add support for selecting and filtering by breakdowns
+ with self.timings.measure("persons_query"):
+ return parse_select(
+ """
+ SELECT
+ person_id, start_of_period as breakdown_1, status as breakdown_2
+ FROM
+ {events_query}
+ """,
+ placeholders={"events_query": self.events_query},
+ )
+
+ def run(self) -> LifecycleQueryResponse:
+ response = execute_hogql_query(
+ query_type="LifecycleQuery",
+ query=self.to_query(),
+ team=self.team,
+ timings=self.timings,
+ )
+
+ # TODO: can we move the data conversion part into the query as well? It would make it easier to swap
+ # e.g. the LifecycleQuery with HogQLQuery, while keeping the chart logic the same.
+
+ # ensure that the items are in a deterministic order
+ order = {"new": 1, "returning": 2, "resurrecting": 3, "dormant": 4}
+ results = sorted(response.results, key=lambda result: order.get(result[2], 5))
+
+ res = []
+ for val in results:
+ counts = val[1]
+ labels = [
+ item.strftime("%-d-%b-%Y{}".format(" %H:%M" if self.query_date_range.interval_name == "hour" else ""))
+ for item in val[0]
+ ]
+ days = [
+ item.strftime("%Y-%m-%d{}".format(" %H:%M:%S" if self.query_date_range.interval_name == "hour" else ""))
+ for item in val[0]
+ ]
+
+ label = "{} - {}".format("", val[2]) # entity.name
+ additional_values = {"label": label, "status": val[2]}
+ res.append(
+ {
+ "data": [float(c) for c in counts],
+ "count": float(sum(counts)),
+ "labels": labels,
+ "days": days,
+ **additional_values,
+ }
+ )
+
+ return LifecycleQueryResponse(result=res, timings=response.timings)
+
+ @cached_property
+ def query_date_range(self):
+ return QueryDateRange(
+ date_range=self.query.dateRange, team=self.team, interval=self.query.interval, now=datetime.now()
+ )
+
+ @cached_property
+ def event_filter(self) -> ast.Expr:
+ event_filters: List[ast.Expr] = []
+ with self.timings.measure("date_range"):
+ event_filters.append(
+ parse_expr(
+ "timestamp >= dateTrunc({interval}, {date_from}) - {one_interval}",
+ {
+ "interval": self.query_date_range.interval_period_string_as_hogql_constant(),
+ "one_interval": self.query_date_range.one_interval_period(),
+ "date_from": self.query_date_range.date_from_as_hogql(),
+ },
+ timings=self.timings,
+ )
+ )
+ event_filters.append(
+ parse_expr(
+ "timestamp < dateTrunc({interval}, {date_to}) + {one_interval}",
+ {
+ "interval": self.query_date_range.interval_period_string_as_hogql_constant(),
+ "one_interval": self.query_date_range.one_interval_period(),
+ "date_to": self.query_date_range.date_to_as_hogql(),
+ },
+ timings=self.timings,
+ )
+ )
+ with self.timings.measure("properties"):
+ if self.query.properties is not None and self.query.properties != []:
+ event_filters.append(property_to_expr(self.query.properties, self.team))
+ with self.timings.measure("series_filters"):
+ for serie in self.query.series or []:
+ if isinstance(serie, ActionsNode):
+ action = Action.objects.get(pk=int(serie.id), team=self.team)
+ event_filters.append(action_to_expr(action))
+ elif isinstance(serie, EventsNode):
+ if serie.event is not None:
+ event_filters.append(
+ ast.CompareOperation(
+ op=ast.CompareOperationOp.Eq,
+ left=ast.Field(chain=["event"]),
+ right=ast.Constant(value=str(serie.event)),
+ )
+ )
+ else:
+ raise ValueError(f"Invalid serie kind: {serie.kind}")
+ if serie.properties is not None and serie.properties != []:
+ event_filters.append(property_to_expr(serie.properties, self.team))
+ with self.timings.measure("test_account_filters"):
+ if (
+ self.query.filterTestAccounts
+ and isinstance(self.team.test_account_filters, list)
+ and len(self.team.test_account_filters) > 0
+ ):
+ for property in self.team.test_account_filters:
+ event_filters.append(property_to_expr(property, self.team))
+
+ if len(event_filters) == 0:
+ return ast.Constant(value=True)
+ elif len(event_filters) == 1:
+ return event_filters[0]
+ else:
+ return ast.And(exprs=event_filters)
+
+ @cached_property
+ def events_query(self):
+ with self.timings.measure("events_query"):
+ events_query = parse_select(
+ """
+ SELECT
+ events.person.id as person_id,
+ min(events.person.created_at) AS created_at,
+ arraySort(groupUniqArray(dateTrunc({interval}, events.timestamp))) AS all_activity,
+ arrayPopBack(arrayPushFront(all_activity, dateTrunc({interval}, created_at))) as previous_activity,
+ arrayPopFront(arrayPushBack(all_activity, dateTrunc({interval}, toDateTime('1970-01-01 00:00:00')))) as following_activity,
+ arrayMap((previous, current, index) -> (previous = current ? 'new' : ((current - {one_interval_period}) = previous AND index != 1) ? 'returning' : 'resurrecting'), previous_activity, all_activity, arrayEnumerate(all_activity)) as initial_status,
+ arrayMap((current, next) -> (current + {one_interval_period} = next ? '' : 'dormant'), all_activity, following_activity) as dormant_status,
+ arrayMap(x -> x + {one_interval_period}, arrayFilter((current, is_dormant) -> is_dormant = 'dormant', all_activity, dormant_status)) as dormant_periods,
+ arrayMap(x -> 'dormant', dormant_periods) as dormant_label,
+ arrayConcat(arrayZip(all_activity, initial_status), arrayZip(dormant_periods, dormant_label)) as temp_concat,
+ arrayJoin(temp_concat) as period_status_pairs,
+ period_status_pairs.1 as start_of_period,
+ period_status_pairs.2 as status
+ FROM events
+ WHERE {event_filter}
+ GROUP BY person_id
+ """,
+ placeholders={
+ **self.query_date_range.to_placeholders(),
+ "event_filter": self.event_filter,
+ },
+ timings=self.timings,
+ )
+ sampling_factor = self.query.samplingFactor
+ if sampling_factor is not None and isinstance(sampling_factor, float):
+ sample_expr = ast.SampleExpr(sample_value=ast.RatioExpr(left=ast.Constant(value=sampling_factor)))
+ events_query.select_from.sample = sample_expr
+
+ return events_query
+
+ @cached_property
+ def periods_query(self):
+ with self.timings.measure("periods_query"):
+ periods_query = parse_select(
+ """
+ SELECT (
+ dateTrunc({interval}, {date_to}) - {number_interval_period}
+ ) AS start_of_period
+ FROM numbers(
+ dateDiff(
+ {interval},
+ dateTrunc({interval}, {date_from}),
+ dateTrunc({interval}, {date_to} + {one_interval_period})
+ )
+ )
+ """,
+ placeholders=self.query_date_range.to_placeholders(),
+ timings=self.timings,
+ )
+ return periods_query
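A usage sketch of the new runner, mirroring the updated tests further down; team is assumed to be an existing Team instance.

from posthog.hogql_queries.lifecycle_query_runner import LifecycleQueryRunner
from posthog.schema import DateRange, EventsNode, IntervalType, LifecycleQuery

query = LifecycleQuery(
    dateRange=DateRange(date_from="-7d"),
    interval=IntervalType.day,
    series=[EventsNode(event="$pageview")],
)
runner = LifecycleQueryRunner(query=query, team=team)  # assumption: team is a posthog.models.Team
response = runner.run()      # LifecycleQueryResponse with one entry per lifecycle status
hogql = runner.to_hogql()    # the generated HogQL as a string, handy for debugging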
diff --git a/posthog/hogql_queries/query_date_range.py b/posthog/hogql_queries/query_date_range.py
deleted file mode 100644
index 4d76b222deb2b..0000000000000
--- a/posthog/hogql_queries/query_date_range.py
+++ /dev/null
@@ -1,114 +0,0 @@
-from datetime import datetime
-from functools import cached_property, lru_cache
-from typing import Optional
-
-import pytz
-from dateutil.relativedelta import relativedelta
-
-from posthog.hogql.parser import parse_expr, ast
-from posthog.models.team import Team
-from posthog.queries.util import get_earliest_timestamp
-from posthog.schema import DateRange, IntervalType
-from posthog.utils import DEFAULT_DATE_FROM_DAYS, relative_date_parse, relative_date_parse_with_delta_mapping
-
-
-# Originally similar to posthog/queries/query_date_range.py but rewritten to be used in HogQL queries
-class QueryDateRange:
- """Translation of the raw `date_from` and `date_to` filter values to datetimes."""
-
- _team: Team
- _date_range: Optional[DateRange]
- _interval: Optional[IntervalType]
- _now_non_timezone: datetime
-
- def __init__(
- self, date_range: Optional[DateRange], team: Team, interval: Optional[IntervalType], now: datetime
- ) -> None:
- self._team = team
- self._date_range = date_range
- self._interval = interval
- self._now_non_timezone = now
-
- @cached_property
- def date_to(self) -> datetime:
- date_to = self._now
- delta_mapping = None
-
- if self._date_range and self._date_range.date_to:
- date_to, delta_mapping = relative_date_parse_with_delta_mapping(
- self._date_range.date_to, self._team.timezone_info, always_truncate=True, now=self._now
- )
-
- is_relative = not self._date_range or not self._date_range.date_to or delta_mapping is not None
- if not self.is_hourly():
- date_to = date_to.replace(hour=23, minute=59, second=59, microsecond=999999)
- elif is_relative:
- date_to = date_to.replace(minute=59, second=59, microsecond=999999)
-
- return date_to
-
- def get_earliest_timestamp(self):
- return get_earliest_timestamp(self._team.pk)
-
- @cached_property
- def date_from(self) -> datetime:
- date_from: datetime
- if self._date_range and self._date_range.date_from == "all":
- date_from = self.get_earliest_timestamp()
- elif self._date_range and isinstance(self._date_range.date_from, str):
- date_from = relative_date_parse(self._date_range.date_from, self._team.timezone_info, now=self._now)
- else:
- date_from = self._now.replace(hour=0, minute=0, second=0, microsecond=0) - relativedelta(
- days=DEFAULT_DATE_FROM_DAYS
- )
-
- if not self.is_hourly():
- date_from = date_from.replace(hour=0, minute=0, second=0, microsecond=0)
-
- return date_from
-
- @cached_property
- def _now(self):
- return self._localize_to_team(self._now_non_timezone)
-
- def _localize_to_team(self, target: datetime):
- return target.astimezone(pytz.timezone(self._team.timezone))
-
- @cached_property
- def date_to_str(self) -> str:
- return self.date_to.strftime("%Y-%m-%d %H:%M:%S")
-
- @cached_property
- def date_from_str(self) -> str:
- return self.date_from.strftime("%Y-%m-%d %H:%M:%S")
-
- def is_hourly(self):
- return self.interval.name == "hour"
-
- @cached_property
- def date_to_as_hogql(self):
- return parse_expr(f"assumeNotNull(toDateTime('{self.date_to_str}'))")
-
- @cached_property
- def date_from_as_hogql(self):
- return parse_expr(f"assumeNotNull(toDateTime('{self.date_from_str}'))")
-
- @cached_property
- def interval(self):
- return self._interval or IntervalType.day
-
- @cached_property
- def one_interval_period_as_hogql(self):
- return parse_expr(f"toInterval{self.interval.capitalize()}(1)")
-
- @lru_cache
- def interval_periods_as_hogql(self, s: str):
- return parse_expr(f"toInterval{self.interval.capitalize()}({s})")
-
- @cached_property
- def interval_period_string(self):
- return self.interval.value
-
- @cached_property
- def interval_period_string_as_hogql(self):
- return ast.Constant(value=self.interval.value)
diff --git a/posthog/hogql_queries/query_runner.py b/posthog/hogql_queries/query_runner.py
new file mode 100644
index 0000000000000..b8a3a10a4aa7b
--- /dev/null
+++ b/posthog/hogql_queries/query_runner.py
@@ -0,0 +1,37 @@
+from typing import Optional
+
+from pydantic import BaseModel
+
+from posthog.hogql import ast
+from posthog.hogql.context import HogQLContext
+from posthog.hogql.printer import print_ast
+from posthog.hogql.timings import HogQLTimings
+from posthog.models import Team
+
+
+class QueryRunner:
+ query: BaseModel
+ team: Team
+ timings: HogQLTimings
+
+ def __init__(self, team: Team, timings: Optional[HogQLTimings] = None):
+ self.team = team
+ self.timings = timings or HogQLTimings()
+
+ def run(self) -> BaseModel:
+ raise NotImplementedError()
+
+ def to_query(self) -> ast.SelectQuery:
+ raise NotImplementedError()
+
+ def to_persons_query(self) -> str:
+ # TODO: add support for selecting and filtering by breakdowns
+ raise NotImplementedError()
+
+ def to_hogql(self) -> str:
+ with self.timings.measure("to_hogql"):
+ return print_ast(
+ self.to_query(),
+ HogQLContext(team_id=self.team.pk, enable_select_queries=True, timings=self.timings),
+ "hogql",
+ )
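A minimal sketch of how a concrete runner plugs into this base class; EventCountQueryRunner and its query are hypothetical and only illustrate the contract.

from posthog.hogql import ast
from posthog.hogql.parser import parse_select
from posthog.hogql_queries.query_runner import QueryRunner


class EventCountQueryRunner(QueryRunner):  # hypothetical subclass, not part of this PR
    def to_query(self) -> ast.SelectQuery:
        # to_hogql() on the base class prints whatever AST this returns
        return parse_select("SELECT count() FROM events")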
diff --git a/posthog/hogql_queries/test/test_lifecycle_hogql_query.py b/posthog/hogql_queries/test/test_lifecycle_hogql_query.py
index 5cc56252b046f..d9996640f64c3 100644
--- a/posthog/hogql_queries/test/test_lifecycle_hogql_query.py
+++ b/posthog/hogql_queries/test/test_lifecycle_hogql_query.py
@@ -3,9 +3,8 @@
from freezegun import freeze_time
from posthog.hogql.query import execute_hogql_query
+from posthog.hogql_queries.lifecycle_query_runner import LifecycleQueryRunner
from posthog.models.utils import UUIDT
-from posthog.hogql_queries.lifecycle_hogql_query import create_events_query, create_time_filter, run_lifecycle_query
-from posthog.hogql_queries.query_date_range import QueryDateRange
from posthog.schema import DateRange, IntervalType, LifecycleQuery, EventsNode
from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, _create_person, flush_persons_and_events
@@ -67,105 +66,29 @@ def _create_test_events(self):
]
)
- def _run_events_query(self, date_from, date_to, interval):
- date_range = QueryDateRange(
- date_range=DateRange(date_from=date_from, date_to=date_to),
- team=self.team,
- interval=interval,
- now=datetime.strptime("2020-01-30T00:00:00Z", "%Y-%m-%dT%H:%M:%SZ"),
+ def _create_query_runner(self, date_from, date_to, interval) -> LifecycleQueryRunner:
+ series = [EventsNode(event="$pageview")]
+ query = LifecycleQuery(
+ dateRange=DateRange(date_from=date_from, date_to=date_to), interval=interval, series=series
)
- time_filter = create_time_filter(date_range)
+ return LifecycleQueryRunner(team=self.team, query=query)
- # TODO probably doesn't make sense to test like this
- # maybe this query should be what is returned by the function
- events_query = create_events_query(event_filter=time_filter, date_range=date_range)
+ def _run_events_query(self, date_from, date_to, interval):
+ events_query = self._create_query_runner(date_from, date_to, interval).events_query
return execute_hogql_query(
team=self.team,
query="""
- SELECT
- start_of_period, count(DISTINCT person_id) AS counts, status
- FROM {events_query}
- GROUP BY start_of_period, status
- """,
- query_type="LifecycleQuery",
+ SELECT
+ start_of_period, count(DISTINCT person_id) AS counts, status
+ FROM {events_query}
+ GROUP BY start_of_period, status
+ """,
placeholders={"events_query": events_query},
+ query_type="LifecycleEventsQuery",
)
- def test_events_query_whole_range(self):
- self._create_test_events()
-
- date_from = "2020-01-09"
- date_to = "2020-01-19"
-
- response = self._run_events_query(date_from, date_to, IntervalType.day)
-
- self.assertEqual(
- {
- (datetime(2020, 1, 9, 0, 0), 1, "new"), # p2
- (datetime(2020, 1, 10, 0, 0), 1, "dormant"), # p2
- (datetime(2020, 1, 11, 0, 0), 1, "new"), # p1
- (datetime(2020, 1, 12, 0, 0), 1, "new"), # p3
- (datetime(2020, 1, 12, 0, 0), 1, "resurrecting"), # p2
- (datetime(2020, 1, 12, 0, 0), 1, "returning"), # p1
- (datetime(2020, 1, 13, 0, 0), 1, "returning"), # p1
- (datetime(2020, 1, 13, 0, 0), 2, "dormant"), # p2, p3
- (datetime(2020, 1, 14, 0, 0), 1, "dormant"), # p1
- (datetime(2020, 1, 15, 0, 0), 1, "resurrecting"), # p1
- (datetime(2020, 1, 15, 0, 0), 1, "new"), # p4
- (datetime(2020, 1, 16, 0, 0), 2, "dormant"), # p1, p4
- (datetime(2020, 1, 17, 0, 0), 1, "resurrecting"), # p1
- (datetime(2020, 1, 18, 0, 0), 1, "dormant"), # p1
- (datetime(2020, 1, 19, 0, 0), 1, "resurrecting"), # p1
- (datetime(2020, 1, 20, 0, 0), 1, "dormant"), # p1
- },
- set(response.results),
- )
-
- def test_events_query_partial_range(self):
- self._create_test_events()
- date_from = "2020-01-12"
- date_to = "2020-01-14"
- response = self._run_events_query(date_from, date_to, IntervalType.day)
-
- self.assertEqual(
- {
- (datetime(2020, 1, 11, 0, 0), 1, "new"), # p1
- (datetime(2020, 1, 12, 0, 0), 1, "new"), # p3
- (datetime(2020, 1, 12, 0, 0), 1, "resurrecting"), # p2
- (datetime(2020, 1, 12, 0, 0), 1, "returning"), # p1
- (datetime(2020, 1, 13, 0, 0), 1, "returning"), # p1
- (datetime(2020, 1, 13, 0, 0), 2, "dormant"), # p2, p3
- (datetime(2020, 1, 14, 0, 0), 1, "dormant"), # p1
- },
- set(response.results),
- )
-
- # def test_start_on_dormant(self):
- # self.create_test_events()
- # date_from = "2020-01-13"
- # date_to = "2020-01-14"
- # response = self.run_events_query(date_from, date_to, IntervalType.day)
- #
- # self.assertEqual(
- # {
- # (datetime(2020, 1, 12, 0, 0), 1, "new"), # p3
- # # TODO this currently fails, as it treats p1 as resurrecting.
- # # This might just be fine, later in the query we would just throw away results before the 13th
- # (datetime(2020, 1, 12, 0, 0), 1, "resurrecting"), # p2
- # (datetime(2020, 1, 12, 0, 0), 1, "returning"), # p1
- # (datetime(2020, 1, 13, 0, 0), 1, "returning"), # p1
- # (datetime(2020, 1, 13, 0, 0), 2, "dormant"), # p2, p3
- # (datetime(2020, 1, 14, 0, 0), 1, "dormant"), # p1
- # },
- # set(response.results),
- # )
-
def _run_lifecycle_query(self, date_from, date_to, interval):
- series = [EventsNode(event="$pageview")]
- query = LifecycleQuery(
- dateRange=DateRange(date_from=date_from, date_to=date_to), interval=interval, series=series
- )
- return run_lifecycle_query(team=self.team, query=query)
+ return self._create_query_runner(date_from, date_to, interval).run()
def test_lifecycle_query_whole_range(self):
self._create_test_events()
@@ -175,7 +98,7 @@ def test_lifecycle_query_whole_range(self):
response = self._run_lifecycle_query(date_from, date_to, IntervalType.day)
- statuses = [res["status"] for res in response["result"]]
+ statuses = [res["status"] for res in response.result]
self.assertEqual(["new", "returning", "resurrecting", "dormant"], statuses)
self.assertEqual(
@@ -357,5 +280,54 @@ def test_lifecycle_query_whole_range(self):
"status": "dormant",
},
],
- response["result"],
+ response.result,
+ )
+
+ def test_events_query_whole_range(self):
+ self._create_test_events()
+
+ date_from = "2020-01-09"
+ date_to = "2020-01-19"
+
+ response = self._run_events_query(date_from, date_to, IntervalType.day)
+
+ self.assertEqual(
+ {
+ (datetime(2020, 1, 9, 0, 0), 1, "new"), # p2
+ (datetime(2020, 1, 10, 0, 0), 1, "dormant"), # p2
+ (datetime(2020, 1, 11, 0, 0), 1, "new"), # p1
+ (datetime(2020, 1, 12, 0, 0), 1, "new"), # p3
+ (datetime(2020, 1, 12, 0, 0), 1, "resurrecting"), # p2
+ (datetime(2020, 1, 12, 0, 0), 1, "returning"), # p1
+ (datetime(2020, 1, 13, 0, 0), 1, "returning"), # p1
+ (datetime(2020, 1, 13, 0, 0), 2, "dormant"), # p2, p3
+ (datetime(2020, 1, 14, 0, 0), 1, "dormant"), # p1
+ (datetime(2020, 1, 15, 0, 0), 1, "resurrecting"), # p1
+ (datetime(2020, 1, 15, 0, 0), 1, "new"), # p4
+ (datetime(2020, 1, 16, 0, 0), 2, "dormant"), # p1, p4
+ (datetime(2020, 1, 17, 0, 0), 1, "resurrecting"), # p1
+ (datetime(2020, 1, 18, 0, 0), 1, "dormant"), # p1
+ (datetime(2020, 1, 19, 0, 0), 1, "resurrecting"), # p1
+ (datetime(2020, 1, 20, 0, 0), 1, "dormant"), # p1
+ },
+ set(response.results),
+ )
+
+ def test_events_query_partial_range(self):
+ self._create_test_events()
+ date_from = "2020-01-12"
+ date_to = "2020-01-14"
+ response = self._run_events_query(date_from, date_to, IntervalType.day)
+
+ self.assertEqual(
+ {
+ (datetime(2020, 1, 11, 0, 0), 1, "new"), # p1
+ (datetime(2020, 1, 12, 0, 0), 1, "new"), # p3
+ (datetime(2020, 1, 12, 0, 0), 1, "resurrecting"), # p2
+ (datetime(2020, 1, 12, 0, 0), 1, "returning"), # p1
+ (datetime(2020, 1, 13, 0, 0), 1, "returning"), # p1
+ (datetime(2020, 1, 13, 0, 0), 2, "dormant"), # p2, p3
+ (datetime(2020, 1, 14, 0, 0), 1, "dormant"), # p1
+ },
+ set(response.results),
)
diff --git a/posthog/hogql_queries/utils/query_date_range.py b/posthog/hogql_queries/utils/query_date_range.py
new file mode 100644
index 0000000000000..a9c86614cac5f
--- /dev/null
+++ b/posthog/hogql_queries/utils/query_date_range.py
@@ -0,0 +1,124 @@
+import re
+from functools import cached_property
+from datetime import datetime
+from typing import Optional, Dict
+from zoneinfo import ZoneInfo
+
+from dateutil.relativedelta import relativedelta
+
+from posthog.hogql.parser import ast
+from posthog.models.team import Team
+from posthog.queries.util import get_earliest_timestamp
+from posthog.schema import DateRange, IntervalType
+from posthog.utils import DEFAULT_DATE_FROM_DAYS, relative_date_parse, relative_date_parse_with_delta_mapping
+
+
+# Originally similar to posthog/queries/query_date_range.py but rewritten to be used in HogQL queries
+class QueryDateRange:
+ """Translation of the raw `date_from` and `date_to` filter values to datetimes."""
+
+ _team: Team
+ _date_range: Optional[DateRange]
+ _interval: Optional[IntervalType]
+ _now_without_timezone: datetime
+
+ def __init__(
+ self, date_range: Optional[DateRange], team: Team, interval: Optional[IntervalType], now: datetime
+ ) -> None:
+ self._team = team
+ self._date_range = date_range
+ self._interval = interval or IntervalType.day
+ self._now_without_timezone = now
+
+ if not isinstance(self._interval, IntervalType) or re.match(r"[^a-z]", self._interval.name):
+ raise ValueError(f"Invalid interval: {interval}")
+
+ def date_to(self) -> datetime:
+ date_to = self.now_with_timezone
+ delta_mapping = None
+
+ if self._date_range and self._date_range.date_to:
+ date_to, delta_mapping = relative_date_parse_with_delta_mapping(
+ self._date_range.date_to, self._team.timezone_info, always_truncate=True, now=self.now_with_timezone
+ )
+
+ is_relative = not self._date_range or not self._date_range.date_to or delta_mapping is not None
+ if not self.is_hourly:
+ date_to = date_to.replace(hour=23, minute=59, second=59, microsecond=999999)
+ elif is_relative:
+ date_to = date_to.replace(minute=59, second=59, microsecond=999999)
+
+ return date_to
+
+ def get_earliest_timestamp(self) -> datetime:
+ return get_earliest_timestamp(self._team.pk)
+
+ def date_from(self) -> datetime:
+ date_from: datetime
+ if self._date_range and self._date_range.date_from == "all":
+ date_from = self.get_earliest_timestamp()
+ elif self._date_range and isinstance(self._date_range.date_from, str):
+ date_from = relative_date_parse(
+ self._date_range.date_from, self._team.timezone_info, now=self.now_with_timezone
+ )
+ else:
+ date_from = self.now_with_timezone.replace(hour=0, minute=0, second=0, microsecond=0) - relativedelta(
+ days=DEFAULT_DATE_FROM_DAYS
+ )
+
+ if not self.is_hourly:
+ date_from = date_from.replace(hour=0, minute=0, second=0, microsecond=0)
+
+ return date_from
+
+ @cached_property
+ def now_with_timezone(self) -> datetime:
+ return self._now_without_timezone.astimezone(ZoneInfo(self._team.timezone))
+
+ @cached_property
+ def date_to_str(self) -> str:
+ return self.date_to().strftime("%Y-%m-%d %H:%M:%S")
+
+ @cached_property
+ def date_from_str(self) -> str:
+ return self.date_from().strftime("%Y-%m-%d %H:%M:%S")
+
+ @cached_property
+ def is_hourly(self) -> bool:
+ return self.interval_name == "hour"
+
+ @cached_property
+ def interval_type(self) -> IntervalType:
+ return self._interval or IntervalType.day
+
+ @cached_property
+ def interval_name(self) -> str:
+ return self.interval_type.name
+
+ def date_to_as_hogql(self) -> ast.Expr:
+ return ast.Call(
+ name="assumeNotNull", args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=self.date_to_str))])]
+ )
+
+ def date_from_as_hogql(self) -> ast.Expr:
+ return ast.Call(
+ name="assumeNotNull", args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=self.date_from_str))])]
+ )
+
+ def one_interval_period(self) -> ast.Expr:
+ return ast.Call(name=f"toInterval{self.interval_name.capitalize()}", args=[ast.Constant(value=1)])
+
+ def number_interval_periods(self) -> ast.Expr:
+ return ast.Call(name=f"toInterval{self.interval_name.capitalize()}", args=[ast.Field(chain=["number"])])
+
+ def interval_period_string_as_hogql_constant(self) -> ast.Expr:
+ return ast.Constant(value=self.interval_name)
+
+ def to_placeholders(self) -> Dict[str, ast.Expr]:
+ return {
+ "interval": self.interval_period_string_as_hogql_constant(),
+ "one_interval_period": self.one_interval_period(),
+ "number_interval_period": self.number_interval_periods(),
+ "date_from": self.date_from_as_hogql(),
+ "date_to": self.date_to_as_hogql(),
+ }
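A short sketch of how the rewritten class feeds placeholders into a parsed HogQL query; the query text is illustrative and team is again assumed to be an existing Team instance.

from datetime import datetime

from posthog.hogql.parser import parse_select
from posthog.hogql_queries.utils.query_date_range import QueryDateRange
from posthog.schema import DateRange, IntervalType

date_range = QueryDateRange(
    date_range=DateRange(date_from="-7d"),
    team=team,  # assumption: an existing posthog.models.Team
    interval=IntervalType.day,
    now=datetime.now(),
)
select = parse_select(
    "SELECT count() FROM events WHERE timestamp >= {date_from} AND timestamp < {date_to}",
    placeholders=date_range.to_placeholders(),
)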
diff --git a/posthog/hogql_queries/test/test_query_date_range.py b/posthog/hogql_queries/utils/test/test_query_date_range.py
similarity index 54%
rename from posthog/hogql_queries/test/test_query_date_range.py
rename to posthog/hogql_queries/utils/test/test_query_date_range.py
index 82966cc5f1bff..0ab8467567a50 100644
--- a/posthog/hogql_queries/test/test_query_date_range.py
+++ b/posthog/hogql_queries/utils/test/test_query_date_range.py
@@ -1,6 +1,6 @@
from dateutil import parser
-from posthog.hogql_queries.query_date_range import QueryDateRange
+from posthog.hogql_queries.utils.query_date_range import QueryDateRange
from posthog.schema import DateRange, IntervalType
from posthog.test.base import APIBaseTest
@@ -10,32 +10,17 @@ def test_parsed_date(self):
now = parser.isoparse("2021-08-25T00:00:00.000Z")
date_range = DateRange(date_from="-48h")
query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.day, now=now)
- parsed_date_from = query_date_range.date_from
- parsed_date_to = query_date_range.date_to
- self.assertEqual(
- parsed_date_from,
- parser.isoparse("2021-08-23T00:00:00Z"),
- )
- self.assertEqual(
- parsed_date_to,
- parser.isoparse("2021-08-25T23:59:59.999999Z"),
- )
+ self.assertEqual(query_date_range.date_from(), parser.isoparse("2021-08-23T00:00:00Z"))
+ self.assertEqual(query_date_range.date_to(), parser.isoparse("2021-08-25T23:59:59.999999Z"))
def test_parsed_date_hour(self):
now = parser.isoparse("2021-08-25T00:00:00.000Z")
date_range = DateRange(date_from="-48h")
query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.hour, now=now)
- parsed_date_from = query_date_range.date_from
- parsed_date_to = query_date_range.date_to
-
+ self.assertEqual(query_date_range.date_from(), parser.isoparse("2021-08-23T00:00:00Z"))
self.assertEqual(
- parsed_date_from,
- parser.isoparse("2021-08-23T00:00:00Z"),
- )
- self.assertEqual(
- parsed_date_to,
- parser.isoparse("2021-08-25T00:59:59.999999Z"),
+ query_date_range.date_to(), parser.isoparse("2021-08-25T00:59:59.999999Z")
) # ensure last hour is included
def test_parsed_date_middle_of_hour(self):
@@ -43,34 +28,25 @@ def test_parsed_date_middle_of_hour(self):
date_range = DateRange(date_from="2021-08-23 05:00:00", date_to="2021-08-26 07:00:00")
query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.hour, now=now)
- parsed_date_from = query_date_range.date_from
- parsed_date_to = query_date_range.date_to
-
- self.assertEqual(parsed_date_from, parser.isoparse("2021-08-23 05:00:00Z"))
- self.assertEqual(parsed_date_to, parser.isoparse("2021-08-26 07:00:00Z")) # ensure last hour is included
+ self.assertEqual(query_date_range.date_from(), parser.isoparse("2021-08-23 05:00:00Z"))
+ self.assertEqual(
+ query_date_range.date_to(), parser.isoparse("2021-08-26 07:00:00Z")
+ ) # ensure last hour is included
def test_parsed_date_week(self):
now = parser.isoparse("2021-08-25T00:00:00.000Z")
date_range = DateRange(date_from="-7d")
query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.week, now=now)
- parsed_date_from = query_date_range.date_from
- parsed_date_to = query_date_range.date_to
- self.assertEqual(
- parsed_date_from,
- parser.isoparse("2021-08-18 00:00:00Z"),
- )
- self.assertEqual(
- parsed_date_to,
- parser.isoparse("2021-08-25 23:59:59.999999Z"),
- )
+ self.assertEqual(query_date_range.date_from(), parser.isoparse("2021-08-18 00:00:00Z"))
+ self.assertEqual(query_date_range.date_to(), parser.isoparse("2021-08-25 23:59:59.999999Z"))
def test_is_hourly(self):
now = parser.isoparse("2021-08-25T00:00:00.000Z")
date_range = DateRange(date_from="-48h")
query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.day, now=now)
- self.assertFalse(query_date_range.is_hourly())
+ self.assertFalse(query_date_range.is_hourly)
query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.hour, now=now)
- self.assertTrue(query_date_range.is_hourly())
+ self.assertTrue(query_date_range.is_hourly)
diff --git a/posthog/management/commands/create_batch_export_from_app.py b/posthog/management/commands/create_batch_export_from_app.py
index 20b0b4c89ca86..eadf71532db02 100644
--- a/posthog/management/commands/create_batch_export_from_app.py
+++ b/posthog/management/commands/create_batch_export_from_app.py
@@ -48,6 +48,12 @@ def add_arguments(self, parser):
default=False,
help="Backfill the newly created BatchExport with the last period of data.",
)
+ parser.add_argument(
+ "--migrate-disabled-plugin-config",
+ action="store_true",
+ default=False,
+ help="Migrate a PluginConfig even if its disabled.",
+ )
def handle(self, *args, **options):
"""Handle creation of a BatchExport from a given PluginConfig."""
@@ -82,8 +88,8 @@ def handle(self, *args, **options):
"destination_data": destination_data,
}
- if dry_run is True:
- self.stdout.write("No BatchExport will be created as this is a dry run or confirmation check rejected.")
+ if dry_run is True or (options["migrate_disabled_plugin_config"] is False and plugin_config.enabled is False):
+ self.stdout.write("No BatchExport will be created as this is a dry run or existing plugin is disabled.")
return json.dumps(batch_export_data, indent=4, default=str)
else:
destination = BatchExportDestination(**batch_export_data["destination_data"])
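A minimal standalone sketch of the gating rule the hunk above introduces (illustrative names, not the command's actual helpers): a BatchExport is only created when the run is not a dry run and either the PluginConfig is enabled or --migrate-disabled-plugin-config was passed.

def should_create_batch_export(dry_run: bool, plugin_enabled: bool, migrate_disabled: bool) -> bool:
    # Dry runs never create anything; disabled PluginConfigs are skipped
    # unless migration was explicitly requested.
    if dry_run:
        return False
    return plugin_enabled or migrate_disabled

assert should_create_batch_export(dry_run=False, plugin_enabled=False, migrate_disabled=True)
assert not should_create_batch_export(dry_run=True, plugin_enabled=True, migrate_disabled=True)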
diff --git a/posthog/management/commands/send_usage_report.py b/posthog/management/commands/send_usage_report.py
index 4c67d451c2a8a..03e4b4a102da4 100644
--- a/posthog/management/commands/send_usage_report.py
+++ b/posthog/management/commands/send_usage_report.py
@@ -1,5 +1,3 @@
-import pprint
-
from django.core.management.base import BaseCommand
from posthog.tasks.usage_report import send_all_org_usage_reports
@@ -10,7 +8,6 @@ class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument("--dry-run", type=bool, help="Print information instead of sending it")
- parser.add_argument("--print-reports", type=bool, help="Print the reports in full")
parser.add_argument("--date", type=str, help="The date to be ran in format YYYY-MM-DD")
parser.add_argument("--event-name", type=str, help="Override the event name to be sent - for testing")
parser.add_argument(
@@ -28,20 +25,14 @@ def handle(self, *args, **options):
run_async = options["async"]
if run_async:
- results = send_all_org_usage_reports.delay(
+ send_all_org_usage_reports.delay(
dry_run, date, event_name, skip_capture_event=skip_capture_event, only_organization_id=organization_id
)
else:
- results = send_all_org_usage_reports(
+ send_all_org_usage_reports(
dry_run, date, event_name, skip_capture_event=skip_capture_event, only_organization_id=organization_id
)
- if options["print_reports"]:
- print("") # noqa T201
- pprint.pprint(results) # noqa T203
- print("") # noqa T201
if dry_run:
print("Dry run so not sent.") # noqa T201
- else:
- print(f"{len(results)} Reports sent!") # noqa T201
print("Done!") # noqa T201
diff --git a/posthog/management/commands/test/test_create_batch_export_from_app.py b/posthog/management/commands/test/test_create_batch_export_from_app.py
index 4a51975d86648..bbbb36079d013 100644
--- a/posthog/management/commands/test/test_create_batch_export_from_app.py
+++ b/posthog/management/commands/test/test_create_batch_export_from_app.py
@@ -1,4 +1,5 @@
import datetime as dt
+import itertools
import json
import typing
@@ -116,6 +117,20 @@ def plugin_config(request, s3_plugin_config, snowflake_plugin_config) -> PluginC
raise ValueError(f"Unsupported plugin: {request.param}")
+@pytest.fixture
+def disabled_plugin_config(request, s3_plugin_config, snowflake_plugin_config) -> PluginConfig:
+ if request.param == "S3":
+ s3_plugin_config.enabled = False
+ s3_plugin_config.save()
+ return s3_plugin_config
+ elif request.param == "Snowflake":
+ snowflake_plugin_config.enabled = False
+ snowflake_plugin_config.save()
+ return snowflake_plugin_config
+ else:
+ raise ValueError(f"Unsupported plugin: {request.param}")
+
+
@pytest.mark.django_db
@pytest.mark.parametrize(
"plugin_config,config,expected_type",
@@ -155,7 +170,6 @@ def test_create_batch_export_from_app_fails_with_mismatched_team_id(plugin_confi
@pytest.mark.parametrize("plugin_config", ["S3", "Snowflake"], indirect=True)
def test_create_batch_export_from_app_dry_run(plugin_config):
"""Test a dry_run of the create_batch_export_from_app command."""
-
output = call_command(
"create_batch_export_from_app",
f"--plugin-config-id={plugin_config.id}",
@@ -166,6 +180,7 @@ def test_create_batch_export_from_app_dry_run(plugin_config):
batch_export_data = json.loads(output)
+ assert "id" not in batch_export_data
assert batch_export_data["team_id"] == plugin_config.team.id
assert batch_export_data["interval"] == "hour"
assert batch_export_data["name"] == f"{export_type} Export"
@@ -178,19 +193,14 @@ def test_create_batch_export_from_app_dry_run(plugin_config):
@pytest.mark.django_db
@pytest.mark.parametrize(
"interval,plugin_config,disable_plugin_config",
- [
- ("hour", "S3", True),
- ("hour", "S3", False),
- ("day", "S3", True),
- ("day", "S3", False),
- ("hour", "Snowflake", True),
- ("hour", "Snowflake", False),
- ("day", "Snowflake", True),
- ("day", "Snowflake", False),
- ],
+ itertools.product(["hour", "day"], ["S3", "Snowflake"], [True, False]),
indirect=["plugin_config"],
)
-def test_create_batch_export_from_app(interval, plugin_config, disable_plugin_config):
+def test_create_batch_export_from_app(
+ interval,
+ plugin_config,
+ disable_plugin_config,
+):
"""Test a live run of the create_batch_export_from_app command."""
args = [
f"--plugin-config-id={plugin_config.id}",
@@ -237,6 +247,69 @@ def test_create_batch_export_from_app(interval, plugin_config, disable_plugin_co
assert args[key] == expected
+@pytest.mark.django_db
+@pytest.mark.parametrize(
+ "interval,disabled_plugin_config,migrate_disabled_plugin_config",
+ itertools.product(["hour", "day"], ["S3", "Snowflake"], [True, False]),
+ indirect=["disabled_plugin_config"],
+)
+def test_create_batch_export_from_app_with_disabled_plugin(
+ interval,
+ disabled_plugin_config,
+ migrate_disabled_plugin_config,
+):
+ """Test a live run of the create_batch_export_from_app command."""
+ args = [
+ f"--plugin-config-id={disabled_plugin_config.id}",
+ f"--team-id={disabled_plugin_config.team.id}",
+ f"--interval={interval}",
+ ]
+ if migrate_disabled_plugin_config:
+ args.append("--migrate-disabled-plugin-config")
+
+ output = call_command("create_batch_export_from_app", *args)
+
+ disabled_plugin_config.refresh_from_db()
+ assert disabled_plugin_config.enabled is False
+
+ export_type, config = map_plugin_config_to_destination(disabled_plugin_config)
+
+ batch_export_data = json.loads(output)
+
+ assert batch_export_data["team_id"] == disabled_plugin_config.team.id
+ assert batch_export_data["interval"] == interval
+ assert batch_export_data["name"] == f"{export_type} Export"
+ assert batch_export_data["destination_data"] == {
+ "type": export_type,
+ "config": config,
+ }
+
+ if not migrate_disabled_plugin_config:
+ assert "id" not in batch_export_data
+ return
+
+ assert "id" in batch_export_data
+
+ temporal = sync_connect()
+
+ schedule = describe_schedule(temporal, str(batch_export_data["id"]))
+ expected_interval = dt.timedelta(**{f"{interval}s": 1})
+ assert schedule.schedule.spec.intervals[0].every == expected_interval
+
+ codec = EncryptionCodec(settings=settings)
+ decoded_payload = async_to_sync(codec.decode)(schedule.schedule.action.args)
+ args = json.loads(decoded_payload[0].data)
+
+ # Common inputs
+ assert args["team_id"] == disabled_plugin_config.team.pk
+ assert args["batch_export_id"] == str(batch_export_data["id"])
+ assert args["interval"] == interval
+
+ # Type specific inputs
+ for key, expected in config.items():
+ assert args[key] == expected
+
+
@async_to_sync
async def list_workflows(temporal, schedule_id: str):
"""List Workflows scheduled by given Schedule."""
diff --git a/posthog/migrations/0348_alter_datawarehousetable_format.py b/posthog/migrations/0348_alter_datawarehousetable_format.py
new file mode 100644
index 0000000000000..72434bbc99fdb
--- /dev/null
+++ b/posthog/migrations/0348_alter_datawarehousetable_format.py
@@ -0,0 +1,20 @@
+# Generated by Django 3.2.19 on 2023-09-11 15:22
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("posthog", "0347_add_bigquery_export_type"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="datawarehousetable",
+ name="format",
+ field=models.CharField(
+ choices=[("CSV", "CSV"), ("Parquet", "Parquet"), ("JSONEachRow", "JSON")], max_length=128
+ ),
+ ),
+ ]
diff --git a/posthog/migrations/0349_update_survey_query_name.py b/posthog/migrations/0349_update_survey_query_name.py
new file mode 100644
index 0000000000000..cbcbbb3a0c954
--- /dev/null
+++ b/posthog/migrations/0349_update_survey_query_name.py
@@ -0,0 +1,38 @@
+# Generated by Django 3.2.19 on 2023-09-12 10:35
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("posthog", "0348_alter_datawarehousetable_format"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="survey",
+ name="linked_flag",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="surveys_linked_flag",
+ related_query_name="survey_linked_flag",
+ to="posthog.featureflag",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="survey",
+ name="targeting_flag",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="surveys_targeting_flag",
+ related_query_name="survey_targeting_flag",
+ to="posthog.featureflag",
+ ),
+ ),
+ ]
diff --git a/posthog/migrations/0350_add_notebook_text_content.py b/posthog/migrations/0350_add_notebook_text_content.py
new file mode 100644
index 0000000000000..bfe4b079b9945
--- /dev/null
+++ b/posthog/migrations/0350_add_notebook_text_content.py
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.19 on 2023-09-12 18:09
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("posthog", "0349_update_survey_query_name"),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name="notebook",
+ name="text_content",
+ field=models.TextField(blank=True, null=True),
+ ),
+ ]
diff --git a/posthog/models/activity_logging/activity_log.py b/posthog/models/activity_logging/activity_log.py
index ba47b2c326ff1..f3b36e2c3dbd0 100644
--- a/posthog/models/activity_logging/activity_log.py
+++ b/posthog/models/activity_logging/activity_log.py
@@ -99,7 +99,7 @@ class Meta:
field_exclusions: Dict[ActivityScope, List[str]] = {
- "Notebook": ["id", "last_modified_at", "last_modified_by", "created_at", "created_by"],
+ "Notebook": ["id", "last_modified_at", "last_modified_by", "created_at", "created_by", "text_content"],
"FeatureFlag": ["id", "created_at", "created_by", "is_simple_flag", "experiment", "team", "featureflagoverride"],
"Person": [
"id",
diff --git a/posthog/models/event/util.py b/posthog/models/event/util.py
index 8b6a2fbd33d27..8f76e9f79fb91 100644
--- a/posthog/models/event/util.py
+++ b/posthog/models/event/util.py
@@ -3,7 +3,7 @@
import uuid
from typing import Any, Dict, List, Optional, Set, Union
-import pytz
+from zoneinfo import ZoneInfo
from dateutil.parser import isoparse
from django.utils import timezone
from rest_framework import serializers
@@ -47,7 +47,7 @@ def create_event(
timestamp = timezone.now()
assert timestamp is not None
- timestamp = isoparse(timestamp) if isinstance(timestamp, str) else timestamp.astimezone(pytz.utc)
+ timestamp = isoparse(timestamp) if isinstance(timestamp, str) else timestamp.astimezone(ZoneInfo("UTC"))
elements_chain = ""
if elements and len(elements) > 0:
@@ -89,7 +89,9 @@ def format_clickhouse_timestamp(
if default is None:
default = timezone.now()
parsed_datetime = (
- isoparse(raw_timestamp) if isinstance(raw_timestamp, str) else (raw_timestamp or default).astimezone(pytz.utc)
+ isoparse(raw_timestamp)
+ if isinstance(raw_timestamp, str)
+ else (raw_timestamp or default).astimezone(ZoneInfo("UTC"))
)
return parsed_datetime.strftime("%Y-%m-%d %H:%M:%S.%f")
@@ -110,16 +112,16 @@ def bulk_create_events(events: List[Dict[str, Any]], person_mapping: Optional[Di
inserts = []
params: Dict[str, Any] = {}
for index, event in enumerate(events):
- datetime64_default_timestamp = timezone.now().astimezone(pytz.utc).strftime("%Y-%m-%d %H:%M:%S")
+ datetime64_default_timestamp = timezone.now().astimezone(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S")
timestamp = event.get("timestamp") or dt.datetime.now()
if isinstance(timestamp, str):
timestamp = isoparse(timestamp)
# Offset timezone-naive datetime by project timezone, to facilitate @also_test_with_different_timezones
if timestamp.tzinfo is None:
team_timezone = event["team"].timezone if event.get("team") else "UTC"
- timestamp = pytz.timezone(team_timezone).localize(timestamp)
+ timestamp = timestamp.replace(tzinfo=ZoneInfo(team_timezone))
# Format for ClickHouse
- timestamp = timestamp.astimezone(pytz.utc).strftime("%Y-%m-%d %H:%M:%S.%f")
+ timestamp = timestamp.astimezone(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S.%f")
elements_chain = ""
if event.get("elements") and len(event["elements"]) > 0:
diff --git a/posthog/models/feedback/survey.py b/posthog/models/feedback/survey.py
index 9579ab7a5a782..5287747ea98f5 100644
--- a/posthog/models/feedback/survey.py
+++ b/posthog/models/feedback/survey.py
@@ -24,7 +24,7 @@ class Meta:
blank=True,
on_delete=models.SET_NULL,
related_name="surveys_linked_flag",
- related_query_name="survey",
+ related_query_name="survey_linked_flag",
)
targeting_flag: models.ForeignKey = models.ForeignKey(
"posthog.FeatureFlag",
@@ -32,7 +32,7 @@ class Meta:
blank=True,
on_delete=models.SET_NULL,
related_name="surveys_targeting_flag",
- related_query_name="survey",
+ related_query_name="survey_targeting_flag",
)
type: models.CharField = models.CharField(max_length=40, choices=SurveyType.choices)
diff --git a/posthog/models/filters/mixins/common.py b/posthog/models/filters/mixins/common.py
index 530b7b83b9d13..bbb727407c6be 100644
--- a/posthog/models/filters/mixins/common.py
+++ b/posthog/models/filters/mixins/common.py
@@ -4,7 +4,7 @@
from math import ceil
from typing import Any, Dict, List, Literal, Optional, Union, cast
-import pytz
+from zoneinfo import ZoneInfo
from dateutil.relativedelta import relativedelta
from django.utils import timezone
from rest_framework.exceptions import ValidationError
@@ -361,11 +361,13 @@ def date_to(self) -> datetime.datetime:
if isinstance(self._date_to, str):
try:
return datetime.datetime.strptime(self._date_to, "%Y-%m-%d").replace(
- hour=23, minute=59, second=59, microsecond=999999, tzinfo=pytz.UTC
+ hour=23, minute=59, second=59, microsecond=999999, tzinfo=ZoneInfo("UTC")
)
except ValueError:
try:
- return datetime.datetime.strptime(self._date_to, "%Y-%m-%d %H:%M:%S").replace(tzinfo=pytz.UTC)
+ return datetime.datetime.strptime(self._date_to, "%Y-%m-%d %H:%M:%S").replace(
+ tzinfo=ZoneInfo("UTC")
+ )
except ValueError:
date, delta_mapping = relative_date_parse_with_delta_mapping(self._date_to, self.team.timezone_info, always_truncate=True) # type: ignore
self.date_to_delta_mapping = delta_mapping
diff --git a/posthog/models/filters/mixins/retention.py b/posthog/models/filters/mixins/retention.py
index a6d38bf76e7ce..53146bf62a7b3 100644
--- a/posthog/models/filters/mixins/retention.py
+++ b/posthog/models/filters/mixins/retention.py
@@ -90,7 +90,7 @@ def date_to(self) -> datetime:
date_to = date_to + self.period_increment
if self.period == "Hour":
- return date_to
+ return date_to.replace(minute=0, second=0, microsecond=0)
else:
return date_to.replace(hour=0, minute=0, second=0, microsecond=0)
diff --git a/posthog/models/filters/test/__snapshots__/test_filter.ambr b/posthog/models/filters/test/__snapshots__/test_filter.ambr
index 922fdf12a27f1..9be8465ff5f0f 100644
--- a/posthog/models/filters/test/__snapshots__/test_filter.ambr
+++ b/posthog/models/filters/test/__snapshots__/test_filter.ambr
@@ -11,6 +11,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -22,6 +23,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -60,6 +62,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -71,6 +74,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -109,6 +113,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -120,6 +125,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -158,6 +164,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -169,6 +176,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -207,6 +215,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -218,6 +227,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
diff --git a/posthog/models/group/util.py b/posthog/models/group/util.py
index 1fcf975ca70b2..fa3520dc9912c 100644
--- a/posthog/models/group/util.py
+++ b/posthog/models/group/util.py
@@ -2,7 +2,7 @@
import json
from typing import Dict, Optional, Union
-import pytz
+from zoneinfo import ZoneInfo
from dateutil.parser import isoparse
from django.utils.timezone import now
@@ -27,7 +27,7 @@ def raw_create_group_ch(
DON'T USE DIRECTLY - `create_group` is the correct option,
unless you specifically want to sync Postgres state from ClickHouse yourself."""
if timestamp is None:
- timestamp = now().astimezone(pytz.utc)
+ timestamp = now().astimezone(ZoneInfo("UTC"))
data = {
"group_type_index": group_type_index,
"group_key": group_key,
@@ -58,7 +58,7 @@ def create_group(
if isinstance(timestamp, str):
timestamp = isoparse(timestamp)
else:
- timestamp = timestamp.astimezone(pytz.utc)
+ timestamp = timestamp.astimezone(ZoneInfo("UTC"))
raw_create_group_ch(team_id, group_type_index, group_key, properties, timestamp, timestamp=timestamp, sync=sync)
group = Group.objects.create(
diff --git a/posthog/models/notebook/notebook.py b/posthog/models/notebook/notebook.py
index dde92fddab944..490645909df26 100644
--- a/posthog/models/notebook/notebook.py
+++ b/posthog/models/notebook/notebook.py
@@ -12,6 +12,7 @@ class Notebook(UUIDModel):
team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE)
title: models.CharField = models.CharField(max_length=256, blank=True, null=True)
content: JSONField = JSONField(default=None, null=True, blank=True)
+ text_content: models.TextField = models.TextField(blank=True, null=True)
deleted: models.BooleanField = models.BooleanField(default=False)
version: models.IntegerField = models.IntegerField(default=0)
created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True)
diff --git a/posthog/models/person/util.py b/posthog/models/person/util.py
index 0be065a3258fb..9af13bc6e9d05 100644
--- a/posthog/models/person/util.py
+++ b/posthog/models/person/util.py
@@ -4,7 +4,7 @@
from typing import Dict, List, Optional, Union
from uuid import UUID
-import pytz
+from zoneinfo import ZoneInfo
from dateutil.parser import isoparse
from django.db.models.query import QuerySet
from django.db.models.signals import post_delete, post_save
@@ -124,12 +124,12 @@ def create_person(
if isinstance(timestamp, str):
timestamp = isoparse(timestamp)
else:
- timestamp = timestamp.astimezone(pytz.utc)
+ timestamp = timestamp.astimezone(ZoneInfo("UTC"))
if created_at is None:
created_at = timestamp
else:
- created_at = created_at.astimezone(pytz.utc)
+ created_at = created_at.astimezone(ZoneInfo("UTC"))
data = {
"id": str(uuid),
diff --git a/posthog/models/session_replay_event/migrations_sql.py b/posthog/models/session_replay_event/migrations_sql.py
index 09f4e300be624..b11f5581c930f 100644
--- a/posthog/models/session_replay_event/migrations_sql.py
+++ b/posthog/models/session_replay_event/migrations_sql.py
@@ -65,3 +65,29 @@
table_name=SESSION_REPLAY_EVENTS_DATA_TABLE(),
cluster=settings.CLICKHOUSE_CLUSTER,
)
+
+# migration to add message_count, event_count and _timestamp columns to the session replay table
+ALTER_SESSION_REPLAY_ADD_EVENT_COUNT_COLUMN = """
+ ALTER TABLE {table_name} on CLUSTER '{cluster}'
+ ADD COLUMN IF NOT EXISTS message_count SimpleAggregateFunction(sum, Int64),
+ ADD COLUMN IF NOT EXISTS event_count SimpleAggregateFunction(sum, Int64),
+ -- fly-by addition so that we can track lag in the data the same way as for other tables
+ ADD COLUMN IF NOT EXISTS _timestamp SimpleAggregateFunction(max, DateTime)
+"""
+
+ADD_EVENT_COUNT_DISTRIBUTED_SESSION_REPLAY_EVENTS_TABLE_SQL = (
+ lambda: ALTER_SESSION_REPLAY_ADD_EVENT_COUNT_COLUMN.format(
+ table_name="session_replay_events",
+ cluster=settings.CLICKHOUSE_CLUSTER,
+ )
+)
+
+ADD_EVENT_COUNT_WRITABLE_SESSION_REPLAY_EVENTS_TABLE_SQL = lambda: ALTER_SESSION_REPLAY_ADD_EVENT_COUNT_COLUMN.format(
+ table_name="writable_session_replay_events",
+ cluster=settings.CLICKHOUSE_CLUSTER,
+)
+
+ADD_EVENT_COUNT_SESSION_REPLAY_EVENTS_TABLE_SQL = lambda: ALTER_SESSION_REPLAY_ADD_EVENT_COUNT_COLUMN.format(
+ table_name=SESSION_REPLAY_EVENTS_DATA_TABLE(),
+ cluster=settings.CLICKHOUSE_CLUSTER,
+)
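For reference, a standalone sketch of how the lambdas above render the shared ALTER template; the cluster name below is a placeholder, not necessarily a real deployment value.

ALTER_TEMPLATE = """
    ALTER TABLE {table_name} on CLUSTER '{cluster}'
    ADD COLUMN IF NOT EXISTS message_count SimpleAggregateFunction(sum, Int64),
    ADD COLUMN IF NOT EXISTS event_count SimpleAggregateFunction(sum, Int64),
    ADD COLUMN IF NOT EXISTS _timestamp SimpleAggregateFunction(max, DateTime)
"""

# Each exported lambda binds a different table name to the same template.
for table in ("session_replay_events", "writable_session_replay_events"):
    print(ALTER_TEMPLATE.format(table_name=table, cluster="example_cluster"))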
diff --git a/posthog/models/session_replay_event/sql.py b/posthog/models/session_replay_event/sql.py
index 1221fd80bb6de..dfe839843979f 100644
--- a/posthog/models/session_replay_event/sql.py
+++ b/posthog/models/session_replay_event/sql.py
@@ -27,7 +27,9 @@
console_log_count Int64,
console_warn_count Int64,
console_error_count Int64,
- size Int64
+ size Int64,
+ event_count Int64,
+ message_count Int64
) ENGINE = {engine}
"""
@@ -53,7 +55,15 @@
console_warn_count SimpleAggregateFunction(sum, Int64),
console_error_count SimpleAggregateFunction(sum, Int64),
-- this column allows us to estimate the amount of data that is being ingested
- size SimpleAggregateFunction(sum, Int64)
+ size SimpleAggregateFunction(sum, Int64),
+ -- this allows us to count the number of messages received in a session
+ -- often very useful in incidents or debugging
+ message_count SimpleAggregateFunction(sum, Int64),
+ -- this allows us to count the number of snapshot events received in a session
+ -- often very useful in incidents or debugging
+ -- because we batch events we expect message_count to be lower than event_count
+ event_count SimpleAggregateFunction(sum, Int64),
+ _timestamp SimpleAggregateFunction(max, DateTime)
) ENGINE = {engine}
"""
@@ -117,7 +127,11 @@
sum(console_log_count) as console_log_count,
sum(console_warn_count) as console_warn_count,
sum(console_error_count) as console_error_count,
-sum(size) as size
+sum(size) as size,
+-- we can count the number of kafka messages instead of sending it explicitly
+sum(message_count) as message_count,
+sum(event_count) as event_count,
+max(_timestamp) as _timestamp
FROM {database}.kafka_session_replay_events
group by session_id, team_id
""".format(
diff --git a/posthog/models/test/test_subscription_model.py b/posthog/models/test/test_subscription_model.py
index 232b6c99791cc..bc9bf583e6f15 100644
--- a/posthog/models/test/test_subscription_model.py
+++ b/posthog/models/test/test_subscription_model.py
@@ -3,7 +3,7 @@
import jwt
import pytest
-import pytz
+from zoneinfo import ZoneInfo
from django.conf import settings
from django.utils import timezone
from freezegun import freeze_time
@@ -33,7 +33,7 @@ def _create_insight_subscription(self, **kwargs):
target_value="tests@posthog.com",
frequency="weekly",
interval=2,
- start_date=datetime(2022, 1, 1, 0, 0, 0, 0).replace(tzinfo=pytz.UTC),
+ start_date=datetime(2022, 1, 1, 0, 0, 0, 0).replace(tzinfo=ZoneInfo("UTC")),
)
params.update(**kwargs)
@@ -44,8 +44,8 @@ def test_creation(self):
subscription.save()
assert subscription.title == "My Subscription"
- subscription.set_next_delivery_date(datetime(2022, 1, 2, 0, 0, 0).replace(tzinfo=pytz.UTC))
- assert subscription.next_delivery_date == datetime(2022, 1, 15, 0, 0).replace(tzinfo=pytz.UTC)
+ subscription.set_next_delivery_date(datetime(2022, 1, 2, 0, 0, 0).replace(tzinfo=ZoneInfo("UTC")))
+ assert subscription.next_delivery_date == datetime(2022, 1, 15, 0, 0).replace(tzinfo=ZoneInfo("UTC"))
def test_update_next_delivery_date_on_save(self):
subscription = self._create_insight_subscription()
@@ -60,7 +60,7 @@ def test_only_updates_next_delivery_date_if_rrule_changes(self):
old_date = subscription.next_delivery_date
# Change a property that does affect it
- subscription.start_date = datetime(2023, 1, 1, 0, 0, 0, 0).replace(tzinfo=pytz.UTC)
+ subscription.start_date = datetime(2023, 1, 1, 0, 0, 0, 0).replace(tzinfo=ZoneInfo("UTC"))
subscription.save()
assert old_date != subscription.next_delivery_date
old_date = subscription.next_delivery_date
@@ -72,7 +72,6 @@ def test_only_updates_next_delivery_date_if_rrule_changes(self):
assert old_date == subscription.next_delivery_date
def test_generating_token(self):
-
subscription = self._create_insight_subscription(
target_value="test1@posthog.com,test2@posthog.com,test3@posthog.com"
)
@@ -143,13 +142,13 @@ def test_complex_rrule_configuration(self):
# Last wed or fri of 01.22 is Fri 28th
subscription.save()
- assert subscription.next_delivery_date == datetime(2022, 1, 28, 0, 0).replace(tzinfo=pytz.UTC)
+ assert subscription.next_delivery_date == datetime(2022, 1, 28, 0, 0).replace(tzinfo=ZoneInfo("UTC"))
# Last wed or fri of 03.22 is Wed 30th
subscription.set_next_delivery_date(subscription.next_delivery_date)
- assert subscription.next_delivery_date == datetime(2022, 3, 30, 0, 0).replace(tzinfo=pytz.UTC)
+ assert subscription.next_delivery_date == datetime(2022, 3, 30, 0, 0).replace(tzinfo=ZoneInfo("UTC"))
# Last wed or fri of 05.22 is Fri 27th
subscription.set_next_delivery_date(subscription.next_delivery_date)
- assert subscription.next_delivery_date == datetime(2022, 5, 27, 0, 0).replace(tzinfo=pytz.UTC)
+ assert subscription.next_delivery_date == datetime(2022, 5, 27, 0, 0).replace(tzinfo=ZoneInfo("UTC"))
def test_should_work_for_nth_days(self):
# Equivalent to last monday and wednesday of every other month
@@ -160,15 +159,15 @@ def test_should_work_for_nth_days(self):
byweekday=["monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday"],
)
subscription.save()
- assert subscription.next_delivery_date == datetime(2022, 1, 3, 0, 0).replace(tzinfo=pytz.UTC)
+ assert subscription.next_delivery_date == datetime(2022, 1, 3, 0, 0).replace(tzinfo=ZoneInfo("UTC"))
subscription.set_next_delivery_date(subscription.next_delivery_date)
- assert subscription.next_delivery_date == datetime(2022, 2, 3, 0, 0).replace(tzinfo=pytz.UTC)
+ assert subscription.next_delivery_date == datetime(2022, 2, 3, 0, 0).replace(tzinfo=ZoneInfo("UTC"))
def test_should_ignore_bysetpos_if_missing_weeekday(self):
# Equivalent to last monday and wednesday of every other month
subscription = self._create_insight_subscription(interval=1, frequency="monthly", bysetpos=3)
subscription.save()
- assert subscription.next_delivery_date == datetime(2022, 2, 1, 0, 0).replace(tzinfo=pytz.UTC)
+ assert subscription.next_delivery_date == datetime(2022, 2, 1, 0, 0).replace(tzinfo=ZoneInfo("UTC"))
def test_subscription_summary(self):
subscription = self._create_insight_subscription(interval=1, frequency="monthly", bysetpos=None)
diff --git a/posthog/queries/app_metrics/historical_exports.py b/posthog/queries/app_metrics/historical_exports.py
index 4b7f2864546ad..484f01546001b 100644
--- a/posthog/queries/app_metrics/historical_exports.py
+++ b/posthog/queries/app_metrics/historical_exports.py
@@ -2,7 +2,7 @@
from datetime import timedelta
from typing import Dict, Optional
-import pytz
+from zoneinfo import ZoneInfo
from posthog.models.activity_logging.activity_log import ActivityLog
from posthog.models.plugin import PluginStorage
@@ -65,10 +65,12 @@ def historical_export_metrics(team: Team, plugin_config_id: int, job_id: str):
filter_data = {
"category": "exportEvents",
"job_id": job_id,
- "date_from": (export_summary["created_at"] - timedelta(hours=1)).astimezone(pytz.utc).isoformat(),
+ "date_from": (export_summary["created_at"] - timedelta(hours=1)).astimezone(ZoneInfo("UTC")).isoformat(),
}
if "finished_at" in export_summary:
- filter_data["date_to"] = (export_summary["finished_at"] + timedelta(hours=1)).astimezone(pytz.utc).isoformat()
+ filter_data["date_to"] = (
+ (export_summary["finished_at"] + timedelta(hours=1)).astimezone(ZoneInfo("UTC")).isoformat()
+ )
filter = AppMetricsRequestSerializer(data=filter_data)
filter.is_valid(raise_exception=True)
diff --git a/posthog/queries/funnels/test/test_funnel_trends.py b/posthog/queries/funnels/test/test_funnel_trends.py
index 1cb191c017ad2..12e8b81af02a5 100644
--- a/posthog/queries/funnels/test/test_funnel_trends.py
+++ b/posthog/queries/funnels/test/test_funnel_trends.py
@@ -1,6 +1,6 @@
from datetime import date, datetime, timedelta
-import pytz
+from zoneinfo import ZoneInfo
from freezegun.api import freeze_time
from posthog.constants import INSIGHT_FUNNELS, TRENDS_LINEAR, FunnelOrderType
@@ -113,43 +113,43 @@ def test_only_one_user_reached_one_step(self):
"reached_to_step_count": 0,
"conversion_rate": 0,
"reached_from_step_count": 1,
- "timestamp": datetime(2021, 6, 7, 0, 0).replace(tzinfo=pytz.UTC),
+ "timestamp": datetime(2021, 6, 7, 0, 0).replace(tzinfo=ZoneInfo("UTC")),
},
{
"reached_to_step_count": 0,
"conversion_rate": 0,
"reached_from_step_count": 0,
- "timestamp": datetime(2021, 6, 8, 0, 0).replace(tzinfo=pytz.UTC),
+ "timestamp": datetime(2021, 6, 8, 0, 0).replace(tzinfo=ZoneInfo("UTC")),
},
{
"reached_to_step_count": 0,
"conversion_rate": 0,
"reached_from_step_count": 0,
- "timestamp": datetime(2021, 6, 9, 0, 0).replace(tzinfo=pytz.UTC),
+ "timestamp": datetime(2021, 6, 9, 0, 0).replace(tzinfo=ZoneInfo("UTC")),
},
{
"reached_to_step_count": 0,
"conversion_rate": 0,
"reached_from_step_count": 0,
- "timestamp": datetime(2021, 6, 10, 0, 0).replace(tzinfo=pytz.UTC),
+ "timestamp": datetime(2021, 6, 10, 0, 0).replace(tzinfo=ZoneInfo("UTC")),
},
{
"reached_to_step_count": 0,
"conversion_rate": 0,
"reached_from_step_count": 0,
- "timestamp": datetime(2021, 6, 11, 0, 0).replace(tzinfo=pytz.UTC),
+ "timestamp": datetime(2021, 6, 11, 0, 0).replace(tzinfo=ZoneInfo("UTC")),
},
{
"reached_to_step_count": 0,
"conversion_rate": 0,
"reached_from_step_count": 0,
- "timestamp": datetime(2021, 6, 12, 0, 0).replace(tzinfo=pytz.UTC),
+ "timestamp": datetime(2021, 6, 12, 0, 0).replace(tzinfo=ZoneInfo("UTC")),
},
{
"reached_to_step_count": 0,
"conversion_rate": 0,
"reached_from_step_count": 0,
- "timestamp": datetime(2021, 6, 13, 0, 0).replace(tzinfo=pytz.UTC),
+ "timestamp": datetime(2021, 6, 13, 0, 0).replace(tzinfo=ZoneInfo("UTC")),
},
],
)
@@ -531,8 +531,8 @@ def test_period_not_final(self):
self.assertEqual(day["reached_to_step_count"], 0)
self.assertEqual(day["conversion_rate"], 0)
self.assertEqual(
- day["timestamp"].replace(tzinfo=pytz.UTC),
- (datetime(now.year, now.month, now.day) - timedelta(1)).replace(tzinfo=pytz.UTC),
+ day["timestamp"].replace(tzinfo=ZoneInfo("UTC")),
+ (datetime(now.year, now.month, now.day) - timedelta(1)).replace(tzinfo=ZoneInfo("UTC")),
)
day = results[1] # today
@@ -540,7 +540,8 @@ def test_period_not_final(self):
self.assertEqual(day["reached_to_step_count"], 1)
self.assertEqual(day["conversion_rate"], 100)
self.assertEqual(
- day["timestamp"].replace(tzinfo=pytz.UTC), datetime(now.year, now.month, now.day).replace(tzinfo=pytz.UTC)
+ day["timestamp"].replace(tzinfo=ZoneInfo("UTC")),
+ datetime(now.year, now.month, now.day).replace(tzinfo=ZoneInfo("UTC")),
)
def test_two_runs_by_single_user_in_one_period(self):
diff --git a/posthog/queries/properties_timeline/properties_timeline_event_query.py b/posthog/queries/properties_timeline/properties_timeline_event_query.py
index 5f35a5d91869a..d3ca17eb70091 100644
--- a/posthog/queries/properties_timeline/properties_timeline_event_query.py
+++ b/posthog/queries/properties_timeline/properties_timeline_event_query.py
@@ -1,7 +1,6 @@
import datetime as dt
from typing import Any, Dict, Optional, Tuple
-
-import pytz
+from zoneinfo import ZoneInfo
from posthog.models.entity.util import get_entity_filtering_params
from posthog.models.filters.properties_timeline_filter import PropertiesTimelineFilter
@@ -76,7 +75,7 @@ def _determine_should_join_sessions(self) -> None:
def _get_date_filter(self) -> Tuple[str, Dict]:
query_params: Dict[str, Any] = {}
query_date_range = QueryDateRange(self._filter, self._team)
- effective_timezone = pytz.timezone(self._team.timezone)
+ effective_timezone = ZoneInfo(self._team.timezone)
# Get effective date range from QueryDateRange
# We need to explicitly replace tzinfo in those datetimes with the team's timezone, because QueryDateRange
# does not reliably make those datetimes timezone-aware. That's annoying, but it'd be a significant effort
diff --git a/posthog/queries/query_date_range.py b/posthog/queries/query_date_range.py
index 927d2766a2358..208bf0207843d 100644
--- a/posthog/queries/query_date_range.py
+++ b/posthog/queries/query_date_range.py
@@ -1,8 +1,8 @@
from datetime import datetime, timedelta
from functools import cached_property
from typing import Dict, Literal, Optional, Tuple
+from zoneinfo import ZoneInfo
-import pytz
from dateutil.relativedelta import relativedelta
from django.utils import timezone
from posthog.models.filters.base_filter import BaseFilter
@@ -82,7 +82,7 @@ def _now(self):
return self._localize_to_team(timezone.now())
def _localize_to_team(self, target: datetime):
- return target.astimezone(pytz.timezone(self._team.timezone))
+ return target.astimezone(ZoneInfo(self._team.timezone))
@cached_property
def date_to_clause(self):
diff --git a/posthog/queries/retention/retention.py b/posthog/queries/retention/retention.py
index dc8f41175521d..145ee1404c37b 100644
--- a/posthog/queries/retention/retention.py
+++ b/posthog/queries/retention/retention.py
@@ -1,7 +1,6 @@
from typing import Any, Dict, List, Optional, Tuple
from urllib.parse import urlencode
-
-import pytz
+from zoneinfo import ZoneInfo
from posthog.constants import RETENTION_FIRST_TIME, RetentionQueryType
from posthog.models.filters.retention_filter import RetentionFilter
@@ -33,7 +32,6 @@ def run(self, filter: RetentionFilter, team: Team, *args, **kwargs) -> List[Dict
def _get_retention_by_breakdown_values(
self, filter: RetentionFilter, team: Team
) -> Dict[CohortKey, Dict[str, Any]]:
-
actor_query, actor_query_params = build_actor_activity_query(
filter=filter, team=team, retention_events_query=self.event_query
)
@@ -109,11 +107,8 @@ def construct_url(first_day):
for day in range(filter.total_intervals - first_day)
],
"label": "{} {}".format(filter.period, first_day),
- "date": pytz.timezone(team.timezone).localize(
- (filter.date_from + RetentionFilter.determine_time_delta(first_day, filter.period)[0]).replace(
- tzinfo=None
- )
- ),
+ "date": filter.date_from.replace(tzinfo=ZoneInfo(team.timezone))
+ + RetentionFilter.determine_time_delta(first_day, filter.period)[0],
"people_url": construct_url(first_day),
}
for first_day in range(filter.total_intervals)
diff --git a/posthog/queries/session_recordings/session_recording_list_from_replay_summary.py b/posthog/queries/session_recordings/session_recording_list_from_replay_summary.py
index 706fb958b08b9..fea81ced5d0eb 100644
--- a/posthog/queries/session_recordings/session_recording_list_from_replay_summary.py
+++ b/posthog/queries/session_recordings/session_recording_list_from_replay_summary.py
@@ -1,13 +1,13 @@
import dataclasses
-import datetime
import re
-from datetime import timedelta
-from typing import Any, Dict, List, NamedTuple, Tuple, Union
-from typing import Literal
+from datetime import datetime, timedelta
+from typing import Any, Dict, List, Literal, NamedTuple, Tuple, Union
+
+from django.conf import settings
from posthog.client import sync_execute
-from posthog.constants import PropertyOperatorType
-from posthog.constants import TREND_FILTER_TYPE_ACTIONS
+from posthog.cloud_utils import is_cloud
+from posthog.constants import TREND_FILTER_TYPE_ACTIONS, AvailableFeature, PropertyOperatorType
from posthog.models import Entity
from posthog.models.action.util import format_entity_filter
from posthog.models.filters.mixins.utils import cached_property
@@ -16,6 +16,7 @@
from posthog.models.property import PropertyGroup
from posthog.models.property.util import parse_prop_grouped_clauses
from posthog.models.team import PersonOnEventsMode
+from posthog.models.team.team import Team
from posthog.queries.event_query import EventQuery
from posthog.queries.util import PersonPropertiesMode
@@ -54,6 +55,22 @@ def _get_filter_by_provided_session_ids_clause(
return f'AND "{column_name}" in %(session_ids)s', {"session_ids": recording_filters.session_ids}
+def ttl_days(team: Team) -> int:
+ ttl_days = (get_instance_setting("RECORDINGS_TTL_WEEKS") or 3) * 7
+ if is_cloud():
+ # NOTE: We use Playlists as a proxy to see if they are subscribed to Recordings
+ is_paid = team.organization.is_feature_available(AvailableFeature.RECORDINGS_PLAYLISTS)
+ ttl_days = settings.REPLAY_RETENTION_DAYS_MAX if is_paid else settings.REPLAY_RETENTION_DAYS_MIN
+
+ # NOTE: The date we started reliably ingesting data to blob storage
+ days_since_blob_ingestion = (datetime.now() - datetime(2023, 8, 1)).days
+
+ if days_since_blob_ingestion < ttl_days:
+ ttl_days = days_since_blob_ingestion
+
+ return ttl_days
+
+
class PersonsQuery(EventQuery):
_filter: SessionRecordingsFilter
@@ -162,7 +179,10 @@ def __init__(
super().__init__(
**kwargs,
)
- self.ttl_days = (get_instance_setting("RECORDINGS_TTL_WEEKS") or 3) * 7
+
+ @property
+ def ttl_days(self):
+ return ttl_days(self._team)
_raw_events_query = """
SELECT
@@ -276,7 +296,7 @@ def get_query(self, select_event_ids: bool = False) -> Tuple[str, Dict[str, Any]
base_params = {
"team_id": self._team_id,
- "clamped_to_storage_ttl": (datetime.datetime.now() - datetime.timedelta(days=self.ttl_days)),
+ "clamped_to_storage_ttl": (datetime.now() - timedelta(days=self.ttl_days)),
}
_, recording_start_time_params = _get_recording_start_time_clause(self._filter)
@@ -381,7 +401,10 @@ def __init__(
super().__init__(
**kwargs,
)
- self.ttl_days = (get_instance_setting("RECORDINGS_TTL_WEEKS") or 3) * 7
+
+ @property
+ def ttl_days(self):
+ return ttl_days(self._team)
_session_recordings_query: str = """
SELECT
@@ -471,7 +494,7 @@ def get_query(self) -> Tuple[str, Dict[str, Any]]:
"team_id": self._team_id,
"limit": self.limit + 1,
"offset": offset,
- "clamped_to_storage_ttl": (datetime.datetime.now() - datetime.timedelta(days=self.ttl_days)),
+ "clamped_to_storage_ttl": (datetime.now() - timedelta(days=self.ttl_days)),
}
_, recording_start_time_params = _get_recording_start_time_clause(self._filter)
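A worked standalone sketch of the clamp inside ttl_days above, reusing its 2023-08-01 blob-ingestion start date; the 30/90-day figures match the values asserted in the new test below.

from datetime import datetime

BLOB_INGESTION_START = datetime(2023, 8, 1)

def clamped_ttl_days(plan_ttl_days: int, now: datetime) -> int:
    # Never report a TTL longer than the period for which blob data can exist.
    days_since_blob_ingestion = (now - BLOB_INGESTION_START).days
    return min(plan_ttl_days, days_since_blob_ingestion)

assert clamped_ttl_days(90, datetime(2023, 9, 5)) == 35   # paid plan, clamped to 35 days
assert clamped_ttl_days(30, datetime(2023, 9, 1)) == 30   # free plan, no clamping needed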
diff --git a/posthog/queries/session_recordings/test/test_session_recording_list_from_session_replay.py b/posthog/queries/session_recordings/test/test_session_recording_list_from_session_replay.py
index 9f920fd5aa523..88484f316e150 100644
--- a/posthog/queries/session_recordings/test/test_session_recording_list_from_session_replay.py
+++ b/posthog/queries/session_recordings/test/test_session_recording_list_from_session_replay.py
@@ -6,6 +6,8 @@
from freezegun.api import freeze_time
from posthog.clickhouse.client import sync_execute
+from posthog.cloud_utils import TEST_clear_cloud_cache
+from posthog.constants import AvailableFeature
from posthog.models import Person, Cohort
from posthog.models.action import Action
from posthog.models.action_step import ActionStep
@@ -14,6 +16,7 @@
from posthog.models.team import Team
from posthog.queries.session_recordings.session_recording_list_from_replay_summary import (
SessionRecordingListFromReplaySummary,
+ ttl_days,
)
from posthog.queries.session_recordings.test.session_replay_sql import produce_replay_summary
from posthog.test.base import (
@@ -21,8 +24,8 @@
ClickhouseTestMixin,
_create_event,
also_test_with_materialized_columns,
- snapshot_clickhouse_queries,
flush_persons_and_events,
+ snapshot_clickhouse_queries,
)
@@ -617,6 +620,26 @@ def test_event_filter_has_ttl_applied_too(self):
assert len(session_recordings) == 1
assert session_recordings[0]["session_id"] == session_id_one
+ @snapshot_clickhouse_queries
+ def test_ttl_days(self):
+ assert ttl_days(self.team) == 21
+
+ TEST_clear_cloud_cache()
+ with self.is_cloud(True):
+ # Far enough in the future from `days_since_blob_ingestion` but not paid
+ with freeze_time("2023-09-01T12:00:01Z"):
+ assert ttl_days(self.team) == 30
+
+ self.team.organization.available_features = [AvailableFeature.RECORDINGS_PLAYLISTS]
+
+ # Far enough in the future from `days_since_blob_ingestion` but paid
+ with freeze_time("2023-12-01T12:00:01Z"):
+ assert ttl_days(self.team) == 90
+
+ # Not far enough in the future from `days_since_blob_ingestion`
+ with freeze_time("2023-09-05T12:00:01Z"):
+ assert ttl_days(self.team) == 35
+
@snapshot_clickhouse_queries
def test_event_filter_with_active_sessions(
self,
diff --git a/posthog/queries/session_recordings/test/test_session_replay_summaries.py b/posthog/queries/session_recordings/test/test_session_replay_summaries.py
index 0a87ac7473e5a..0b3e361fa9511 100644
--- a/posthog/queries/session_recordings/test/test_session_replay_summaries.py
+++ b/posthog/queries/session_recordings/test/test_session_replay_summaries.py
@@ -1,7 +1,7 @@
from datetime import datetime, timedelta
from uuid import uuid4
-import pytz
+from zoneinfo import ZoneInfo
from dateutil.parser import isoparse
from freezegun import freeze_time
@@ -147,8 +147,8 @@ def test_session_replay_summaries_can_be_queried(self):
session_id,
self.team.pk,
str(self.user.distinct_id),
- datetime(2023, 4, 27, 10, 0, 0, 309000, tzinfo=pytz.UTC),
- datetime(2023, 4, 27, 19, 20, 24, 597000, tzinfo=pytz.UTC),
+ datetime(2023, 4, 27, 10, 0, 0, 309000, tzinfo=ZoneInfo("UTC")),
+ datetime(2023, 4, 27, 19, 20, 24, 597000, tzinfo=ZoneInfo("UTC")),
33624,
"https://first-url-ingested.com",
6,
diff --git a/posthog/queries/test/test_retention.py b/posthog/queries/test/test_retention.py
index 7f49141447b9a..42b7c596b14a9 100644
--- a/posthog/queries/test/test_retention.py
+++ b/posthog/queries/test/test_retention.py
@@ -2,7 +2,7 @@
import uuid
from datetime import datetime
-import pytz
+from zoneinfo import ZoneInfo
from django.test import override_settings
from rest_framework import status
@@ -36,15 +36,14 @@ def _create_action(**kwargs):
def _create_signup_actions(team, user_and_timestamps):
-
for distinct_id, timestamp in user_and_timestamps:
_create_event(team=team, event="sign up", distinct_id=distinct_id, timestamp=timestamp)
sign_up_action = _create_action(team=team, name="sign up")
return sign_up_action
-def _date(day, hour=5, month=0):
- return datetime(2020, 6 + month, 10 + day, hour).isoformat()
+def _date(day, hour=5, month=0, minute=0):
+ return datetime(2020, 6 + month, 10 + day, hour, minute).isoformat()
def pluck(list_of_dicts, key, child_key=None):
@@ -53,7 +52,7 @@ def pluck(list_of_dicts, key, child_key=None):
def _create_events(team, user_and_timestamps, event="$pageview"):
i = 0
- for (distinct_id, timestamp, *properties_args) in user_and_timestamps:
+ for distinct_id, timestamp, *properties_args in user_and_timestamps:
properties = {"$some_property": "value"} if i % 2 == 0 else {}
if len(properties_args) == 1:
properties.update(properties_args[0])
@@ -129,7 +128,7 @@ def test_day_interval(self):
pluck(result, "label"),
["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6", "Day 7", "Day 8", "Day 9", "Day 10"],
)
- self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=pytz.UTC))
+ self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")))
self.assertEqual(
pluck(result, "values", "count"),
@@ -211,17 +210,17 @@ def test_month_interval(self):
self.assertEqual(
pluck(result, "date"),
[
- datetime(2020, 1, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 2, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 3, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 4, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 5, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 6, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 7, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 8, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 9, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 10, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 11, 10, 0, tzinfo=pytz.UTC),
+ datetime(2020, 1, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 2, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 3, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 4, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 5, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 7, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 8, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 9, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 10, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 11, 10, 0, tzinfo=ZoneInfo("UTC")),
],
)
@@ -372,17 +371,17 @@ def test_month_interval_with_person_on_events_v2(self):
self.assertEqual(
pluck(result, "date"),
[
- datetime(2020, 1, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 2, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 3, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 4, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 5, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 6, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 7, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 8, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 9, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 10, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 11, 10, 0, tzinfo=pytz.UTC),
+ datetime(2020, 1, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 2, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 3, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 4, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 5, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 7, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 8, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 9, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 10, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 11, 10, 0, tzinfo=ZoneInfo("UTC")),
],
)
@@ -425,13 +424,13 @@ def test_week_interval(self):
self.assertEqual(
pluck(result, "date"),
[
- datetime(2020, 6, 7, 0, tzinfo=pytz.UTC),
- datetime(2020, 6, 14, 0, tzinfo=pytz.UTC),
- datetime(2020, 6, 21, 0, tzinfo=pytz.UTC),
- datetime(2020, 6, 28, 0, tzinfo=pytz.UTC),
- datetime(2020, 7, 5, 0, tzinfo=pytz.UTC),
- datetime(2020, 7, 12, 0, tzinfo=pytz.UTC),
- datetime(2020, 7, 19, 0, tzinfo=pytz.UTC),
+ datetime(2020, 6, 7, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 14, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 21, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 28, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 7, 5, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 7, 12, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 7, 19, 0, tzinfo=ZoneInfo("UTC")),
],
)
@@ -457,7 +456,7 @@ def test_hour_interval(self):
],
)
- filter = RetentionFilter(data={"date_to": _date(0, hour=16), "period": "Hour"})
+ filter = RetentionFilter(data={"date_to": _date(0, hour=16, minute=13), "period": "Hour"})
result = retention().run(filter, self.team, total_intervals=11)
@@ -498,17 +497,17 @@ def test_hour_interval(self):
self.assertEqual(
pluck(result, "date"),
[
- datetime(2020, 6, 10, 6, tzinfo=pytz.UTC),
- datetime(2020, 6, 10, 7, tzinfo=pytz.UTC),
- datetime(2020, 6, 10, 8, tzinfo=pytz.UTC),
- datetime(2020, 6, 10, 9, tzinfo=pytz.UTC),
- datetime(2020, 6, 10, 10, tzinfo=pytz.UTC),
- datetime(2020, 6, 10, 11, tzinfo=pytz.UTC),
- datetime(2020, 6, 10, 12, tzinfo=pytz.UTC),
- datetime(2020, 6, 10, 13, tzinfo=pytz.UTC),
- datetime(2020, 6, 10, 14, tzinfo=pytz.UTC),
- datetime(2020, 6, 10, 15, tzinfo=pytz.UTC),
- datetime(2020, 6, 10, 16, tzinfo=pytz.UTC),
+ datetime(2020, 6, 10, 6, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 10, 7, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 10, 8, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 10, 9, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 10, 10, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 10, 11, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 10, 12, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 10, 13, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 10, 14, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 10, 15, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 10, 16, tzinfo=ZoneInfo("UTC")),
],
)
@@ -552,13 +551,13 @@ def test_interval_rounding(self):
self.assertEqual(
pluck(result, "date"),
[
- datetime(2020, 6, 7, 0, tzinfo=pytz.UTC),
- datetime(2020, 6, 14, 0, tzinfo=pytz.UTC),
- datetime(2020, 6, 21, 0, tzinfo=pytz.UTC),
- datetime(2020, 6, 28, 0, tzinfo=pytz.UTC),
- datetime(2020, 7, 5, 0, tzinfo=pytz.UTC),
- datetime(2020, 7, 12, 0, tzinfo=pytz.UTC),
- datetime(2020, 7, 19, 0, tzinfo=pytz.UTC),
+ datetime(2020, 6, 7, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 14, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 21, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 28, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 7, 5, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 7, 12, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 7, 19, 0, tzinfo=ZoneInfo("UTC")),
],
)
@@ -838,7 +837,7 @@ def test_retention_event_action(self):
self.assertEqual(len(result), 7)
self.assertEqual(pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"])
- self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=pytz.UTC))
+ self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")))
self.assertEqual(
pluck(result, "values", "count"),
@@ -871,7 +870,6 @@ def test_first_time_retention(self):
)
def test_retention_with_properties(self):
-
_create_person(team_id=self.team.pk, distinct_ids=["person1", "alias1"])
_create_person(team_id=self.team.pk, distinct_ids=["person2"])
@@ -902,7 +900,7 @@ def test_retention_with_properties(self):
pluck(result, "label"),
["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6", "Day 7", "Day 8", "Day 9", "Day 10"],
)
- self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=pytz.UTC))
+ self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")))
self.assertEqual(
pluck(result, "values", "count"),
@@ -956,7 +954,7 @@ def test_retention_with_user_properties(self):
self.assertEqual(len(result), 7)
self.assertEqual(pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"])
- self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=pytz.UTC))
+ self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")))
self.assertEqual(
pluck(result, "values", "count"),
[[1, 1, 1, 0, 0, 1, 1], [1, 1, 0, 0, 1, 1], [1, 0, 0, 1, 1], [0, 0, 0, 0], [0, 0, 0], [1, 1], [1]],
@@ -1006,7 +1004,7 @@ def test_retention_with_user_properties_via_action(self):
self.assertEqual(len(result), 7)
self.assertEqual(pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"])
- self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=pytz.UTC))
+ self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")))
self.assertEqual(
pluck(result, "values", "count"),
[[1, 1, 1, 0, 0, 1, 1], [1, 1, 0, 0, 1, 1], [1, 0, 0, 1, 1], [0, 0, 0, 0], [0, 0, 0], [1, 1], [1]],
@@ -1047,7 +1045,7 @@ def test_retention_action_start_point(self):
self.assertEqual(len(result), 7)
self.assertEqual(pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"])
- self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=pytz.UTC))
+ self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")))
self.assertEqual(
pluck(result, "values", "count"),
@@ -1086,7 +1084,7 @@ def test_filter_test_accounts(self):
pluck(result, "label"),
["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6", "Day 7", "Day 8", "Day 9", "Day 10"],
)
- self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=pytz.UTC))
+ self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")))
self.assertEqual(
pluck(result, "values", "count"),
@@ -1156,7 +1154,6 @@ def _create_first_time_retention_events(self):
return p1, p2, p3, p4
def test_retention_aggregate_by_distinct_id(self):
-
_create_person(team_id=self.team.pk, distinct_ids=["person1", "alias1"], properties={"test": "ok"})
_create_person(team_id=self.team.pk, distinct_ids=["person2"])
@@ -1196,7 +1193,7 @@ def test_retention_aggregate_by_distinct_id(self):
"Day 10",
],
)
- self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=pytz.UTC))
+ self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")))
self.assertEqual(
pluck(result, "values", "count"),
@@ -1270,7 +1267,7 @@ def test_timezones(self):
["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6", "Day 7", "Day 8", "Day 9", "Day 10"],
)
- self.assertEqual(result_pacific[0]["date"], pytz.timezone("US/Pacific").localize(datetime(2020, 6, 10)))
+ self.assertEqual(result_pacific[0]["date"], datetime(2020, 6, 10, tzinfo=ZoneInfo("US/Pacific")))
self.assertEqual(result_pacific[0]["date"].isoformat(), "2020-06-10T00:00:00-07:00")
self.assertEqual(
@@ -1337,7 +1334,7 @@ def test_day_interval_sampled(self):
pluck(result, "label"),
["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6", "Day 7", "Day 8", "Day 9", "Day 10"],
)
- self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=pytz.UTC))
+ self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")))
self.assertEqual(
pluck(result, "values", "count"),
diff --git a/posthog/queries/test/test_trends.py b/posthog/queries/test/test_trends.py
index 155afbe22c854..3cce0cfd1907a 100644
--- a/posthog/queries/test/test_trends.py
+++ b/posthog/queries/test/test_trends.py
@@ -5,7 +5,7 @@
from unittest.mock import patch, ANY
from urllib.parse import parse_qsl, urlparse
-import pytz
+from zoneinfo import ZoneInfo
from django.conf import settings
from django.core.cache import cache
from django.test import override_settings
@@ -1631,8 +1631,8 @@ def test_hour_interval(self):
)
self.assertEqual(
{
- "date_from": datetime(2020, 11, 1, 12, tzinfo=pytz.UTC),
- "date_to": datetime(2020, 11, 1, 13, tzinfo=pytz.UTC),
+ "date_from": datetime(2020, 11, 1, 12, tzinfo=ZoneInfo("UTC")),
+ "date_to": datetime(2020, 11, 1, 13, tzinfo=ZoneInfo("UTC")),
"entity_id": "event_name",
"entity_math": None,
"entity_order": None,
@@ -1687,8 +1687,8 @@ def test_day_interval(self):
)
self.assertEqual(
{
- "date_from": datetime(2020, 11, 1, tzinfo=pytz.UTC),
- "date_to": datetime(2020, 11, 1, 23, 59, 59, 999999, tzinfo=pytz.UTC),
+ "date_from": datetime(2020, 11, 1, tzinfo=ZoneInfo("UTC")),
+ "date_to": datetime(2020, 11, 1, 23, 59, 59, 999999, tzinfo=ZoneInfo("UTC")),
"entity_id": "event_name",
"entity_math": None,
"entity_order": None,
@@ -3837,8 +3837,8 @@ def test_breakdown_hour_interval(self):
{
"breakdown_type": "event",
"breakdown_value": "Safari",
- "date_from": datetime(2020, 11, 1, 12, tzinfo=pytz.UTC),
- "date_to": datetime(2020, 11, 1, 13, tzinfo=pytz.UTC),
+ "date_from": datetime(2020, 11, 1, 12, tzinfo=ZoneInfo("UTC")),
+ "date_to": datetime(2020, 11, 1, 13, tzinfo=ZoneInfo("UTC")),
"entity_id": "event_name",
"entity_math": None,
"entity_type": "events",
@@ -5603,7 +5603,7 @@ def test_timezones_hourly_relative_from(self):
timestamp="2020-01-05T08:01:01",
)
- query_time = pytz.timezone(self.team.timezone).localize(datetime(2020, 1, 5, 10, 1, 1))
+ query_time = datetime(2020, 1, 5, 10, 1, 1, tzinfo=ZoneInfo(self.team.timezone))
utc_offset_hours = query_time.tzinfo.utcoffset(query_time).total_seconds() // 3600 # type: ignore
utc_offset_sign = "-" if utc_offset_hours < 0 else "+"
with freeze_time(query_time):
@@ -5797,7 +5797,7 @@ def test_timezones_daily(self):
timestamp="2020-01-06T00:30:01", # Shouldn't be included anywhere
)
- with freeze_time(pytz.timezone(self.team.timezone).localize(datetime(2020, 1, 5, 5, 0))):
+ with freeze_time(datetime(2020, 1, 5, 5, 0, tzinfo=ZoneInfo(self.team.timezone))):
response = Trends().run(
Filter(data={"date_from": "-7d", "events": [{"id": "sign up", "name": "sign up"}]}, team=self.team),
self.team,
@@ -6013,7 +6013,7 @@ def test_timezones_weekly(self):
self.team.save()
# TRICKY: This is the previous UTC day in Asia/Tokyo
- with freeze_time(pytz.timezone(self.team.timezone).localize(datetime(2020, 1, 26, 3, 0))):
+ with freeze_time(datetime(2020, 1, 26, 3, 0, tzinfo=ZoneInfo(self.team.timezone))):
# Total volume query
response_sunday = Trends().run(
Filter(
@@ -6034,7 +6034,7 @@ def test_timezones_weekly(self):
self.team.save()
# TRICKY: This is the previous UTC day in Asia/Tokyo
- with freeze_time(pytz.timezone(self.team.timezone).localize(datetime(2020, 1, 26, 3, 0))):
+ with freeze_time(datetime(2020, 1, 26, 3, 0, tzinfo=ZoneInfo(self.team.timezone))):
# Total volume query
response_monday = Trends().run(
Filter(
diff --git a/posthog/queries/trends/breakdown.py b/posthog/queries/trends/breakdown.py
index b5ffeb0b3c33a..7fe281a0c158c 100644
--- a/posthog/queries/trends/breakdown.py
+++ b/posthog/queries/trends/breakdown.py
@@ -4,7 +4,7 @@
from datetime import datetime
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
-import pytz
+from zoneinfo import ZoneInfo
from django.forms import ValidationError
from posthog.constants import (
@@ -294,7 +294,6 @@ def get_query(self) -> Tuple[str, Dict, Callable]:
)
else:
-
breakdown_filter = breakdown_filter.format(**breakdown_filter_params)
if self.entity.math in [WEEKLY_ACTIVE, MONTHLY_ACTIVE]:
@@ -476,7 +475,6 @@ def _get_breakdown_value(self, breakdown: str) -> str:
return breakdown_value
def _get_histogram_breakdown_values(self, raw_breakdown_value: str, buckets: List[int]):
-
multi_if_conditionals = []
values_arr = []
@@ -599,8 +597,8 @@ def _get_persons_url(
getattr(point_date, "hour", 0),
getattr(point_date, "minute", 0),
getattr(point_date, "second", 0),
- tzinfo=getattr(point_date, "tzinfo", pytz.UTC),
- ).astimezone(pytz.UTC)
+ tzinfo=getattr(point_date, "tzinfo", ZoneInfo("UTC")),
+ ).astimezone(ZoneInfo("UTC"))
filter_params = filter.to_params()
extra_params = {
diff --git a/posthog/queries/trends/test/test_person.py b/posthog/queries/trends/test/test_person.py
index 1d98dfd83b7c9..f68a4ed13b9bd 100644
--- a/posthog/queries/trends/test/test_person.py
+++ b/posthog/queries/trends/test/test_person.py
@@ -1,8 +1,11 @@
+import json
+from datetime import datetime
from uuid import UUID
from dateutil.relativedelta import relativedelta
from django.utils import timezone
from freezegun.api import freeze_time
+from unittest.case import skip
from posthog.models.entity import Entity
from posthog.models.filters import Filter
@@ -15,12 +18,12 @@
ClickhouseTestMixin,
_create_event,
_create_person,
+ flush_persons_and_events,
snapshot_clickhouse_queries,
)
class TestPerson(ClickhouseTestMixin, APIBaseTest):
-
# Note: not using `@snapshot_clickhouse_queries` here because the ordering of the session_ids in the recording
# query is not guaranteed, so adding it would lead to a flaky test.
@freeze_time("2021-01-21T20:00:00.000Z")
@@ -155,3 +158,145 @@ def test_group_query_includes_recording_events(self):
}
],
)
+
+
+class TestPersonIntegration(ClickhouseTestMixin, APIBaseTest):
+ def test_weekly_active_users(self):
+ for d in range(10, 18): # create a person and event for each day from Sep 10 to Sep 17
+ _create_person(team_id=self.team.pk, distinct_ids=[f"u_{d}"])
+ _create_event(
+ event="pageview",
+ distinct_id=f"u_{d}",
+ team=self.team,
+ timestamp=datetime(2023, 9, d, 00, 42),
+ )
+ flush_persons_and_events()
+
+ # request weekly active users in the following week
+ filter = {
+ "insight": "TRENDS",
+ "date_from": "2023-09-17T13:37:00",
+ "date_to": "2023-09-24T13:37:00",
+ "events": json.dumps([{"id": "pageview", "math": "weekly_active"}]),
+ }
+ insight_response = self.client.get(f"/api/projects/{self.team.pk}/insights/trend", data=filter)
+ insight_response = (insight_response.json()).get("result")
+
+ self.assertEqual(insight_response[0].get("labels")[5], "22-Sep-2023")
+ self.assertEqual(insight_response[0].get("data")[5], 2)
+
+ persons_url = insight_response[0].get("persons_urls")[5].get("url")
+ response = self.client.get("/" + persons_url)
+
+ data = response.json()
+ self.assertEqual(data.get("results")[0].get("count"), 2)
+ self.assertEqual([item["name"] for item in data.get("results")[0].get("people")], ["u_17", "u_16"])
+
+ def test_weekly_active_users_grouped_by_week(self):
+ for d in range(10, 18): # create a person and event for each day from Sep 10 to Sep 17
+ _create_person(team_id=self.team.pk, distinct_ids=[f"u_{d}"])
+ _create_event(
+ event="pageview",
+ distinct_id=f"u_{d}",
+ team=self.team,
+ timestamp=datetime(2023, 9, d, 00, 42),
+ )
+ flush_persons_and_events()
+
+ # request weekly active users in the following week
+ filter = {
+ "insight": "TRENDS",
+ "date_from": "2023-09-17T13:37:00",
+ "date_to": "2023-09-24T13:37:00",
+ "interval": "week",
+ "events": json.dumps([{"id": "pageview", "math": "weekly_active"}]),
+ }
+ insight_response = self.client.get(f"/api/projects/{self.team.pk}/insights/trend", data=filter)
+ insight_response = (insight_response.json()).get("result")
+
+ self.assertEqual(insight_response[0].get("labels")[0], "17-Sep-2023")
+ self.assertEqual(insight_response[0].get("data")[0], 7)
+
+ persons_url = insight_response[0].get("persons_urls")[0].get("url")
+ response = self.client.get("/" + persons_url)
+
+ data = response.json()
+ self.assertEqual(data.get("results")[0].get("count"), 7)
+ self.assertEqual(
+ [item["name"] for item in data.get("results")[0].get("people")],
+ ["u_17", "u_16", "u_15", "u_14", "u_13", "u_12", "u_11"],
+ )
+
+ def test_weekly_active_users_cumulative(self):
+ for d in range(10, 18): # create a person and event for each day from Sep 10 to Sep 17
+ _create_person(team_id=self.team.pk, distinct_ids=[f"u_{d}"])
+ _create_event(
+ event="pageview",
+ distinct_id=f"u_{d}",
+ team=self.team,
+ timestamp=datetime(2023, 9, d, 00, 42),
+ )
+ flush_persons_and_events()
+
+ # request weekly active users in the following week
+ filter = {
+ "insight": "TRENDS",
+ "date_from": "2023-09-10T13:37:00",
+ "date_to": "2023-09-24T13:37:00",
+ "events": json.dumps([{"id": "pageview", "math": "weekly_active"}]),
+ "display": "ActionsLineGraphCumulative",
+ }
+ insight_response = self.client.get(f"/api/projects/{self.team.pk}/insights/trend", data=filter)
+ insight_response = (insight_response.json()).get("result")
+
+ self.assertEqual(insight_response[0].get("labels")[1], "11-Sep-2023")
+ self.assertEqual(insight_response[0].get("data")[1], 3)
+
+ persons_url = insight_response[0].get("persons_urls")[1].get("url")
+ response = self.client.get("/" + persons_url)
+
+ data = response.json()
+ self.assertEqual(data.get("results")[0].get("count"), 2)
+ self.assertEqual([item["name"] for item in data.get("results")[0].get("people")], ["u_11", "u_10"])
+
+ @skip("see PR 17356")
+ def test_weekly_active_users_breakdown(self):
+ for d in range(10, 18): # create a person and event for each day from Sep 10 to Sep 17
+ _create_person(team_id=self.team.pk, distinct_ids=[f"a_{d}"])
+ _create_person(team_id=self.team.pk, distinct_ids=[f"b_{d}"])
+ _create_event(
+ event="pageview",
+ distinct_id=f"a_{d}",
+ properties={"some_prop": "a"},
+ team=self.team,
+ timestamp=datetime(2023, 9, d, 00, 42),
+ )
+ _create_event(
+ event="pageview",
+ distinct_id=f"b_{d}",
+ properties={"some_prop": "b"},
+ team=self.team,
+ timestamp=datetime(2023, 9, d, 00, 42),
+ )
+ flush_persons_and_events()
+
+ # request weekly active users in the following week
+ filter = {
+ "insight": "TRENDS",
+ "date_from": "2023-09-17T13:37:00",
+ "date_to": "2023-09-24T13:37:00",
+ "events": json.dumps([{"id": "pageview", "math": "weekly_active"}]),
+ "breakdown": "some_prop",
+ }
+ insight_response = self.client.get(f"/api/projects/{self.team.pk}/insights/trend", data=filter)
+ insight_response = (insight_response.json()).get("result")
+
+ self.assertEqual(insight_response[0].get("labels")[5], "22-Sep-2023")
+ # self.assertEqual(insight_response[0].get("data")[5], 2)
+
+ persons_url = insight_response[0].get("persons_urls")[5].get("url")
+ response = self.client.get("/" + persons_url)
+
+ data = response.json()
+ # self.assertEqual(data.get("results")[0].get("count"), 2)
+ self.assertEqual([item["name"] for item in data.get("results")[0].get("people")], ["a_17", "a_16"])
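A quick sanity check of the counts asserted in these new tests: each user u_d is active only on Sep d, so a point's weekly-active value is just the number of days d that fall inside the trailing 7-day window. A hypothetical recomputation (not part of the test suite):

def wau(point_day: int, window: int = 7) -> int:
    # users were created for days 10..17 of September
    return sum(1 for d in range(10, 18) if point_day - (window - 1) <= d <= point_day)

assert wau(22) == 2   # u_16, u_17 -> data[5] for "22-Sep-2023"
assert wau(17) == 7   # u_11..u_17 -> data[0] for the point labelled "17-Sep-2023"
assert wau(11) == 2   # u_10, u_11 -> persons behind the cumulative point for "11-Sep-2023"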
diff --git a/posthog/queries/trends/total_volume.py b/posthog/queries/trends/total_volume.py
index 3d57726d7886b..154e105e77f92 100644
--- a/posthog/queries/trends/total_volume.py
+++ b/posthog/queries/trends/total_volume.py
@@ -1,5 +1,5 @@
import urllib.parse
-from datetime import date, datetime
+from datetime import date, datetime, timedelta
from typing import Any, Callable, Dict, List, Tuple, Union
from posthog.clickhouse.query_tagging import tag_queries
@@ -256,6 +256,21 @@ def _parse(result: List) -> List:
return _parse
+ def _offset_date_from(self, point_datetime: datetime, filter: Filter, entity: Entity) -> datetime | None:
+ if filter.display == TRENDS_CUMULATIVE:
+ return filter.date_from
+ elif entity.math in [WEEKLY_ACTIVE, MONTHLY_ACTIVE]:
+ # :TRICKY: We have to offset the date by one, as the final query already subtracts 7 days
+ return point_datetime + timedelta(days=1)
+ else:
+ return point_datetime
+
+ def _offset_date_to(self, point_datetime: datetime, filter: Filter, entity: Entity, team: Team) -> datetime:
+ if entity.math in [WEEKLY_ACTIVE, MONTHLY_ACTIVE]:
+ return point_datetime
+ else:
+ return offset_time_series_date_by_interval(point_datetime, filter=filter, team=team)
+
def _get_persons_url(
self, filter: Filter, entity: Entity, team: Team, point_datetimes: List[datetime]
) -> List[Dict[str, Any]]:
@@ -267,8 +282,8 @@ def _get_persons_url(
"entity_id": entity.id,
"entity_type": entity.type,
"entity_math": entity.math,
- "date_from": filter.date_from if filter.display == TRENDS_CUMULATIVE else point_datetime,
- "date_to": offset_time_series_date_by_interval(point_datetime, filter=filter, team=team),
+ "date_from": self._offset_date_from(point_datetime, filter=filter, entity=entity),
+ "date_to": self._offset_date_to(point_datetime, filter=filter, entity=entity, team=team),
"entity_order": entity.order,
}
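The two helpers above encode the date window handed to the persons URL for each trend point. A minimal paraphrase of that logic as a standalone sketch (not PostHog's actual function signatures):

from datetime import datetime, timedelta

WEEKLY_ACTIVE, MONTHLY_ACTIVE, TRENDS_CUMULATIVE = "weekly_active", "monthly_active", "ActionsLineGraphCumulative"

def persons_date_window(point: datetime, math: str, display: str, filter_date_from: datetime, bump_to_interval_end):
    # date_from: cumulative charts keep the filter's own start; WAU/MAU points are shifted
    # forward by one day because the actors query subtracts the activity window again.
    if display == TRENDS_CUMULATIVE:
        date_from = filter_date_from
    elif math in (WEEKLY_ACTIVE, MONTHLY_ACTIVE):
        date_from = point + timedelta(days=1)
    else:
        date_from = point
    # date_to: WAU/MAU use the point itself; everything else is bumped to the end of the interval.
    date_to = point if math in (WEEKLY_ACTIVE, MONTHLY_ACTIVE) else bump_to_interval_end(point)
    return date_from, date_to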
diff --git a/posthog/queries/trends/trends.py b/posthog/queries/trends/trends.py
index e7a96b4eeca5e..940abba59fab5 100644
--- a/posthog/queries/trends/trends.py
+++ b/posthog/queries/trends/trends.py
@@ -3,8 +3,8 @@
from datetime import datetime, timedelta
from itertools import accumulate
from typing import Any, Callable, Dict, List, Optional, Tuple, cast
+from zoneinfo import ZoneInfo
-import pytz
from dateutil import parser
from django.db.models.query import Prefetch
from sentry_sdk import push_scope
@@ -49,7 +49,6 @@ def _get_sql_for_entity(self, filter: Filter, team: Team, entity: Entity) -> Tup
# Use cached result even on refresh if team has strict caching enabled
def get_cached_result(self, filter: Filter, team: Team) -> Optional[List[Dict[str, Any]]]:
-
if not team.strict_caching_enabled or filter.breakdown or filter.display != TRENDS_LINEAR:
return None
@@ -80,7 +79,7 @@ def is_present_timerange(self, cached_result: List[Dict[str, Any]], filter: Filt
latest_date = cached_result[0]["days"][len(cached_result[0]["days"]) - 1]
parsed_latest_date = parser.parse(latest_date)
- parsed_latest_date = parsed_latest_date.replace(tzinfo=pytz.timezone(team.timezone))
+ parsed_latest_date = parsed_latest_date.replace(tzinfo=ZoneInfo(team.timezone))
_is_present = is_filter_date_present(filter, parsed_latest_date)
else:
_is_present = False
diff --git a/posthog/queries/trends/trends_event_query_base.py b/posthog/queries/trends/trends_event_query_base.py
index 00ab25e98460c..93dd843349046 100644
--- a/posthog/queries/trends/trends_event_query_base.py
+++ b/posthog/queries/trends/trends_event_query_base.py
@@ -104,14 +104,14 @@ def _get_not_null_actor_condition(self) -> str:
return f"""AND "$group_{self._entity.math_group_type_index}" != ''"""
def _get_date_filter(self) -> Tuple[str, Dict]:
- date_filter = ""
- query_params: Dict[str, Any] = {}
+ date_query = ""
+ date_params: Dict[str, Any] = {}
query_date_range = QueryDateRange(self._filter, self._team)
parsed_date_from, date_from_params = query_date_range.date_from
parsed_date_to, date_to_params = query_date_range.date_to
- query_params.update(date_from_params)
- query_params.update(date_to_params)
+ date_params.update(date_from_params)
+ date_params.update(date_to_params)
self.parsed_date_from = parsed_date_from
self.parsed_date_to = parsed_date_to
@@ -121,17 +121,17 @@ def _get_date_filter(self) -> Tuple[str, Dict]:
self._filter, self._entity, self._team_id
)
self.active_user_params = active_user_format_params
- query_params.update(active_user_query_params)
+ date_params.update(active_user_query_params)
- date_filter = "{parsed_date_from_prev_range} {parsed_date_to}".format(
+ date_query = "{parsed_date_from_prev_range} {parsed_date_to}".format(
**active_user_format_params, parsed_date_to=parsed_date_to
)
else:
- date_filter = "{parsed_date_from} {parsed_date_to}".format(
+ date_query = "{parsed_date_from} {parsed_date_to}".format(
parsed_date_from=parsed_date_from, parsed_date_to=parsed_date_to
)
- return date_filter, query_params
+ return date_query, date_params
def _get_entity_query(self) -> Tuple[str, Dict]:
entity_params, entity_format_params = get_entity_filtering_params(
diff --git a/posthog/queries/trends/util.py b/posthog/queries/trends/util.py
index a153e7f0eae56..46cd2a8041f32 100644
--- a/posthog/queries/trends/util.py
+++ b/posthog/queries/trends/util.py
@@ -1,8 +1,8 @@
import datetime
from datetime import timedelta
from typing import Any, Dict, List, Optional, Tuple, TypeVar
+from zoneinfo import ZoneInfo
-import pytz
import structlog
from dateutil.relativedelta import relativedelta
from rest_framework.exceptions import ValidationError
@@ -191,5 +191,5 @@ def offset_time_series_date_by_interval(date: datetime.datetime, *, filter: F, t
else: # "day" is the default interval
date = date.replace(hour=23, minute=59, second=59, microsecond=999999)
if date.tzinfo is None:
- date = pytz.timezone(team.timezone).localize(date)
+ date = date.replace(tzinfo=ZoneInfo(team.timezone))
return date
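The change above is the recurring pattern of this diff: pytz's localize() becomes a plain tzinfo replace with the standard-library zoneinfo. A minimal sketch showing the two forms agree for an unambiguous wall time:

from datetime import datetime
from zoneinfo import ZoneInfo
import pytz  # kept here only to show the old form side by side

naive = datetime(2020, 1, 5, 5, 0)
old_style = pytz.timezone("US/Pacific").localize(naive)
new_style = naive.replace(tzinfo=ZoneInfo("US/Pacific"))
assert old_style == new_style
assert new_style.isoformat() == "2020-01-05T05:00:00-08:00"

For DST-ambiguous or non-existent local times the two libraries can resolve the fold differently, so the equivalence above only holds for unambiguous wall times like the ones used at these call sites.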
diff --git a/posthog/queries/util.py b/posthog/queries/util.py
index 936921732285b..ec218785b1dc9 100644
--- a/posthog/queries/util.py
+++ b/posthog/queries/util.py
@@ -3,7 +3,7 @@
from enum import Enum, auto
from typing import Any, Dict, Optional, Union
-import pytz
+from zoneinfo import ZoneInfo
from django.utils import timezone
from rest_framework.exceptions import ValidationError
@@ -67,16 +67,16 @@ class PersonPropertiesMode(Enum):
"month": "toIntervalMonth",
}
+
# TODO: refactor since this is only used in one spot now
def format_ch_timestamp(timestamp: datetime, convert_to_timezone: Optional[str] = None):
if convert_to_timezone:
# Here we probably get a timestamp set to the beginning of the day (00:00), in UTC
# We need to convert that UTC timestamp to the local timestamp (00:00 in US/Pacific for example)
# Then we convert it back to UTC (08:00 in UTC)
- if timestamp.tzinfo and timestamp.tzinfo != pytz.UTC:
+ if timestamp.tzinfo and timestamp.tzinfo != ZoneInfo("UTC"):
raise ValidationError(detail="You must pass a timestamp with no timezone or UTC")
- timestamp = pytz.timezone(convert_to_timezone).localize(timestamp.replace(tzinfo=None)).astimezone(pytz.UTC)
-
+ timestamp = timestamp.replace(tzinfo=ZoneInfo(convert_to_timezone)).astimezone(ZoneInfo("UTC"))
return timestamp.strftime("%Y-%m-%d %H:%M:%S")
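A usage sketch for the converted branch above, assuming format_ch_timestamp is imported from posthog.queries.util: a naive midnight interpreted as US/Pacific (PDT, UTC-7 in June) comes back as the equivalent UTC wall time.

from datetime import datetime
from posthog.queries.util import format_ch_timestamp

assert format_ch_timestamp(datetime(2020, 6, 10), convert_to_timezone="US/Pacific") == "2020-06-10 07:00:00"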
diff --git a/posthog/schema.py b/posthog/schema.py
index 72b581e8c863c..b988b9618e0ef 100644
--- a/posthog/schema.py
+++ b/posthog/schema.py
@@ -369,17 +369,45 @@ class SavedInsightNode(BaseModel):
class Config:
extra = Extra.forbid
+ allowSorting: Optional[bool] = Field(
+ None, description="Can the user click on column headers to sort the table? (default: true)"
+ )
embedded: Optional[bool] = Field(None, description="Query is embedded inside another bordered component")
+ expandable: Optional[bool] = Field(None, description="Can expand row to show raw event data (default: true)")
full: Optional[bool] = Field(None, description="Show with most visual options enabled. Used in insight scene.")
kind: str = Field("SavedInsightNode", const=True)
+ propertiesViaUrl: Optional[bool] = Field(None, description="Link properties via the URL (default: false)")
shortId: str
+ showActions: Optional[bool] = Field(None, description="Show the kebab menu at the end of the row")
+ showColumnConfigurator: Optional[bool] = Field(
+ None, description="Show a button to configure the table's columns if possible"
+ )
showCorrelationTable: Optional[bool] = None
+ showDateRange: Optional[bool] = Field(None, description="Show date range selector")
+ showElapsedTime: Optional[bool] = Field(None, description="Show the time it takes to run a query")
+ showEventFilter: Optional[bool] = Field(
+ None, description="Include an event filter above the table (EventsNode only)"
+ )
+ showExport: Optional[bool] = Field(None, description="Show the export button")
showFilters: Optional[bool] = None
showHeader: Optional[bool] = None
+ showHogQLEditor: Optional[bool] = Field(None, description="Include a HogQL query editor above HogQL tables")
showLastComputation: Optional[bool] = None
showLastComputationRefresh: Optional[bool] = None
+ showOpenEditorButton: Optional[bool] = Field(
+ None, description="Show a button to open the current query as a new insight. (default: true)"
+ )
+ showPersistentColumnConfigurator: Optional[bool] = Field(
+ None, description="Show a button to configure and persist the table's default columns if possible"
+ )
+ showPropertyFilter: Optional[bool] = Field(None, description="Include a property filter above the table")
+ showReload: Optional[bool] = Field(None, description="Show a reload button")
showResults: Optional[bool] = None
+ showResultsTable: Optional[bool] = Field(None, description="Show a results table")
+ showSavedQueries: Optional[bool] = Field(None, description="Shows a list of saved queries")
+ showSearch: Optional[bool] = Field(None, description="Include a free text search field (PersonsNode only)")
showTable: Optional[bool] = None
+ showTimings: Optional[bool] = Field(None, description="Show a detailed query timing breakdown")
class SessionPropertyFilter(BaseModel):
@@ -594,6 +622,14 @@ class Config:
toggledLifecycles: Optional[List[LifecycleToggle]] = None
+class LifecycleQueryResponse(BaseModel):
+ class Config:
+ extra = Extra.forbid
+
+ result: List[Dict[str, Any]]
+ timings: Optional[List[QueryTiming]] = None
+
+
class PersonPropertyFilter(BaseModel):
class Config:
extra = Extra.forbid
@@ -1143,6 +1179,7 @@ class Config:
PropertyGroupFilter,
]
] = Field(None, description="Property filters for all series")
+ response: Optional[LifecycleQueryResponse] = None
samplingFactor: Optional[float] = Field(None, description="Sampling rate")
series: List[Union[EventsNode, ActionsNode]] = Field(..., description="Events and actions to include")
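The newly generated LifecycleQueryResponse is a thin Pydantic model, so a query result can now be attached directly to a LifecycleQuery's response field. A small sketch, assuming posthog.schema is importable:

from posthog.schema import LifecycleQueryResponse

response = LifecycleQueryResponse(result=[{"status": "new", "count": 42}])
assert response.timings is None  # optional timing breakdown defaults to None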
diff --git a/posthog/session_recordings/realtime_snapshots.py b/posthog/session_recordings/realtime_snapshots.py
index 429566418aa1f..ea19b3b405a2b 100644
--- a/posthog/session_recordings/realtime_snapshots.py
+++ b/posthog/session_recordings/realtime_snapshots.py
@@ -38,6 +38,10 @@ def get_realtime_snapshots(team_id: str, session_id: str, attempt_count=0) -> Op
key = get_key(team_id, session_id)
encoded_snapshots = redis.zrange(key, 0, -1, withscores=True)
+ # We always publish as it could be that a rebalance has occurred and the consumer doesn't know it should be
+ # sending data to redis
+ redis.publish(SUBSCRIPTION_CHANNEL, json.dumps({"team_id": team_id, "session_id": session_id}))
+
if not encoded_snapshots and attempt_count < ATTEMPT_MAX:
logger.info(
"No realtime snapshots found, publishing subscription and retrying",
diff --git a/posthog/settings/ingestion.py b/posthog/settings/ingestion.py
index 6f5664470c88e..a970414f04fd1 100644
--- a/posthog/settings/ingestion.py
+++ b/posthog/settings/ingestion.py
@@ -38,3 +38,6 @@
"Environment variable REPLAY_EVENTS_NEW_CONSUMER_RATIO is not between 0 and 1. Setting to 0 to be safe."
)
REPLAY_EVENTS_NEW_CONSUMER_RATIO = 0
+
+REPLAY_RETENTION_DAYS_MIN = 30
+REPLAY_RETENTION_DAYS_MAX = 90
diff --git a/posthog/settings/web.py b/posthog/settings/web.py
index 9f61e9ee11e82..ca0c035765a7e 100644
--- a/posthog/settings/web.py
+++ b/posthog/settings/web.py
@@ -219,7 +219,7 @@
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATIC_URL = "/static/"
STATICFILES_DIRS = [os.path.join(BASE_DIR, "frontend/dist"), os.path.join(BASE_DIR, "posthog/year_in_posthog/images")]
-STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
+STATICFILES_STORAGE = "whitenoise.storage.ManifestStaticFilesStorage"
AUTH_USER_MODEL = "posthog.User"
diff --git a/posthog/tasks/test/test_usage_report.py b/posthog/tasks/test/test_usage_report.py
index e43c7ddb817fc..636b3e76b93e9 100644
--- a/posthog/tasks/test/test_usage_report.py
+++ b/posthog/tasks/test/test_usage_report.py
@@ -20,6 +20,7 @@
from posthog.hogql.query import execute_hogql_query
from posthog.models import Organization, Plugin, Team
from posthog.models.dashboard import Dashboard
+from posthog.models.event.util import create_event
from posthog.models.feature_flag import FeatureFlag
from posthog.models.group.util import create_group
from posthog.models.group_type_mapping import GroupTypeMapping
@@ -27,7 +28,16 @@
from posthog.models.sharing_configuration import SharingConfiguration
from posthog.schema import EventsQuery
from posthog.session_recordings.test.test_factory import create_snapshot
-from posthog.tasks.usage_report import capture_event, send_all_org_usage_reports
+from posthog.tasks.usage_report import (
+ _get_all_org_reports,
+ _get_all_usage_data_as_team_rows,
+ _get_full_org_usage_report,
+ _get_full_org_usage_report_as_dict,
+ _get_team_report,
+ capture_event,
+ get_instance_metadata,
+ send_all_org_usage_reports,
+)
from posthog.test.base import (
APIBaseTest,
ClickhouseDestroyTablesMixin,
@@ -37,8 +47,7 @@
flush_persons_and_events,
snapshot_clickhouse_queries,
)
-from posthog.models.event.util import create_event
-from posthog.utils import get_machine_id
+from posthog.utils import get_machine_id, get_previous_day
logger = structlog.get_logger(__name__)
@@ -296,16 +305,20 @@ def _test_usage_report(self) -> List[dict]:
self._create_plugin("Installed but not enabled", False)
self._create_plugin("Installed and enabled", True)
- all_reports = send_all_org_usage_reports(dry_run=False)
+ period = get_previous_day()
+ period_start, period_end = period
+ all_reports = _get_all_org_reports(period_start, period_end)
+ report = _get_full_org_usage_report_as_dict(
+ _get_full_org_usage_report(all_reports[str(self.organization.id)], get_instance_metadata(period))
+ )
- report = all_reports[0]
assert report["table_sizes"]
assert report["table_sizes"]["posthog_event"] < 10**7 # <10MB
assert report["table_sizes"]["posthog_sessionrecordingevent"] < 10**7 # <10MB
assert len(all_reports) == 2
- expectation = [
+ expectations = [
{
"deployment_infrastructure": "tests",
"realm": "hosted-clickhouse",
@@ -316,12 +329,12 @@ def _test_usage_report(self) -> List[dict]:
"site_url": "http://test.posthog.com",
"product": "open source",
"helm": {},
- "clickhouse_version": all_reports[0]["clickhouse_version"],
+ "clickhouse_version": report["clickhouse_version"],
"users_who_logged_in": [],
"users_who_logged_in_count": 0,
"users_who_signed_up": [],
"users_who_signed_up_count": 0,
- "table_sizes": all_reports[0]["table_sizes"],
+ "table_sizes": report["table_sizes"],
"plugins_installed": {"Installed and enabled": 1, "Installed but not enabled": 1},
"plugins_enabled": {"Installed and enabled": 1},
"instance_tag": "none",
@@ -441,12 +454,12 @@ def _test_usage_report(self) -> List[dict]:
"site_url": "http://test.posthog.com",
"product": "open source",
"helm": {},
- "clickhouse_version": all_reports[1]["clickhouse_version"],
+ "clickhouse_version": report["clickhouse_version"],
"users_who_logged_in": [],
"users_who_logged_in_count": 0,
"users_who_signed_up": [],
"users_who_signed_up_count": 0,
- "table_sizes": all_reports[1]["table_sizes"],
+ "table_sizes": report["table_sizes"],
"plugins_installed": {"Installed and enabled": 1, "Installed but not enabled": 1},
"plugins_enabled": {"Installed and enabled": 1},
"instance_tag": "none",
@@ -525,18 +538,22 @@ def _test_usage_report(self) -> List[dict]:
},
]
- for item in expectation:
+ for item in expectations:
item.update(**self.expected_properties)
# tricky: list could be in different order
assert len(all_reports) == 2
- for report in all_reports:
- if report["organization_id"] == expectation[0]["organization_id"]:
- assert report == expectation[0]
- elif report["organization_id"] == expectation[1]["organization_id"]:
- assert report == expectation[1]
+ full_reports = []
+ for expectation in expectations:
+ report = _get_full_org_usage_report_as_dict(
+ _get_full_org_usage_report(
+ all_reports[expectation["organization_id"]], get_instance_metadata(period)
+ )
+ )
+ assert report == expectation
+ full_reports.append(report)
- return all_reports
+ return full_reports
@freeze_time("2022-01-10T00:01:00Z")
@patch("os.environ", {"DEPLOYMENT": "tests"})
@@ -552,6 +569,8 @@ def test_unlicensed_usage_report(self, mock_post: MagicMock, mock_client: MagicM
mock_client.return_value = mock_posthog
all_reports = self._test_usage_report()
+ with self.settings(SITE_URL="http://test.posthog.com"):
+ send_all_org_usage_reports()
# Check calls to other services
mock_post.assert_not_called()
@@ -597,20 +616,21 @@ def test_usage_report_hogql_queries(self) -> None:
run_events_query(query=EventsQuery(select=["event"], limit=50), team=self.team)
sync_execute("SYSTEM FLUSH LOGS")
- all_reports = send_all_org_usage_reports(dry_run=False, at=str(now() + relativedelta(days=1)))
- assert len(all_reports) == 1
+ period = get_previous_day(at=now() + relativedelta(days=1))
+ period_start, period_end = period
+ all_reports = _get_all_usage_data_as_team_rows(period_start, period_end)
- report = all_reports[0]["teams"][str(self.team.pk)]
+ report = _get_team_report(all_reports, self.team)
# We selected 200 or 50 rows, but still read 100 rows to return the query
- assert report["hogql_app_rows_read"] == 100
- assert report["hogql_app_bytes_read"] > 0
- assert report["event_explorer_app_rows_read"] == 100
- assert report["event_explorer_app_bytes_read"] > 0
+ assert report.hogql_app_rows_read == 100
+ assert report.hogql_app_bytes_read > 0
+ assert report.event_explorer_app_rows_read == 100
+ assert report.event_explorer_app_bytes_read > 0
# Nothing was read via the API
- assert report["hogql_api_rows_read"] == 0
- assert report["event_explorer_api_rows_read"] == 0
+ assert report.hogql_api_rows_read == 0
+ assert report.event_explorer_api_rows_read == 0
@freeze_time("2022-01-10T00:01:00Z")
@@ -680,21 +700,19 @@ def test_usage_report_decide_requests(self, billing_task_mock: MagicMock, postho
flush_persons_and_events()
with self.settings(DECIDE_BILLING_ANALYTICS_TOKEN="correct"):
- all_reports = send_all_org_usage_reports(dry_run=False, at=str(now() + relativedelta(days=1)))
+ period = get_previous_day(at=now() + relativedelta(days=1))
+ period_start, period_end = period
+ all_reports = _get_all_org_reports(period_start, period_end)
assert len(all_reports) == 3
- all_reports = sorted(all_reports, key=lambda x: x["organization_name"])
-
- assert [all_reports["organization_name"] for all_reports in all_reports] == [
- "Org 1",
- "Org 2",
- "PostHog",
- ]
-
- org_1_report = all_reports[0]
- org_2_report = all_reports[1]
- analytics_report = all_reports[2]
+ org_1_report = _get_full_org_usage_report_as_dict(
+ _get_full_org_usage_report(all_reports[str(self.org_1.id)], get_instance_metadata(period))
+ )
+ assert org_1_report["organization_name"] == "Org 1"
+ org_2_report = _get_full_org_usage_report_as_dict(
+ _get_full_org_usage_report(all_reports[str(self.org_2.id)], get_instance_metadata(period))
+ )
assert org_1_report["organization_name"] == "Org 1"
assert org_1_report["decide_requests_count_in_period"] == 11
@@ -721,26 +739,6 @@ def test_usage_report_decide_requests(self, billing_task_mock: MagicMock, postho
assert org_2_report["teams"]["5"]["billable_feature_flag_requests_count_in_period"] == 0
assert org_2_report["teams"]["5"]["billable_feature_flag_requests_count_in_month"] == 0
- # billing service calls are made only for org1, which has decide requests, and analytics org - which has decide usage events.
- calls = [
- call(
- org_1_report["organization_id"],
- ANY,
- ),
- call(
- analytics_report["organization_id"],
- ANY,
- ),
- ]
- assert billing_task_mock.delay.call_count == 2
- billing_task_mock.delay.assert_has_calls(
- calls,
- any_order=True,
- )
-
- # capture usage report calls are made for all orgs
- assert posthog_capture_mock.return_value.capture.call_count == 3
-
@patch("posthog.tasks.usage_report.Client")
@patch("posthog.tasks.usage_report.send_report_to_billing_service")
def test_usage_report_local_evaluation_requests(
@@ -792,21 +790,19 @@ def test_usage_report_local_evaluation_requests(
flush_persons_and_events()
with self.settings(DECIDE_BILLING_ANALYTICS_TOKEN="correct"):
- all_reports = send_all_org_usage_reports(dry_run=False, at=str(now() + relativedelta(days=1)))
+ period = get_previous_day(at=now() + relativedelta(days=1))
+ period_start, period_end = period
+ all_reports = _get_all_org_reports(period_start, period_end)
assert len(all_reports) == 3
- all_reports = sorted(all_reports, key=lambda x: x["organization_name"])
-
- assert [all_reports["organization_name"] for all_reports in all_reports] == [
- "Org 1",
- "Org 2",
- "PostHog",
- ]
-
- org_1_report = all_reports[0]
- org_2_report = all_reports[1]
- analytics_report = all_reports[2]
+ org_1_report = _get_full_org_usage_report_as_dict(
+ _get_full_org_usage_report(all_reports[str(self.org_1.id)], get_instance_metadata(period))
+ )
+ assert org_1_report["organization_name"] == "Org 1"
+ org_2_report = _get_full_org_usage_report_as_dict(
+ _get_full_org_usage_report(all_reports[str(self.org_2.id)], get_instance_metadata(period))
+ )
assert org_1_report["organization_name"] == "Org 1"
assert org_1_report["local_evaluation_requests_count_in_period"] == 11
@@ -837,26 +833,6 @@ def test_usage_report_local_evaluation_requests(
assert org_2_report["teams"]["5"]["billable_feature_flag_requests_count_in_period"] == 0
assert org_2_report["teams"]["5"]["billable_feature_flag_requests_count_in_month"] == 0
- # billing service calls are made only for org1, which has decide requests, and analytics org - which has local evaluation usage events.
- calls = [
- call(
- org_1_report["organization_id"],
- ANY,
- ),
- call(
- analytics_report["organization_id"],
- ANY,
- ),
- ]
- assert billing_task_mock.delay.call_count == 2
- billing_task_mock.delay.assert_has_calls(
- calls,
- any_order=True,
- )
-
- # capture usage report calls are made for all orgs
- assert posthog_capture_mock.return_value.capture.call_count == 3
-
class SendUsageTest(LicensedTestMixin, ClickhouseDestroyTablesMixin, APIBaseTest):
def setUp(self) -> None:
@@ -907,18 +883,26 @@ def test_send_usage(self, mock_post: MagicMock, mock_client: MagicMock) -> None:
mock_posthog = MagicMock()
mock_client.return_value = mock_posthog
- all_reports = send_all_org_usage_reports(dry_run=False)
+ period = get_previous_day()
+ period_start, period_end = period
+ all_reports = _get_all_org_reports(period_start, period_end)
+ full_report_as_dict = _get_full_org_usage_report_as_dict(
+ _get_full_org_usage_report(all_reports[str(self.organization.id)], get_instance_metadata(period))
+ )
+ send_all_org_usage_reports(dry_run=False)
license = License.objects.first()
assert license
token = build_billing_token(license, self.organization)
mock_post.assert_called_once_with(
- f"{BILLING_SERVICE_URL}/api/usage", json=all_reports[0], headers={"Authorization": f"Bearer {token}"}
+ f"{BILLING_SERVICE_URL}/api/usage",
+ json=full_report_as_dict,
+ headers={"Authorization": f"Bearer {token}"},
)
mock_posthog.capture.assert_any_call(
get_machine_id(),
"organization usage report",
- {**all_reports[0], "scope": "machine"},
+ {**full_report_as_dict, "scope": "machine"},
groups={"instance": ANY},
timestamp=None,
)
@@ -935,18 +919,26 @@ def test_send_usage_cloud(self, mock_post: MagicMock, mock_client: MagicMock) ->
mock_posthog = MagicMock()
mock_client.return_value = mock_posthog
- all_reports = send_all_org_usage_reports(dry_run=False)
+ period = get_previous_day()
+ period_start, period_end = period
+ all_reports = _get_all_org_reports(period_start, period_end)
+ full_report_as_dict = _get_full_org_usage_report_as_dict(
+ _get_full_org_usage_report(all_reports[str(self.organization.id)], get_instance_metadata(period))
+ )
+ send_all_org_usage_reports(dry_run=False)
license = License.objects.first()
assert license
token = build_billing_token(license, self.organization)
mock_post.assert_called_once_with(
- f"{BILLING_SERVICE_URL}/api/usage", json=all_reports[0], headers={"Authorization": f"Bearer {token}"}
+ f"{BILLING_SERVICE_URL}/api/usage",
+ json=full_report_as_dict,
+ headers={"Authorization": f"Bearer {token}"},
)
mock_posthog.capture.assert_any_call(
self.user.distinct_id,
"organization usage report",
- {**all_reports[0], "scope": "user"},
+ {**full_report_as_dict, "scope": "user"},
groups={"instance": "http://localhost:8000", "organization": str(self.organization.id)},
timestamp=None,
)
diff --git a/posthog/tasks/usage_report.py b/posthog/tasks/usage_report.py
index 45f82b9882374..612213086629e 100644
--- a/posthog/tasks/usage_report.py
+++ b/posthog/tasks/usage_report.py
@@ -534,6 +534,281 @@ def convert_team_usage_rows_to_dict(rows: List[Union[dict, Tuple[int, int]]]) ->
return team_id_map
+def _get_all_usage_data(period_start: datetime, period_end: datetime) -> Dict[str, Any]:
+ """
+ Gets all usage data for the specified period. Clickhouse is good at counting things so
+ we count across all teams rather than doing it one by one
+ """
+ return dict(
+ teams_with_event_count_lifetime=get_teams_with_event_count_lifetime(),
+ teams_with_event_count_in_period=get_teams_with_billable_event_count_in_period(
+ period_start, period_end, count_distinct=True
+ ),
+ teams_with_event_count_in_month=get_teams_with_billable_event_count_in_period(
+ period_start.replace(day=1), period_end
+ ),
+ teams_with_event_count_with_groups_in_period=get_teams_with_event_count_with_groups_in_period(
+ period_start, period_end
+ ),
+ # teams_with_event_count_by_lib=get_teams_with_event_count_by_lib(period_start, period_end),
+ # teams_with_event_count_by_name=get_teams_with_event_count_by_name(period_start, period_end),
+ teams_with_recording_count_in_period=get_teams_with_recording_count_in_period(period_start, period_end),
+ teams_with_recording_count_total=get_teams_with_recording_count_total(),
+ teams_with_decide_requests_count_in_period=get_teams_with_feature_flag_requests_count_in_period(
+ period_start, period_end, FlagRequestType.DECIDE
+ ),
+ teams_with_decide_requests_count_in_month=get_teams_with_feature_flag_requests_count_in_period(
+ period_start.replace(day=1), period_end, FlagRequestType.DECIDE
+ ),
+ teams_with_local_evaluation_requests_count_in_period=get_teams_with_feature_flag_requests_count_in_period(
+ period_start, period_end, FlagRequestType.LOCAL_EVALUATION
+ ),
+ teams_with_local_evaluation_requests_count_in_month=get_teams_with_feature_flag_requests_count_in_period(
+ period_start.replace(day=1), period_end, FlagRequestType.LOCAL_EVALUATION
+ ),
+ teams_with_group_types_total=list(
+ GroupTypeMapping.objects.values("team_id").annotate(total=Count("id")).order_by("team_id")
+ ),
+ teams_with_dashboard_count=list(
+ Dashboard.objects.values("team_id").annotate(total=Count("id")).order_by("team_id")
+ ),
+ teams_with_dashboard_template_count=list(
+ Dashboard.objects.filter(creation_mode="template")
+ .values("team_id")
+ .annotate(total=Count("id"))
+ .order_by("team_id")
+ ),
+ teams_with_dashboard_shared_count=list(
+ Dashboard.objects.filter(sharingconfiguration__enabled=True)
+ .values("team_id")
+ .annotate(total=Count("id"))
+ .order_by("team_id")
+ ),
+ teams_with_dashboard_tagged_count=list(
+ Dashboard.objects.filter(tagged_items__isnull=False)
+ .values("team_id")
+ .annotate(total=Count("id"))
+ .order_by("team_id")
+ ),
+ teams_with_ff_count=list(FeatureFlag.objects.values("team_id").annotate(total=Count("id")).order_by("team_id")),
+ teams_with_ff_active_count=list(
+ FeatureFlag.objects.filter(active=True).values("team_id").annotate(total=Count("id")).order_by("team_id")
+ ),
+ teams_with_hogql_app_bytes_read=get_teams_with_hogql_metric(
+ period_start,
+ period_end,
+ metric="read_bytes",
+ query_types=["hogql_query", "HogQLQuery"],
+ access_method="",
+ ),
+ teams_with_hogql_app_rows_read=get_teams_with_hogql_metric(
+ period_start,
+ period_end,
+ metric="read_rows",
+ query_types=["hogql_query", "HogQLQuery"],
+ access_method="",
+ ),
+ teams_with_hogql_app_duration_ms=get_teams_with_hogql_metric(
+ period_start,
+ period_end,
+ metric="query_duration_ms",
+ query_types=["hogql_query", "HogQLQuery"],
+ access_method="",
+ ),
+ teams_with_hogql_api_bytes_read=get_teams_with_hogql_metric(
+ period_start,
+ period_end,
+ metric="read_bytes",
+ query_types=["hogql_query", "HogQLQuery"],
+ access_method="personal_api_key",
+ ),
+ teams_with_hogql_api_rows_read=get_teams_with_hogql_metric(
+ period_start,
+ period_end,
+ metric="read_rows",
+ query_types=["hogql_query", "HogQLQuery"],
+ access_method="personal_api_key",
+ ),
+ teams_with_hogql_api_duration_ms=get_teams_with_hogql_metric(
+ period_start,
+ period_end,
+ metric="query_duration_ms",
+ query_types=["hogql_query", "HogQLQuery"],
+ access_method="personal_api_key",
+ ),
+ teams_with_event_explorer_app_bytes_read=get_teams_with_hogql_metric(
+ period_start,
+ period_end,
+ metric="read_bytes",
+ query_types=["EventsQuery"],
+ access_method="",
+ ),
+ teams_with_event_explorer_app_rows_read=get_teams_with_hogql_metric(
+ period_start,
+ period_end,
+ metric="read_rows",
+ query_types=["EventsQuery"],
+ access_method="",
+ ),
+ teams_with_event_explorer_app_duration_ms=get_teams_with_hogql_metric(
+ period_start,
+ period_end,
+ metric="query_duration_ms",
+ query_types=["EventsQuery"],
+ access_method="",
+ ),
+ teams_with_event_explorer_api_bytes_read=get_teams_with_hogql_metric(
+ period_start,
+ period_end,
+ metric="read_bytes",
+ query_types=["EventsQuery"],
+ access_method="personal_api_key",
+ ),
+ teams_with_event_explorer_api_rows_read=get_teams_with_hogql_metric(
+ period_start,
+ period_end,
+ metric="read_rows",
+ query_types=["EventsQuery"],
+ access_method="personal_api_key",
+ ),
+ teams_with_event_explorer_api_duration_ms=get_teams_with_hogql_metric(
+ period_start,
+ period_end,
+ metric="query_duration_ms",
+ query_types=["EventsQuery"],
+ access_method="personal_api_key",
+ ),
+ )
+
+
+def _get_all_usage_data_as_team_rows(period_start: datetime, period_end: datetime) -> Dict[str, Any]:
+ """
+ Gets all usage data for the specified period as a map of team_id -> value. This makes it faster
+ to access the data than looping over all_data to find what we want.
+ """
+ all_data = _get_all_usage_data(period_start, period_end)
+ # convert it to a map of team_id -> value
+ for key, rows in all_data.items():
+ all_data[key] = convert_team_usage_rows_to_dict(rows)
+ return all_data
+
+
+def _get_teams_for_usage_reports() -> Sequence[Team]:
+ return list(
+ Team.objects.select_related("organization").exclude(
+ Q(organization__for_internal_metrics=True) | Q(is_demo=True)
+ )
+ )
+
+
+def _get_team_report(all_data: Dict[str, Any], team: Team) -> UsageReportCounters:
+ decide_requests_count_in_month = all_data["teams_with_decide_requests_count_in_month"].get(team.id, 0)
+ decide_requests_count_in_period = all_data["teams_with_decide_requests_count_in_period"].get(team.id, 0)
+ local_evaluation_requests_count_in_period = all_data["teams_with_local_evaluation_requests_count_in_period"].get(
+ team.id, 0
+ )
+ local_evaluation_requests_count_in_month = all_data["teams_with_local_evaluation_requests_count_in_month"].get(
+ team.id, 0
+ )
+ return UsageReportCounters(
+ event_count_lifetime=all_data["teams_with_event_count_lifetime"].get(team.id, 0),
+ event_count_in_period=all_data["teams_with_event_count_in_period"].get(team.id, 0),
+ event_count_in_month=all_data["teams_with_event_count_in_month"].get(team.id, 0),
+ event_count_with_groups_in_period=all_data["teams_with_event_count_with_groups_in_period"].get(team.id, 0),
+ # event_count_by_lib: Di all_data["teams_with_#"].get(team.id, 0),
+ # event_count_by_name: Di all_data["teams_with_#"].get(team.id, 0),
+ recording_count_in_period=all_data["teams_with_recording_count_in_period"].get(team.id, 0),
+ recording_count_total=all_data["teams_with_recording_count_total"].get(team.id, 0),
+ group_types_total=all_data["teams_with_group_types_total"].get(team.id, 0),
+ decide_requests_count_in_period=decide_requests_count_in_period,
+ decide_requests_count_in_month=decide_requests_count_in_month,
+ local_evaluation_requests_count_in_period=local_evaluation_requests_count_in_period,
+ local_evaluation_requests_count_in_month=local_evaluation_requests_count_in_month,
+ billable_feature_flag_requests_count_in_month=decide_requests_count_in_month
+ + (local_evaluation_requests_count_in_month * 10),
+ billable_feature_flag_requests_count_in_period=decide_requests_count_in_period
+ + (local_evaluation_requests_count_in_period * 10),
+ dashboard_count=all_data["teams_with_dashboard_count"].get(team.id, 0),
+ dashboard_template_count=all_data["teams_with_dashboard_template_count"].get(team.id, 0),
+ dashboard_shared_count=all_data["teams_with_dashboard_shared_count"].get(team.id, 0),
+ dashboard_tagged_count=all_data["teams_with_dashboard_tagged_count"].get(team.id, 0),
+ ff_count=all_data["teams_with_ff_count"].get(team.id, 0),
+ ff_active_count=all_data["teams_with_ff_active_count"].get(team.id, 0),
+ hogql_app_bytes_read=all_data["teams_with_hogql_app_bytes_read"].get(team.id, 0),
+ hogql_app_rows_read=all_data["teams_with_hogql_app_rows_read"].get(team.id, 0),
+ hogql_app_duration_ms=all_data["teams_with_hogql_app_duration_ms"].get(team.id, 0),
+ hogql_api_bytes_read=all_data["teams_with_hogql_api_bytes_read"].get(team.id, 0),
+ hogql_api_rows_read=all_data["teams_with_hogql_api_rows_read"].get(team.id, 0),
+ hogql_api_duration_ms=all_data["teams_with_hogql_api_duration_ms"].get(team.id, 0),
+ event_explorer_app_bytes_read=all_data["teams_with_event_explorer_app_bytes_read"].get(team.id, 0),
+ event_explorer_app_rows_read=all_data["teams_with_event_explorer_app_rows_read"].get(team.id, 0),
+ event_explorer_app_duration_ms=all_data["teams_with_event_explorer_app_duration_ms"].get(team.id, 0),
+ event_explorer_api_bytes_read=all_data["teams_with_event_explorer_api_bytes_read"].get(team.id, 0),
+ event_explorer_api_rows_read=all_data["teams_with_event_explorer_api_rows_read"].get(team.id, 0),
+ event_explorer_api_duration_ms=all_data["teams_with_event_explorer_api_duration_ms"].get(team.id, 0),
+ )
+
+
+def _add_team_report_to_org_reports(
+ org_reports: Dict[str, OrgReport], team: Team, team_report: UsageReportCounters, period_start: datetime
+) -> None:
+ org_id = str(team.organization.id)
+ if org_id not in org_reports:
+ org_report = OrgReport(
+ date=period_start.strftime("%Y-%m-%d"),
+ organization_id=org_id,
+ organization_name=team.organization.name,
+ organization_created_at=team.organization.created_at.isoformat(),
+ organization_user_count=get_org_user_count(org_id),
+ team_count=1,
+ teams={str(team.id): team_report},
+ **dataclasses.asdict(team_report), # Clone the team report as the basis
+ )
+ org_reports[org_id] = org_report
+ else:
+ org_report = org_reports[org_id]
+ org_report.teams[str(team.id)] = team_report
+ org_report.team_count += 1
+
+ # Iterate on all fields of the UsageReportCounters and add the values from the team report to the org report
+ for field in dataclasses.fields(UsageReportCounters):
+ if hasattr(team_report, field.name):
+ setattr(
+ org_report,
+ field.name,
+ getattr(org_report, field.name) + getattr(team_report, field.name),
+ )
+
+
+def _get_all_org_reports(period_start: datetime, period_end: datetime) -> Dict[str, OrgReport]:
+ all_data = _get_all_usage_data_as_team_rows(period_start, period_end)
+
+ teams = _get_teams_for_usage_reports()
+
+ org_reports: Dict[str, OrgReport] = {}
+
+ print("Generating reports for teams...") # noqa T201
+ time_now = datetime.now()
+ for team in teams:
+ team_report = _get_team_report(all_data, team)
+ _add_team_report_to_org_reports(org_reports, team, team_report, period_start)
+
+ time_since = datetime.now() - time_now
+ print(f"Generating reports for teams took {time_since.total_seconds()} seconds.") # noqa T201
+ return org_reports
+
+
+def _get_full_org_usage_report(org_report: OrgReport, instance_metadata: InstanceMetadata) -> FullUsageReport:
+ return FullUsageReport(
+ **dataclasses.asdict(org_report),
+ **dataclasses.asdict(instance_metadata),
+ )
+
+
+def _get_full_org_usage_report_as_dict(full_report: FullUsageReport) -> Dict[str, Any]:
+ return dataclasses.asdict(full_report)
+
+
@app.task(ignore_result=True, max_retries=3, autoretry_for=(Exception,))
def send_all_org_usage_reports(
dry_run: bool = False,
@@ -541,7 +816,7 @@ def send_all_org_usage_reports(
capture_event_name: Optional[str] = None,
skip_capture_event: bool = False,
only_organization_id: Optional[str] = None,
-) -> List[dict]: # Dict[str, OrgReport]:
+) -> None:
capture_event_name = capture_event_name or "organization usage report"
at_date = parser.parse(at) if at else None
@@ -550,250 +825,8 @@ def send_all_org_usage_reports(
instance_metadata = get_instance_metadata(period)
- # Clickhouse is good at counting things so we count across all teams rather than doing it one by one
try:
- all_data = dict(
- teams_with_event_count_lifetime=get_teams_with_event_count_lifetime(),
- teams_with_event_count_in_period=get_teams_with_billable_event_count_in_period(
- period_start, period_end, count_distinct=True
- ),
- teams_with_event_count_in_month=get_teams_with_billable_event_count_in_period(
- period_start.replace(day=1), period_end
- ),
- teams_with_event_count_with_groups_in_period=get_teams_with_event_count_with_groups_in_period(
- period_start, period_end
- ),
- # teams_with_event_count_by_lib=get_teams_with_event_count_by_lib(period_start, period_end),
- # teams_with_event_count_by_name=get_teams_with_event_count_by_name(period_start, period_end),
- teams_with_recording_count_in_period=get_teams_with_recording_count_in_period(period_start, period_end),
- teams_with_recording_count_total=get_teams_with_recording_count_total(),
- teams_with_decide_requests_count_in_period=get_teams_with_feature_flag_requests_count_in_period(
- period_start, period_end, FlagRequestType.DECIDE
- ),
- teams_with_decide_requests_count_in_month=get_teams_with_feature_flag_requests_count_in_period(
- period_start.replace(day=1), period_end, FlagRequestType.DECIDE
- ),
- teams_with_local_evaluation_requests_count_in_period=get_teams_with_feature_flag_requests_count_in_period(
- period_start, period_end, FlagRequestType.LOCAL_EVALUATION
- ),
- teams_with_local_evaluation_requests_count_in_month=get_teams_with_feature_flag_requests_count_in_period(
- period_start.replace(day=1), period_end, FlagRequestType.LOCAL_EVALUATION
- ),
- teams_with_group_types_total=list(
- GroupTypeMapping.objects.values("team_id").annotate(total=Count("id")).order_by("team_id")
- ),
- teams_with_dashboard_count=list(
- Dashboard.objects.values("team_id").annotate(total=Count("id")).order_by("team_id")
- ),
- teams_with_dashboard_template_count=list(
- Dashboard.objects.filter(creation_mode="template")
- .values("team_id")
- .annotate(total=Count("id"))
- .order_by("team_id")
- ),
- teams_with_dashboard_shared_count=list(
- Dashboard.objects.filter(sharingconfiguration__enabled=True)
- .values("team_id")
- .annotate(total=Count("id"))
- .order_by("team_id")
- ),
- teams_with_dashboard_tagged_count=list(
- Dashboard.objects.filter(tagged_items__isnull=False)
- .values("team_id")
- .annotate(total=Count("id"))
- .order_by("team_id")
- ),
- teams_with_ff_count=list(
- FeatureFlag.objects.values("team_id").annotate(total=Count("id")).order_by("team_id")
- ),
- teams_with_ff_active_count=list(
- FeatureFlag.objects.filter(active=True)
- .values("team_id")
- .annotate(total=Count("id"))
- .order_by("team_id")
- ),
- teams_with_hogql_app_bytes_read=get_teams_with_hogql_metric(
- period_start,
- period_end,
- metric="read_bytes",
- query_types=["hogql_query", "HogQLQuery"],
- access_method="",
- ),
- teams_with_hogql_app_rows_read=get_teams_with_hogql_metric(
- period_start,
- period_end,
- metric="read_rows",
- query_types=["hogql_query", "HogQLQuery"],
- access_method="",
- ),
- teams_with_hogql_app_duration_ms=get_teams_with_hogql_metric(
- period_start,
- period_end,
- metric="query_duration_ms",
- query_types=["hogql_query", "HogQLQuery"],
- access_method="",
- ),
- teams_with_hogql_api_bytes_read=get_teams_with_hogql_metric(
- period_start,
- period_end,
- metric="read_bytes",
- query_types=["hogql_query", "HogQLQuery"],
- access_method="personal_api_key",
- ),
- teams_with_hogql_api_rows_read=get_teams_with_hogql_metric(
- period_start,
- period_end,
- metric="read_rows",
- query_types=["hogql_query", "HogQLQuery"],
- access_method="personal_api_key",
- ),
- teams_with_hogql_api_duration_ms=get_teams_with_hogql_metric(
- period_start,
- period_end,
- metric="query_duration_ms",
- query_types=["hogql_query", "HogQLQuery"],
- access_method="personal_api_key",
- ),
- teams_with_event_explorer_app_bytes_read=get_teams_with_hogql_metric(
- period_start,
- period_end,
- metric="read_bytes",
- query_types=["EventsQuery"],
- access_method="",
- ),
- teams_with_event_explorer_app_rows_read=get_teams_with_hogql_metric(
- period_start,
- period_end,
- metric="read_rows",
- query_types=["EventsQuery"],
- access_method="",
- ),
- teams_with_event_explorer_app_duration_ms=get_teams_with_hogql_metric(
- period_start,
- period_end,
- metric="query_duration_ms",
- query_types=["EventsQuery"],
- access_method="",
- ),
- teams_with_event_explorer_api_bytes_read=get_teams_with_hogql_metric(
- period_start,
- period_end,
- metric="read_bytes",
- query_types=["EventsQuery"],
- access_method="personal_api_key",
- ),
- teams_with_event_explorer_api_rows_read=get_teams_with_hogql_metric(
- period_start,
- period_end,
- metric="read_rows",
- query_types=["EventsQuery"],
- access_method="personal_api_key",
- ),
- teams_with_event_explorer_api_duration_ms=get_teams_with_hogql_metric(
- period_start,
- period_end,
- metric="query_duration_ms",
- query_types=["EventsQuery"],
- access_method="personal_api_key",
- ),
- )
-
- # The data is all as raw rows which will dramatically slow down the upcoming loop
- # so we convert it to a map of team_id -> value
- for key, rows in all_data.items():
- all_data[key] = convert_team_usage_rows_to_dict(rows)
-
- teams: Sequence[Team] = list(
- Team.objects.select_related("organization").exclude(
- Q(organization__for_internal_metrics=True) | Q(is_demo=True)
- )
- )
-
- org_reports: Dict[str, OrgReport] = {}
-
- print("Generating reports for teams...") # noqa T201
- time_now = datetime.now()
- for team in teams:
- decide_requests_count_in_month = all_data["teams_with_decide_requests_count_in_month"].get(team.id, 0)
- decide_requests_count_in_period = all_data["teams_with_decide_requests_count_in_period"].get(team.id, 0)
- local_evaluation_requests_count_in_period = all_data[
- "teams_with_local_evaluation_requests_count_in_period"
- ].get(team.id, 0)
- local_evaluation_requests_count_in_month = all_data[
- "teams_with_local_evaluation_requests_count_in_month"
- ].get(team.id, 0)
-
- team_report = UsageReportCounters(
- event_count_lifetime=all_data["teams_with_event_count_lifetime"].get(team.id, 0),
- event_count_in_period=all_data["teams_with_event_count_in_period"].get(team.id, 0),
- event_count_in_month=all_data["teams_with_event_count_in_month"].get(team.id, 0),
- event_count_with_groups_in_period=all_data["teams_with_event_count_with_groups_in_period"].get(
- team.id, 0
- ),
- # event_count_by_lib: Di all_data["teams_with_#"].get(team.id, 0),
- # event_count_by_name: Di all_data["teams_with_#"].get(team.id, 0),
- recording_count_in_period=all_data["teams_with_recording_count_in_period"].get(team.id, 0),
- recording_count_total=all_data["teams_with_recording_count_total"].get(team.id, 0),
- group_types_total=all_data["teams_with_group_types_total"].get(team.id, 0),
- decide_requests_count_in_period=decide_requests_count_in_period,
- decide_requests_count_in_month=decide_requests_count_in_month,
- local_evaluation_requests_count_in_period=local_evaluation_requests_count_in_period,
- local_evaluation_requests_count_in_month=local_evaluation_requests_count_in_month,
- billable_feature_flag_requests_count_in_month=decide_requests_count_in_month
- + (local_evaluation_requests_count_in_month * 10),
- billable_feature_flag_requests_count_in_period=decide_requests_count_in_period
- + (local_evaluation_requests_count_in_period * 10),
- dashboard_count=all_data["teams_with_dashboard_count"].get(team.id, 0),
- dashboard_template_count=all_data["teams_with_dashboard_template_count"].get(team.id, 0),
- dashboard_shared_count=all_data["teams_with_dashboard_shared_count"].get(team.id, 0),
- dashboard_tagged_count=all_data["teams_with_dashboard_tagged_count"].get(team.id, 0),
- ff_count=all_data["teams_with_ff_count"].get(team.id, 0),
- ff_active_count=all_data["teams_with_ff_active_count"].get(team.id, 0),
- hogql_app_bytes_read=all_data["teams_with_hogql_app_bytes_read"].get(team.id, 0),
- hogql_app_rows_read=all_data["teams_with_hogql_app_rows_read"].get(team.id, 0),
- hogql_app_duration_ms=all_data["teams_with_hogql_app_duration_ms"].get(team.id, 0),
- hogql_api_bytes_read=all_data["teams_with_hogql_api_bytes_read"].get(team.id, 0),
- hogql_api_rows_read=all_data["teams_with_hogql_api_rows_read"].get(team.id, 0),
- hogql_api_duration_ms=all_data["teams_with_hogql_api_duration_ms"].get(team.id, 0),
- event_explorer_app_bytes_read=all_data["teams_with_event_explorer_app_bytes_read"].get(team.id, 0),
- event_explorer_app_rows_read=all_data["teams_with_event_explorer_app_rows_read"].get(team.id, 0),
- event_explorer_app_duration_ms=all_data["teams_with_event_explorer_app_duration_ms"].get(team.id, 0),
- event_explorer_api_bytes_read=all_data["teams_with_event_explorer_api_bytes_read"].get(team.id, 0),
- event_explorer_api_rows_read=all_data["teams_with_event_explorer_api_rows_read"].get(team.id, 0),
- event_explorer_api_duration_ms=all_data["teams_with_event_explorer_api_duration_ms"].get(team.id, 0),
- )
-
- org_id = str(team.organization.id)
-
- if org_id not in org_reports:
- org_report = OrgReport(
- date=period_start.strftime("%Y-%m-%d"),
- organization_id=org_id,
- organization_name=team.organization.name,
- organization_created_at=team.organization.created_at.isoformat(),
- organization_user_count=get_org_user_count(org_id),
- team_count=1,
- teams={str(team.id): team_report},
- **dataclasses.asdict(team_report), # Clone the team report as the basis
- )
- org_reports[org_id] = org_report
- else:
- org_report = org_reports[org_id]
- org_report.teams[str(team.id)] = team_report
- org_report.team_count += 1
-
- # Iterate on all fields of the UsageReportCounters and add the values from the team report to the org report
- for field in dataclasses.fields(UsageReportCounters):
- if hasattr(team_report, field.name):
- setattr(
- org_report,
- field.name,
- getattr(org_report, field.name) + getattr(team_report, field.name),
- )
- time_since = datetime.now() - time_now
- print(f"Generating reports for teams took {time_since.total_seconds()} seconds.") # noqa T201
-
- all_reports = []
+ org_reports = _get_all_org_reports(period_start, period_end)
print("Sending usage reports to PostHog and Billing...") # noqa T201
time_now = datetime.now()
@@ -803,12 +836,8 @@ def send_all_org_usage_reports(
if only_organization_id and only_organization_id != org_id:
continue
- full_report = FullUsageReport(
- **dataclasses.asdict(org_report),
- **dataclasses.asdict(instance_metadata),
- )
- full_report_dict = dataclasses.asdict(full_report)
- all_reports.append(full_report_dict)
+ full_report = _get_full_org_usage_report(org_report, instance_metadata)
+ full_report_dict = _get_full_org_usage_report_as_dict(full_report)
if dry_run:
continue
@@ -823,7 +852,6 @@ def send_all_org_usage_reports(
send_report_to_billing_service.delay(org_id, full_report_dict)
time_since = datetime.now() - time_now
print(f"Sending usage reports to PostHog and Billing took {time_since.total_seconds()} seconds.") # noqa T201
- return all_reports
except Exception as err:
capture_exception(err)
raise err
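
For context on the refactor above: the inlined org-report assembly was replaced by calls to _get_all_org_reports, _get_full_org_usage_report, and _get_full_org_usage_report_as_dict. A minimal sketch of what the two report helpers are assumed to do, inferred from the removed lines (the real definitions live elsewhere in this module and are not shown here):

import dataclasses

def _get_full_org_usage_report(org_report, instance_metadata):
    # Merge the per-organization usage counters with instance-level metadata,
    # exactly as the removed inline FullUsageReport(...) construction did.
    return FullUsageReport(
        **dataclasses.asdict(org_report),
        **dataclasses.asdict(instance_metadata),
    )

def _get_full_org_usage_report_as_dict(full_report):
    # Serialize the merged report for the capture call and the billing service.
    return dataclasses.asdict(full_report)
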
diff --git a/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py
index 392534fc8999c..b4e51bc9f8b8e 100644
--- a/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py
+++ b/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py
@@ -3,11 +3,13 @@
import gzip
import itertools
import json
+import os
from random import randint
from unittest import mock
from uuid import uuid4
import boto3
+import botocore.exceptions
import brotli
import pytest
from django.conf import settings
@@ -40,6 +42,18 @@
TEST_ROOT_BUCKET = "test-batch-exports"
+
+def check_valid_credentials() -> bool:
+ """Check if there are valid AWS credentials in the environment."""
+ sts = boto3.client("sts")
+ try:
+ sts.get_caller_identity()
+ except (botocore.exceptions.ClientError, botocore.exceptions.NoCredentialsError):
+ # Invalid credentials surface as ClientError; an environment with no credentials at all raises NoCredentialsError instead.
+ return False
+ else:
+ return True
+
+
create_test_client = functools.partial(boto3.client, endpoint_url=settings.OBJECT_STORAGE_ENDPOINT)
@@ -422,6 +436,165 @@ async def test_s3_export_workflow_with_minio_bucket(
assert_events_in_s3(s3_client, bucket_name, prefix, events, compression, exclude_events)
+@pytest.mark.skipif(
+ "S3_TEST_BUCKET" not in os.environ or not check_valid_credentials(),
+ reason="AWS credentials not set in environment or missing S3_TEST_BUCKET variable",
+)
+@pytest.mark.django_db
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+ "interval,compression,encryption,exclude_events",
+ itertools.product(["hour", "day"], [None, "gzip", "brotli"], [None, "AES256", "aws:kms"], [None, ["test-exclude"]]),
+)
+async def test_s3_export_workflow_with_s3_bucket(interval, compression, encryption, exclude_events):
+ """Test S3 Export Workflow end-to-end by using an S3 bucket.
+
+ The S3_TEST_BUCKET environment variable is used to set the name of the bucket for this test.
+ This test will be skipped if no valid AWS credentials exist, or if the S3_TEST_BUCKET environment
+ variable is not set.
+
+ The workflow should update the batch export run status to completed and produce the expected
+ records to the S3 bucket.
+ """
+ bucket_name = os.getenv("S3_TEST_BUCKET")
+ kms_key_id = os.getenv("S3_TEST_KMS_KEY_ID")
+ prefix = f"posthog-events-{str(uuid4())}"
+ destination_data = {
+ "type": "S3",
+ "config": {
+ "bucket_name": bucket_name,
+ "region": "us-east-1",
+ "prefix": prefix,
+ "aws_access_key_id": "object_storage_root_user",
+ "aws_secret_access_key": "object_storage_root_password",
+ "compression": compression,
+ "exclude_events": exclude_events,
+ "encryption": encryption,
+ "kms_key_id": kms_key_id if encryption == "aws:kms" else None,
+ },
+ }
+
+ batch_export_data = {
+ "name": "my-production-s3-bucket-destination",
+ "destination": destination_data,
+ "interval": interval,
+ }
+
+ organization = await acreate_organization("test")
+ team = await acreate_team(organization=organization)
+ batch_export = await acreate_batch_export(
+ team_id=team.pk,
+ name=batch_export_data["name"],
+ destination_data=batch_export_data["destination"],
+ interval=batch_export_data["interval"],
+ )
+
+ events: list[EventValues] = [
+ {
+ "uuid": str(uuid4()),
+ "event": "test",
+ "timestamp": "2023-04-25 13:30:00.000000",
+ "created_at": "2023-04-25 13:30:00.000000",
+ "inserted_at": "2023-04-25 13:30:00.000000",
+ "_timestamp": "2023-04-25 13:30:00",
+ "person_id": str(uuid4()),
+ "person_properties": {"$browser": "Chrome", "$os": "Mac OS X"},
+ "team_id": team.pk,
+ "properties": {"$browser": "Chrome", "$os": "Mac OS X"},
+ "distinct_id": str(uuid4()),
+ "elements_chain": "this is a comman, separated, list, of css selectors(?)",
+ },
+ {
+ "uuid": str(uuid4()),
+ "event": "test-exclude",
+ "timestamp": "2023-04-25 14:29:00.000000",
+ "created_at": "2023-04-25 14:29:00.000000",
+ "inserted_at": "2023-04-25 14:29:00.000000",
+ "_timestamp": "2023-04-25 14:29:00",
+ "person_id": str(uuid4()),
+ "person_properties": {"$browser": "Chrome", "$os": "Mac OS X"},
+ "team_id": team.pk,
+ "properties": {"$browser": "Chrome", "$os": "Mac OS X"},
+ "distinct_id": str(uuid4()),
+ "elements_chain": "this is a comman, separated, list, of css selectors(?)",
+ },
+ ]
+
+ if interval == "day":
+ # Add an event outside the hour range but within the day range to ensure it's exported too.
+ events_outside_hour: list[EventValues] = [
+ {
+ "uuid": str(uuid4()),
+ "event": "test",
+ "timestamp": "2023-04-25 00:30:00.000000",
+ "created_at": "2023-04-25 00:30:00.000000",
+ "inserted_at": "2023-04-25 00:30:00.000000",
+ "_timestamp": "2023-04-25 00:30:00",
+ "person_id": str(uuid4()),
+ "person_properties": {"$browser": "Chrome", "$os": "Mac OS X"},
+ "team_id": team.pk,
+ "properties": {"$browser": "Chrome", "$os": "Mac OS X"},
+ "distinct_id": str(uuid4()),
+ "elements_chain": "this is a comman, separated, list, of css selectors(?)",
+ }
+ ]
+ events += events_outside_hour
+
+ ch_client = ClickHouseClient(
+ url=settings.CLICKHOUSE_HTTP_URL,
+ user=settings.CLICKHOUSE_USER,
+ password=settings.CLICKHOUSE_PASSWORD,
+ database=settings.CLICKHOUSE_DATABASE,
+ )
+
+ # Insert some data into the `sharded_events` table.
+ await insert_events(
+ client=ch_client,
+ events=events,
+ )
+
+ workflow_id = str(uuid4())
+ inputs = S3BatchExportInputs(
+ team_id=team.pk,
+ batch_export_id=str(batch_export.id),
+ data_interval_end="2023-04-25 14:30:00.000000",
+ interval=interval,
+ **batch_export.destination.config,
+ )
+
+ s3_client = boto3.client("s3")
+
+ def create_s3_client(*args, **kwargs):
+ """Mock function to return an already initialized S3 client."""
+ return s3_client
+
+ async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
+ async with Worker(
+ activity_environment.client,
+ task_queue=settings.TEMPORAL_TASK_QUEUE,
+ workflows=[S3BatchExportWorkflow],
+ activities=[create_export_run, insert_into_s3_activity, update_export_run_status],
+ workflow_runner=UnsandboxedWorkflowRunner(),
+ ):
+ with mock.patch("posthog.temporal.workflows.s3_batch_export.boto3.client", side_effect=create_s3_client):
+ await activity_environment.client.execute_workflow(
+ S3BatchExportWorkflow.run,
+ inputs,
+ id=workflow_id,
+ task_queue=settings.TEMPORAL_TASK_QUEUE,
+ retry_policy=RetryPolicy(maximum_attempts=1),
+ execution_timeout=dt.timedelta(seconds=10),
+ )
+
+ runs = await afetch_batch_export_runs(batch_export_id=batch_export.id)
+ assert len(runs) == 1
+
+ run = runs[0]
+ assert run.status == "Completed"
+
+ assert_events_in_s3(s3_client, bucket_name, prefix, events, compression, exclude_events)
+
+
@pytest.mark.django_db
@pytest.mark.asyncio
@pytest.mark.parametrize("compression", [None, "gzip"])
diff --git a/posthog/temporal/workflows/postgres_batch_export.py b/posthog/temporal/workflows/postgres_batch_export.py
index a396f361b77c5..b81c7496b3adb 100644
--- a/posthog/temporal/workflows/postgres_batch_export.py
+++ b/posthog/temporal/workflows/postgres_batch_export.py
@@ -58,9 +58,10 @@ def copy_tsv_to_postgres(tsv_file, postgres_connection, schema: str, table_name:
tsv_file.seek(0)
with postgres_connection.cursor() as cursor:
+ cursor.execute(sql.SQL("SET search_path TO {schema}").format(schema=sql.Identifier(schema)))
cursor.copy_from(
tsv_file,
- sql.Identifier(schema, table_name).as_string(postgres_connection),
+ table_name,
null="",
columns=schema_columns,
)
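
A note on the copy_from change above: psycopg2 2.9 (which this PR also upgrades to in requirements.in) quotes the table argument of copy_from as a single identifier, so passing a pre-quoted schema-qualified name no longer works; setting search_path first and passing the bare table name sidesteps that. An alternative sketch that keeps the fully qualified name by composing the COPY statement and using copy_expert (illustrative, not part of the diff):

from psycopg2 import sql

def copy_tsv_with_copy_expert(tsv_file, postgres_connection, schema, table_name, schema_columns):
    # Compose a safely-quoted, schema-qualified COPY statement and stream the TSV through it.
    tsv_file.seek(0)
    statement = sql.SQL("COPY {table} ({fields}) FROM STDIN WITH (FORMAT text, NULL '')").format(
        table=sql.Identifier(schema, table_name),
        fields=sql.SQL(", ").join(map(sql.Identifier, schema_columns)),
    )
    with postgres_connection.cursor() as cursor:
        cursor.copy_expert(statement.as_string(postgres_connection), tsv_file)
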
@@ -245,7 +246,11 @@ async def run(self, inputs: PostgresBatchExportInputs):
initial_interval=dt.timedelta(seconds=10),
maximum_interval=dt.timedelta(seconds=120),
maximum_attempts=10,
- non_retryable_error_types=[],
+ non_retryable_error_types=[
+ # Raised on errors related to the database's own operation,
+ # e.g. an unexpected disconnect or a missing database/object.
+ "OperationalError"
+ ],
),
)
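
On the retry-policy change: with the Temporal Python SDK, an unhandled exception raised inside an activity is reported with the exception's class name as the failure type, which is what non_retryable_error_types is matched against. A small sketch of that relationship (activity name and error message are illustrative):

import psycopg2
from temporalio import activity
from temporalio.common import RetryPolicy

@activity.defn
async def insert_into_postgres_sketch() -> None:
    # An unhandled psycopg2.OperationalError here reaches the workflow as a failure
    # whose type is the class name "OperationalError", so the policy below stops retrying.
    raise psycopg2.OperationalError("server closed the connection unexpectedly")

retry_policy = RetryPolicy(maximum_attempts=10, non_retryable_error_types=["OperationalError"])
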
diff --git a/posthog/temporal/workflows/s3_batch_export.py b/posthog/temporal/workflows/s3_batch_export.py
index 028b6f422e26f..13bbf183e5d06 100644
--- a/posthog/temporal/workflows/s3_batch_export.py
+++ b/posthog/temporal/workflows/s3_batch_export.py
@@ -85,15 +85,20 @@ class S3MultiPartUploadState(typing.NamedTuple):
parts: list[dict[str, str | int]]
+Part = dict[str, str | int]
+
+
class S3MultiPartUpload:
"""An S3 multi-part upload."""
- def __init__(self, s3_client, bucket_name, key):
+ def __init__(self, s3_client, bucket_name: str, key: str, encryption: str | None, kms_key_id: str | None):
self.s3_client = s3_client
self.bucket_name = bucket_name
self.key = key
- self.upload_id = None
- self.parts = []
+ self.encryption = encryption
+ self.kms_key_id = kms_key_id
+ self.upload_id: str | None = None
+ self.parts: list[Part] = []
def to_state(self) -> S3MultiPartUploadState:
"""Produce state tuple that can be used to resume this S3MultiPartUpload."""
@@ -119,10 +124,21 @@ def start(self) -> str:
if self.is_upload_in_progress() is True:
raise UploadAlreadyInProgressError(self.upload_id)
- multipart_response = self.s3_client.create_multipart_upload(Bucket=self.bucket_name, Key=self.key)
- self.upload_id = multipart_response["UploadId"]
+ optional_kwargs = {}
+ if self.encryption:
+ optional_kwargs["ServerSideEncryption"] = self.encryption
+ if self.kms_key_id:
+ optional_kwargs["SSEKMSKeyId"] = self.kms_key_id
- return self.upload_id
+ multipart_response = self.s3_client.create_multipart_upload(
+ Bucket=self.bucket_name,
+ Key=self.key,
+ **optional_kwargs,
+ )
+ upload_id: str = multipart_response["UploadId"]
+ self.upload_id = upload_id
+
+ return upload_id
def continue_from_state(self, state: S3MultiPartUploadState):
"""Continue this S3MultiPartUpload from a previous state."""
@@ -230,6 +246,8 @@ class S3InsertInputs:
aws_secret_access_key: str | None = None
compression: str | None = None
exclude_events: list[str] | None = None
+ encryption: str | None = None
+ kms_key_id: str | None = None
def initialize_and_resume_multipart_upload(inputs: S3InsertInputs) -> tuple[S3MultiPartUpload, str]:
@@ -241,7 +259,7 @@ def initialize_and_resume_multipart_upload(inputs: S3InsertInputs) -> tuple[S3Mu
aws_access_key_id=inputs.aws_access_key_id,
aws_secret_access_key=inputs.aws_secret_access_key,
)
- s3_upload = S3MultiPartUpload(s3_client, inputs.bucket_name, key)
+ s3_upload = S3MultiPartUpload(s3_client, inputs.bucket_name, key, inputs.encryption, inputs.kms_key_id)
details = activity.info().heartbeat_details
@@ -442,6 +460,8 @@ async def run(self, inputs: S3BatchExportInputs):
data_interval_end=data_interval_end.isoformat(),
compression=inputs.compression,
exclude_events=inputs.exclude_events,
+ encryption=inputs.encryption,
+ kms_key_id=inputs.kms_key_id,
)
try:
await workflow.execute_activity(
diff --git a/posthog/test/test_datetime.py b/posthog/test/test_datetime.py
new file mode 100644
index 0000000000000..b25fa7098f9b5
--- /dev/null
+++ b/posthog/test/test_datetime.py
@@ -0,0 +1,33 @@
+from datetime import datetime, timezone
+
+from posthog.datetime import start_of_hour, start_of_day, end_of_day, start_of_week, start_of_month
+
+
+def test_start_of_hour():
+ assert start_of_hour(datetime.fromisoformat("2023-02-08T12:05:23+00:00")) == datetime.fromisoformat(
+ "2023-02-08T12:00:00+00:00"
+ )
+
+
+def test_start_of_day():
+ assert start_of_day(datetime.fromisoformat("2023-02-08T12:05:23+00:00")) == datetime.fromisoformat(
+ "2023-02-08T00:00:00+00:00"
+ )
+
+
+def test_end_of_day():
+ assert end_of_day(datetime.fromisoformat("2023-02-08T12:05:23+00:00")) == datetime(
+ 2023, 2, 8, 23, 59, 59, 999999, tzinfo=timezone.utc
+ )
+
+
+def test_start_of_week():
+ assert start_of_week(datetime.fromisoformat("2023-02-08T12:05:23+00:00")) == datetime.fromisoformat(
+ "2023-02-05T00:00:00+00:00"
+ )
+
+
+def test_start_of_month():
+ assert start_of_month(datetime.fromisoformat("2023-02-08T12:05:23+00:00")) == datetime.fromisoformat(
+ "2023-02-01T00:00:00+00:00"
+ )
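
The helpers exercised by these tests live in posthog/datetime.py, which is not part of this section. A sketch of implementations consistent with the assertions above (the actual module may differ):

from datetime import datetime, timedelta

def start_of_hour(dt: datetime) -> datetime:
    return dt.replace(minute=0, second=0, microsecond=0)

def start_of_day(dt: datetime) -> datetime:
    return dt.replace(hour=0, minute=0, second=0, microsecond=0)

def end_of_day(dt: datetime) -> datetime:
    return start_of_day(dt) + timedelta(days=1) - timedelta(microseconds=1)

def start_of_week(dt: datetime) -> datetime:
    # The test expects the week to start on Sunday (2023-02-05 for 2023-02-08).
    return start_of_day(dt) - timedelta(days=(dt.weekday() + 1) % 7)

def start_of_month(dt: datetime) -> datetime:
    return start_of_day(dt).replace(day=1)
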
diff --git a/posthog/test/test_decorators.py b/posthog/test/test_decorators.py
index 9cbb181c3f261..a6bc176072377 100644
--- a/posthog/test/test_decorators.py
+++ b/posthog/test/test_decorators.py
@@ -1,12 +1,18 @@
-from posthog.decorators import cached_by_filters
+from datetime import datetime
+from freezegun import freeze_time
+from posthog.decorators import cached_by_filters, is_stale
from django.core.cache import cache
from rest_framework.test import APIRequestFactory
from rest_framework.viewsets import GenericViewSet
from rest_framework.response import Response
+from posthog.models.filters.filter import Filter
+from posthog.models.filters.path_filter import PathFilter
+from posthog.models.filters.retention_filter import RetentionFilter
+from posthog.models.filters.stickiness_filter import StickinessFilter
-from posthog.test.base import APIBaseTest
+from posthog.test.base import APIBaseTest, BaseTest
from posthog.api import router
factory = APIRequestFactory()
@@ -22,7 +28,7 @@ def calculate_with_filters(self, request):
return {"result": "bla"}
-class TestDecorators(APIBaseTest):
+class TestCachedByFiltersDecorator(APIBaseTest):
def setUp(self) -> None:
cache.clear()
@@ -61,3 +67,161 @@ def test_cache_bypass_with_invalidation_key_param(self) -> None:
response = self.client.get(f"/api/dummy", data={"cache_invalidation_key": "abc"}).json()
assert response["is_cached"] is False
+
+ def test_discards_stale_response(self) -> None:
+ with freeze_time("2023-02-08T12:05:23Z"):
+ # cache the result
+ self.client.get(f"/api/dummy").json()
+
+ with freeze_time("2023-02-10T12:00:00Z"):
+ # we don't need to add filters, since -7d with a
+ # daily interval is the default
+ response = self.client.get(f"/api/dummy").json()
+ assert response["is_cached"] is False
+
+
+class TestIsStaleHelper(BaseTest):
+ cached_response = {"last_refresh": datetime.fromisoformat("2023-02-08T12:05:23+00:00"), "result": "bla"}
+
+ def test_keeps_fresh_hourly_result(self) -> None:
+ with freeze_time("2023-02-08T12:59:59Z"):
+ filter = Filter(data={"interval": "hour"})
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is False
+
+ def test_discards_stale_hourly_result(self) -> None:
+ with freeze_time("2023-02-08T13:00:00Z"):
+ filter = Filter(data={"interval": "hour"})
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is True
+
+ def test_keeps_fresh_daily_result(self) -> None:
+ with freeze_time("2023-02-08T23:59:59Z"):
+ filter = Filter(data={"interval": "day"})
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is False
+
+ def test_discards_stale_daily_result(self) -> None:
+ with freeze_time("2023-02-09T00:00:00Z"):
+ filter = Filter(data={"interval": "day"})
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is True
+
+ def test_keeps_fresh_weekly_result(self) -> None:
+ with freeze_time("2023-02-11T23:59:59Z"):
+ filter = Filter(data={"interval": "week"})
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is False
+
+ def test_discards_stale_weekly_result(self) -> None:
+ with freeze_time("2023-02-12T00:00:00Z"):
+ filter = Filter(data={"interval": "week"})
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is True
+
+ def test_keeps_fresh_monthly_result(self) -> None:
+ with freeze_time("2023-02-28T23:59:59Z"):
+ filter = Filter(data={"interval": "month"})
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is False
+
+ def test_discards_stale_monthly_result(self) -> None:
+ with freeze_time("2023-03-01T00:00:00Z"):
+ filter = Filter(data={"interval": "month"})
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is True
+
+ def test_keeps_fresh_result_from_fixed_range(self) -> None:
+ filter = Filter(data={"interval": "day", "date_from": "2000-01-01", "date_to": "2000-01-10"})
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is False
+
+ def test_keeps_fresh_result_with_date_to_in_future(self) -> None:
+ with freeze_time("2023-02-08T23:59:59Z"):
+ filter = Filter(data={"interval": "day", "date_to": "2999-01-01"})
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is False
+
+ def test_keeps_fresh_stickiness_result(self) -> None:
+ with freeze_time("2023-02-08T23:59:59Z"):
+ filter = StickinessFilter(data={}, team=self.team)
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is False
+
+ def test_discards_stale_stickiness_result(self) -> None:
+ with freeze_time("2023-02-09T00:00:00Z"):
+ filter = StickinessFilter(data={}, team=self.team)
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is True
+
+ def test_keeps_fresh_path_result(self) -> None:
+ with freeze_time("2023-02-08T23:59:59Z"):
+ filter = PathFilter()
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is False
+
+ def test_discards_stale_path_result(self) -> None:
+ with freeze_time("2023-02-09T00:00:00Z"):
+ filter = PathFilter()
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is True
+
+ def test_keeps_fresh_retention_hourly_result(self) -> None:
+ with freeze_time("2023-02-08T12:59:59Z"):
+ filter = RetentionFilter(data={"period": "Hour"})
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is False
+
+ def test_discards_stale_retention_hourly_result(self) -> None:
+ with freeze_time("2023-02-08T13:00:00Z"):
+ filter = RetentionFilter(data={"period": "Hour"})
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is True
+
+ def test_keeps_fresh_retention_result(self) -> None:
+ with freeze_time("2023-02-08T23:59:59Z"):
+ filter = RetentionFilter()
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is False
+
+ def test_discards_stale_retention_result(self) -> None:
+ with freeze_time("2023-02-09T00:00:00Z"):
+ filter = RetentionFilter()
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is True
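
The is_stale helper under test is defined in posthog/decorators.py and not shown in this section. A behavioral sketch consistent with the assertions above, expressed over plain values to avoid assuming Filter internals (names here are illustrative):

from datetime import datetime, timezone
from posthog.datetime import start_of_hour, start_of_day, start_of_week, start_of_month

PERIOD_START = {
    "hour": start_of_hour,
    "day": start_of_day,
    "week": start_of_week,
    "month": start_of_month,
}

def is_stale_sketch(interval: str, date_to: datetime | None, last_refresh: datetime) -> bool:
    # A cached result is stale once its last refresh predates the start of the
    # interval that "now" falls in; a fixed range entirely in the past never goes stale.
    now = datetime.now(timezone.utc)
    if date_to is not None and date_to < now:
        return False
    return last_refresh < PERIOD_START[interval.lower()](now)
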
diff --git a/posthog/utils.py b/posthog/utils.py
index ddbb32bfb2a5a..5e605415ebc3e 100644
--- a/posthog/utils.py
+++ b/posthog/utils.py
@@ -28,11 +28,11 @@
cast,
)
from urllib.parse import urljoin, urlparse
-from zoneinfo import ZoneInfo
import lzstring
import posthoganalytics
import pytz
+from zoneinfo import ZoneInfo
import structlog
from celery.schedules import crontab
from dateutil import parser
@@ -128,13 +128,13 @@ def get_previous_day(at: Optional[datetime.datetime] = None) -> Tuple[datetime.d
period_end: datetime.datetime = datetime.datetime.combine(
at - datetime.timedelta(days=1),
datetime.time.max,
- tzinfo=pytz.UTC,
+ tzinfo=ZoneInfo("UTC"),
) # very end of the previous day
period_start: datetime.datetime = datetime.datetime.combine(
period_end,
datetime.time.min,
- tzinfo=pytz.UTC,
+ tzinfo=ZoneInfo("UTC"),
) # very start of the previous day
return (period_start, period_end)
@@ -152,13 +152,13 @@ def get_current_day(at: Optional[datetime.datetime] = None) -> Tuple[datetime.da
period_end: datetime.datetime = datetime.datetime.combine(
at,
datetime.time.max,
- tzinfo=pytz.UTC,
+ tzinfo=ZoneInfo("UTC"),
) # very end of the reference day
period_start: datetime.datetime = datetime.datetime.combine(
period_end,
datetime.time.min,
- tzinfo=pytz.UTC,
+ tzinfo=ZoneInfo("UTC"),
) # very start of the reference day
return (period_start, period_end)
@@ -1087,7 +1087,7 @@ def cast_timestamp_or_now(timestamp: Optional[Union[timezone.datetime, str]]) ->
if isinstance(timestamp, str):
timestamp = parser.isoparse(timestamp)
else:
- timestamp = timestamp.astimezone(pytz.utc)
+ timestamp = timestamp.astimezone(ZoneInfo("UTC"))
return timestamp.strftime("%Y-%m-%d %H:%M:%S.%f")
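
The pytz.UTC to ZoneInfo("UTC") swap above is behavior-preserving for these call sites: both yield a UTC tzinfo with zero offset, so the resulting datetimes compare equal. A quick illustrative check:

from datetime import datetime, time
from zoneinfo import ZoneInfo
import pytz

d = datetime(2023, 2, 8, 12, 30)
# Combining with either tzinfo produces the same instant in time.
assert datetime.combine(d, time.min, tzinfo=ZoneInfo("UTC")) == datetime.combine(d, time.min, tzinfo=pytz.UTC)
# astimezone(ZoneInfo("UTC")) leaves an already-UTC timestamp unchanged, as pytz.utc did.
assert d.replace(tzinfo=pytz.UTC).astimezone(ZoneInfo("UTC")) == d.replace(tzinfo=pytz.UTC)
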
diff --git a/posthog/warehouse/models/table.py b/posthog/warehouse/models/table.py
index 10e61444e8250..dcb1b2297216f 100644
--- a/posthog/warehouse/models/table.py
+++ b/posthog/warehouse/models/table.py
@@ -8,6 +8,7 @@
StringDatabaseField,
IntegerDatabaseField,
DateTimeDatabaseField,
+ DateDatabaseField,
StringJSONDatabaseField,
BooleanDatabaseField,
StringArrayDatabaseField,
@@ -20,6 +21,7 @@
"String": StringDatabaseField,
"DateTime64": DateTimeDatabaseField,
"DateTime32": DateTimeDatabaseField,
+ "Date": DateDatabaseField,
"UInt8": IntegerDatabaseField,
"UInt16": IntegerDatabaseField,
"UInt32": IntegerDatabaseField,
@@ -47,6 +49,7 @@ class DataWarehouseTable(CreatedMetaFields, UUIDModel, DeletedMetaFields):
class TableFormat(models.TextChoices):
CSV = "CSV", "CSV"
Parquet = "Parquet", "Parquet"
+ JSON = "JSONEachRow", "JSON"
name: models.CharField = models.CharField(max_length=128)
format: models.CharField = models.CharField(max_length=128, choices=TableFormat.choices)
diff --git a/requirements.in b/requirements.in
index 0fecfd58aad1f..09b47a4c2aa95 100644
--- a/requirements.in
+++ b/requirements.in
@@ -9,7 +9,7 @@ antlr4-python3-runtime==4.13.0
amqp==2.6.0
boto3==1.26.66
boto3-stubs[s3]
-brotli==1.0.9
+brotli==1.1.0
celery==4.4.7
celery-redbeat==2.0.0
clickhouse-driver==0.2.4
@@ -55,7 +55,7 @@ pickleshare==0.7.5
Pillow==9.2.0
posthoganalytics==3.0.1
prance==0.22.2.22.0
-psycopg2-binary==2.8.6
+psycopg2-binary==2.9.7
pyarrow==12.0.1
pydantic==1.10.4
pyjwt==2.4.0
@@ -78,7 +78,7 @@ temporalio==1.1.0
token-bucket==0.3.0
toronado==0.1.0
webdriver_manager==3.8.5
-whitenoise==5.2.0
+whitenoise==6.5.0
mimesis==5.2.1
more-itertools==9.0.0
django-two-factor-auth==1.14.0
diff --git a/requirements.txt b/requirements.txt
index 972c09cea5b08..4fac42e90302a 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -51,8 +51,10 @@ botocore==1.29.66
# s3transfer
botocore-stubs==1.29.130
# via boto3-stubs
-brotli==1.0.9
+brotli==1.1.0
# via -r requirements.in
+cachetools==5.3.1
+ # via google-auth
celery==4.4.7
# via
# -r requirements.in
@@ -319,7 +321,7 @@ protobuf==4.22.1
# grpcio-status
# proto-plus
# temporalio
-psycopg2-binary==2.8.6
+psycopg2-binary==2.9.7
# via -r requirements.in
ptyprocess==0.6.0
# via pexpect
@@ -507,7 +509,7 @@ vine==1.3.0
# celery
webdriver-manager==3.8.5
# via -r requirements.in
-whitenoise==5.2.0
+whitenoise==6.5.0
# via -r requirements.in
wsproto==1.1.0
# via trio-websocket
diff --git a/runtime.txt b/runtime.txt
deleted file mode 100644
index 97691386f3a6e..0000000000000
--- a/runtime.txt
+++ /dev/null
@@ -1 +0,0 @@
-python-3.10.10