diff --git a/frontend/src/scenes/scenes.ts b/frontend/src/scenes/scenes.ts
index 356d9a9a06cc6..ca37ff7676737 100644
--- a/frontend/src/scenes/scenes.ts
+++ b/frontend/src/scenes/scenes.ts
@@ -481,5 +481,4 @@ export const routes: Record<string, Scene> = {
[urls.feedback()]: Scene.Feedback,
[urls.feedback() + '/*']: Scene.Feedback,
[urls.notebook(':shortId')]: Scene.Notebook,
- [urls.notebookEdit(':shortId')]: Scene.Notebook,
}
diff --git a/frontend/src/scenes/session-recordings/SessionsRecordings.stories.tsx b/frontend/src/scenes/session-recordings/SessionsRecordings-player-success.stories.tsx
similarity index 93%
rename from frontend/src/scenes/session-recordings/SessionsRecordings.stories.tsx
rename to frontend/src/scenes/session-recordings/SessionsRecordings-player-success.stories.tsx
index 1ac9395728811..1060246c67d27 100644
--- a/frontend/src/scenes/session-recordings/SessionsRecordings.stories.tsx
+++ b/frontend/src/scenes/session-recordings/SessionsRecordings-player-success.stories.tsx
@@ -9,7 +9,6 @@ import recordingSnapshotsJson from 'scenes/session-recordings/__mocks__/recordin
import recordingMetaJson from 'scenes/session-recordings/__mocks__/recording_meta.json'
import recordingEventsJson from 'scenes/session-recordings/__mocks__/recording_events_query'
import recording_playlists from './__mocks__/recording_playlists.json'
-import { ReplayTabs } from '~/types'
const meta: Meta = {
title: 'Scenes-App/Recordings',
@@ -17,6 +16,7 @@ const meta: Meta = {
layout: 'fullscreen',
viewMode: 'story',
mockDate: '2023-02-01',
+ waitForSelector: '.PlayerFrame__content .replayer-wrapper iframe',
},
decorators: [
mswDecorator({
@@ -81,7 +81,7 @@ const meta: Meta = {
},
]
},
- '/api/projects/:team_id/session_recording_playlists/:playlist_id/recordings?limit=100': (req) => {
+ '/api/projects/:team_id/session_recording_playlists/:playlist_id/recordings': (req) => {
const playlistId = req.params.playlist_id
const response = playlistId === '1234567' ? recordings : []
return [200, { has_next: false, results: response, version: 1 }]
@@ -89,6 +89,12 @@ const meta: Meta = {
// without the session-recording-blob-replay feature flag, we only load via ClickHouse
'/api/projects/:team/session_recordings/:id/snapshots': recordingSnapshotsJson,
'/api/projects/:team/session_recordings/:id': recordingMetaJson,
+ 'api/projects/:team/notebooks': {
+ count: 0,
+ next: null,
+ previous: null,
+ results: [],
+ },
},
post: {
'/api/projects/:team/query': recordingEventsJson,
@@ -97,16 +103,10 @@ const meta: Meta = {
],
}
export default meta
-export function RecordingsList(): JSX.Element {
- useEffect(() => {
- router.actions.push(urls.replay())
- }, [])
- return <App />
-}
-export function RecordingsPlayLists(): JSX.Element {
+export function RecentRecordings(): JSX.Element {
useEffect(() => {
- router.actions.push(urls.replay(ReplayTabs.Playlists))
+ router.actions.push(urls.replay())
}, [])
return <App />
}
diff --git a/frontend/src/scenes/session-recordings/SessionsRecordings-playlist-listing.stories.tsx b/frontend/src/scenes/session-recordings/SessionsRecordings-playlist-listing.stories.tsx
new file mode 100644
index 0000000000000..657fbccf4bc29
--- /dev/null
+++ b/frontend/src/scenes/session-recordings/SessionsRecordings-playlist-listing.stories.tsx
@@ -0,0 +1,48 @@
+import { Meta } from '@storybook/react'
+import { useEffect } from 'react'
+import { mswDecorator } from '~/mocks/browser'
+import { router } from 'kea-router'
+import { urls } from 'scenes/urls'
+import { App } from 'scenes/App'
+import recording_playlists from './__mocks__/recording_playlists.json'
+import { ReplayTabs } from '~/types'
+import recordings from 'scenes/session-recordings/__mocks__/recordings.json'
+import recordingEventsJson from 'scenes/session-recordings/__mocks__/recording_events_query'
+
+const meta: Meta = {
+ title: 'Scenes-App/Recordings',
+ parameters: {
+ layout: 'fullscreen',
+ viewMode: 'story',
+ mockDate: '2023-02-01',
+ },
+ decorators: [
+ mswDecorator({
+ get: {
+ '/api/projects/:team_id/session_recording_playlists': recording_playlists,
+ '/api/projects/:team_id/session_recordings': (req) => {
+ const version = req.url.searchParams.get('version')
+ return [
+ 200,
+ {
+ has_next: false,
+ results: recordings,
+ version,
+ },
+ ]
+ },
+ },
+ post: {
+ '/api/projects/:team/query': recordingEventsJson,
+ },
+ }),
+ ],
+}
+export default meta
+
+export function RecordingsPlayLists(): JSX.Element {
+ useEffect(() => {
+ router.actions.push(urls.replay(ReplayTabs.Playlists))
+ }, [])
+ return <App />
+}
diff --git a/frontend/src/scenes/session-recordings/__mocks__/recording_events.json b/frontend/src/scenes/session-recordings/__mocks__/recording_events.json
index f2db148045646..0afa00a98d244 100644
--- a/frontend/src/scenes/session-recordings/__mocks__/recording_events.json
+++ b/frontend/src/scenes/session-recordings/__mocks__/recording_events.json
@@ -1,6 +1,6 @@
[
{
- "id": "$pageview",
+ "id": "$pageview1",
"event": "$pageview",
"name": "$event_before_recording_starts",
"type": "events",
@@ -14,7 +14,7 @@
"elements_hash": ""
},
{
- "id": "$pageview",
+ "id": "$pageview2",
"name": "$pageview",
"event": "$pageview",
"type": "events",
diff --git a/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx b/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx
index fd023b710fc20..451f1cf616f8a 100644
--- a/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx
+++ b/frontend/src/scenes/session-recordings/player/PlayerMetaLinks.tsx
@@ -4,17 +4,18 @@ import {
} from 'scenes/session-recordings/player/sessionRecordingPlayerLogic'
import { useActions, useValues } from 'kea'
import { LemonButton, LemonButtonProps } from 'lib/lemon-ui/LemonButton'
-import { IconComment, IconDelete, IconLink } from 'lib/lemon-ui/icons'
+import { IconComment, IconDelete, IconJournalPlus, IconLink } from 'lib/lemon-ui/icons'
import { openPlayerShareDialog } from 'scenes/session-recordings/player/share/PlayerShare'
import { PlaylistPopoverButton } from './playlist-popover/PlaylistPopover'
import { LemonDialog } from 'lib/lemon-ui/LemonDialog'
-import { NotebookAddButton } from 'scenes/notebooks/NotebookAddButton/NotebookAddButton'
+import { NotebookSelectButton } from 'scenes/notebooks/NotebookSelectButton/NotebookSelectButton'
import { NotebookNodeType } from '~/types'
-import { dayjs } from 'lib/dayjs'
+import { useNotebookNode } from 'scenes/notebooks/Nodes/notebookNodeLogic'
export function PlayerMetaLinks(): JSX.Element {
const { sessionRecordingId, logicProps } = useValues(sessionRecordingPlayerLogic)
const { setPause, deleteRecording } = useActions(sessionRecordingPlayerLogic)
+ const nodeLogic = useNotebookNode()
const getCurrentPlayerTime = (): number => {
// NOTE: We pull this value at call time as otherwise it would trigger re-renders if pulled from the hook
@@ -55,12 +56,11 @@ export function PlayerMetaLinks(): JSX.Element {
{![SessionRecordingPlayerMode.Sharing].includes(mode) ? (
<>
-
}
resource={{ type: NotebookNodeType.Recording, attrs: { id: sessionRecordingId } }}
onClick={() => setPause()}
- newNotebookTitle={`Notes ${dayjs().format('DD/MM')}`}
onNotebookOpened={(theNotebookLogic, theNodeLogic) => {
const time = getCurrentPlayerTime() * 1000
@@ -74,15 +74,30 @@ export function PlayerMetaLinks(): JSX.Element {
}}
>
Comment
-
+
} onClick={onShare} {...commonProps}>
Share
-
- Pin
-
+ {nodeLogic ? (
+ nodeLogic.props.nodeType !== NotebookNodeType.Recording ? (
+
}
+ size="small"
+ onClick={() => {
+ nodeLogic.actions.insertAfter({
+ type: NotebookNodeType.Recording,
+ attrs: { id: sessionRecordingId },
+ })
+ }}
+ />
+ ) : null
+ ) : (
+
+ Pin
+
+ )}
{logicProps.playerKey !== 'modal' && (
([
if (nextSourceToLoad) {
actions.loadRecordingSnapshotsV2(nextSourceToLoad)
- } else {
- actions.reportUsageIfFullyLoaded()
}
},
loadRecordingSnapshotsV1Success: ({ sessionPlayerSnapshotData }) => {
@@ -254,8 +252,6 @@ export const sessionRecordingDataLogic = kea<sessionRecordingDataLogicType>([
if (values.sessionPlayerSnapshotData?.next) {
actions.loadRecordingSnapshotsV1(values.sessionPlayerSnapshotData?.next)
- } else {
- actions.reportUsageIfFullyLoaded()
}
if (values.chunkPaginationIndex === 1 || values.loadedFromBlobStorage) {
// Not always accurate that recording is playable after first chunk is loaded, but good guesstimate for now
@@ -265,10 +261,12 @@ export const sessionRecordingDataLogic = kea<sessionRecordingDataLogicType>([
size: (values.sessionPlayerSnapshotData?.snapshots ?? []).length,
duration: Math.round(performance.now() - cache.snapshotsStartTime),
}
-
- actions.reportViewed()
}
},
+ loadRecordingSnapshotsSuccess: () => {
+ actions.reportViewed()
+ actions.reportUsageIfFullyLoaded()
+ },
loadRecordingSnapshotsV1Failure: () => {
actions.loadRecordingSnapshotsFailure()
},
diff --git a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx
index 04464fc9a87da..00508be3ab649 100644
--- a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx
+++ b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx
@@ -57,19 +57,32 @@ function UnusableEventsWarning(props: { unusableEventsInFilter: string[] }): JSX
)
}
+export type SessionRecordingsPlaylistProps = SessionRecordingListLogicProps & {
+ playlistShortId?: string
+ personUUID?: string
+ filters?: RecordingFilters
+ updateSearchParams?: boolean
+ onFiltersChange?: (filters: RecordingFilters) => void
+ autoPlay?: boolean
+ mode?: 'standard' | 'notebook'
+}
+
export function RecordingsLists({
playlistShortId,
personUUID,
filters: defaultFilters,
updateSearchParams,
+ ...props
}: SessionRecordingsPlaylistProps): JSX.Element {
- const logicProps = {
+ const logicProps: SessionRecordingListLogicProps = {
+ ...props,
playlistShortId,
personUUID,
filters: defaultFilters,
updateSearchParams,
}
const logic = sessionRecordingsListLogic(logicProps)
+
const {
filters,
hasNext,
@@ -244,11 +257,11 @@ export function RecordingsLists({
data-attr={'expand-replay-listing-from-default-seven-days-to-twenty-one'}
onClick={() => {
setFilters({
- date_from: '-21d',
+ date_from: '-30d',
})
}}
>
- Search over the last 21 days
+ Search over the last 30 days
>
) : (
@@ -285,33 +298,12 @@ export function RecordingsLists({
)
}
-export type SessionRecordingsPlaylistProps = {
- playlistShortId?: string
- personUUID?: string
- filters?: RecordingFilters
- updateSearchParams?: boolean
- onFiltersChange?: (filters: RecordingFilters) => void
- autoPlay?: boolean
- mode?: 'standard' | 'notebook'
-}
-
export function SessionRecordingsPlaylist(props: SessionRecordingsPlaylistProps): JSX.Element {
- const {
- playlistShortId,
- personUUID,
- filters: defaultFilters,
- updateSearchParams,
- onFiltersChange,
- autoPlay = true,
- } = props
+ const { playlistShortId } = props
const logicProps: SessionRecordingListLogicProps = {
- playlistShortId,
- personUUID,
- filters: defaultFilters,
- updateSearchParams,
- autoPlay,
- onFiltersChange,
+ ...props,
+ autoPlay: props.autoPlay ?? true,
}
const logic = sessionRecordingsListLogic(logicProps)
const {
diff --git a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsListLogic.ts b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsListLogic.ts
index 99cb664cebc18..5d44e84618b36 100644
--- a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsListLogic.ts
+++ b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsListLogic.ts
@@ -157,12 +157,8 @@ export const defaultPageviewPropertyEntityFilter = (
}
}
-export function generateSessionRecordingListLogicKey(props: SessionRecordingListLogicProps): string {
- return `${props.key}-${props.playlistShortId}-${props.personUUID}-${props.updateSearchParams ? '-with-search' : ''}`
-}
-
export interface SessionRecordingListLogicProps {
- key?: string
+ logicKey?: string
playlistShortId?: string
personUUID?: PersonUUID
filters?: RecordingFilters
@@ -174,7 +170,12 @@ export interface SessionRecordingListLogicProps {
export const sessionRecordingsListLogic = kea<sessionRecordingsListLogicType>([
path((key) => ['scenes', 'session-recordings', 'playlist', 'sessionRecordingsListLogic', key]),
props({} as SessionRecordingListLogicProps),
- key(generateSessionRecordingListLogicKey),
+ key(
+ (props: SessionRecordingListLogicProps) =>
+ `${props.logicKey}-${props.playlistShortId}-${props.personUUID}-${
+ props.updateSearchParams ? '-with-search' : ''
+ }`
+ ),
connect({
actions: [
eventUsageLogic,
diff --git a/frontend/src/scenes/surveys/Survey.tsx b/frontend/src/scenes/surveys/Survey.tsx
index 6f72397bd4c54..d59ed4b674e69 100644
--- a/frontend/src/scenes/surveys/Survey.tsx
+++ b/frontend/src/scenes/surveys/Survey.tsx
@@ -60,7 +60,7 @@ export function SurveyComponent({ id }: { id?: string } = {}): JSX.Element {
export function SurveyForm({ id }: { id: string }): JSX.Element {
const { survey, surveyLoading, isEditingSurvey, hasTargetingFlag } = useValues(surveyLogic)
- const { loadSurvey, editingSurvey, setHasTargetingFlag } = useActions(surveyLogic)
+ const { loadSurvey, editingSurvey, setSurveyValue } = useActions(surveyLogic)
const { featureFlags } = useValues(enabledFeaturesLogic)
return (
@@ -374,7 +374,9 @@ export function SurveyForm({ id }: { id: string }): JSX.Element {
- onClick={() => setHasTargetingFlag(true)}
+ onClick={() => {
+ setSurveyValue('targeting_flag_filters', { groups: [] })
+ }}
>
Add user targeting
@@ -389,7 +391,10 @@ export function SurveyForm({ id }: { id: string }): JSX.Element {
type="secondary"
status="danger"
className="w-max"
- onClick={() => setHasTargetingFlag(false)}
+ onClick={() => {
+ setSurveyValue('targeting_flag_filters', undefined)
+ setSurveyValue('targeting_flag', null)
+ }}
>
Remove all user properties
diff --git a/frontend/src/scenes/surveys/SurveyView.tsx b/frontend/src/scenes/surveys/SurveyView.tsx
index dfe7de4895a4b..3b0b46e89ca94 100644
--- a/frontend/src/scenes/surveys/SurveyView.tsx
+++ b/frontend/src/scenes/surveys/SurveyView.tsx
@@ -10,27 +10,22 @@ import { capitalizeFirstLetter } from 'lib/utils'
import { useState, useEffect } from 'react'
import { pluginsLogic } from 'scenes/plugins/pluginsLogic'
import { Query } from '~/queries/Query/Query'
-import { defaultSurveyAppearance, surveyLogic } from './surveyLogic'
+import { defaultSurveyAppearance, surveyEventName, surveyLogic } from './surveyLogic'
import { surveysLogic } from './surveysLogic'
import { PageHeader } from 'lib/components/PageHeader'
import { SurveyReleaseSummary } from './Survey'
import { SurveyAppearance } from './SurveyAppearance'
-import { SurveyQuestionType, SurveyType } from '~/types'
+import { PropertyFilterType, PropertyOperator, Survey, SurveyQuestionType, SurveyType } from '~/types'
import { SurveyAPIEditor } from './SurveyAPIEditor'
import { LemonBanner } from 'lib/lemon-ui/LemonBanner'
import { IconOpenInNew } from 'lib/lemon-ui/icons'
import { NodeKind } from '~/queries/schema'
+import { dayjs } from 'lib/dayjs'
+import { FEATURE_FLAGS } from 'lib/constants'
+import { featureFlagLogic } from 'lib/logic/featureFlagLogic'
export function SurveyView({ id }: { id: string }): JSX.Element {
- const {
- survey,
- dataTableQuery,
- surveyLoading,
- surveyPlugin,
- surveyMetricsQueries,
- surveyDataVizQuery,
- showSurveyAppWarning,
- } = useValues(surveyLogic)
+ const { survey, surveyLoading, surveyPlugin, showSurveyAppWarning } = useValues(surveyLogic)
// TODO: survey results logic
// const { surveyImpressionsCount, surveyStartedCount, surveyCompletedCount } = useValues(surveyResultsLogic)
const { editingSurvey, updateSurvey, launchSurvey, stopSurvey, archiveSurvey, resumeSurvey } =
@@ -134,48 +129,7 @@ export function SurveyView({ id }: { id: string }): JSX.Element {
? {
content: (
- {surveyMetricsQueries && (
-
- )}
- {survey.questions[0].type === SurveyQuestionType.Rating && (
-
-
-
- )}
- {(survey.questions[0].type === SurveyQuestionType.SingleChoice ||
- survey.questions[0].type === SurveyQuestionType.MultipleChoice) && (
-
- {survey.questions[0].type === SurveyQuestionType.SingleChoice ? (
-
- ) : (
-
- )}
-
- )}
- {surveyLoading ?
:
}
+ <SurveyResult />
),
key: 'results',
@@ -305,7 +259,124 @@ export function SurveyView({ id }: { id: string }): JSX.Element {
)
}
+export function SurveyResult({ disableEventsTable }: { disableEventsTable?: boolean }): JSX.Element {
+ const {
+ survey,
+ dataTableQuery,
+ surveyLoading,
+ surveyMetricsQueries,
+ surveyRatingQuery,
+ surveyMultipleChoiceQuery,
+ } = useValues(surveyLogic)
+ const { featureFlags } = useValues(featureFlagLogic)
+
+ return (
+ <>
+ {surveyMetricsQueries && (
+
+ )}
+ {survey.questions[0].type === SurveyQuestionType.Rating && (
+
+
+ {featureFlags[FEATURE_FLAGS.SURVEY_NPS_RESULTS] && survey.questions[0].scale === 10 && (
+ <>
+
+
NPS Score
+
+ >
+ )}
+
+ )}
+ {(survey.questions[0].type === SurveyQuestionType.SingleChoice ||
+ survey.questions[0].type === SurveyQuestionType.MultipleChoice) && (
+
+
+
+ )}
+ {!disableEventsTable && (surveyLoading ? : )}
+ >
+ )
+}
+
const OPT_IN_SNIPPET = `posthog.init('YOUR_PROJECT_API_KEY', {
api_host: 'YOUR API HOST',
opt_in_site_apps: true // <--- Add this line
})`
+
+function SurveyNPSResults({ survey }: { survey: Survey }): JSX.Element {
+ return (
+
+ )
+}
diff --git a/frontend/src/scenes/surveys/Surveys.tsx b/frontend/src/scenes/surveys/Surveys.tsx
index 3d5fc423f40c7..3f18b2df4e154 100644
--- a/frontend/src/scenes/surveys/Surveys.tsx
+++ b/frontend/src/scenes/surveys/Surveys.tsx
@@ -112,17 +112,7 @@ export function Surveys(): JSX.Element {
title: 'Status',
width: 100,
render: function Render(_, survey: Survey) {
- const statusColors = {
- running: 'success',
- draft: 'default',
- complete: 'completion',
- } as Record
- const status = getSurveyStatus(survey)
- return (
-
- {status.toUpperCase()}
-
- )
+ return <StatusTag survey={survey} />
},
},
{
@@ -243,3 +233,17 @@ export function Surveys(): JSX.Element {
)
}
+
+export function StatusTag({ survey }: { survey: Survey }): JSX.Element {
+ const statusColors = {
+ running: 'success',
+ draft: 'default',
+ complete: 'completion',
+ } as Record
+ const status = getSurveyStatus(survey)
+ return (
+
+ {status.toUpperCase()}
+
+ )
+}
diff --git a/frontend/src/scenes/surveys/surveyLogic.tsx b/frontend/src/scenes/surveys/surveyLogic.tsx
index 4e935af2022b6..b482f8e794949 100644
--- a/frontend/src/scenes/surveys/surveyLogic.tsx
+++ b/frontend/src/scenes/surveys/surveyLogic.tsx
@@ -27,7 +27,6 @@ import { featureFlagLogic } from 'scenes/feature-flags/featureFlagLogic'
export interface NewSurvey
extends Pick<
Survey,
- | 'id'
| 'name'
| 'description'
| 'type'
@@ -40,6 +39,7 @@ export interface NewSurvey
| 'archived'
| 'appearance'
> {
+ id: 'new'
linked_flag_id: number | undefined
targeting_flag_filters: Pick<FeatureFlagFilters, 'groups'> | undefined
}
@@ -55,7 +55,7 @@ export const defaultSurveyAppearance = {
thankYouMessageHeader: 'Thank you for your feedback!',
}
-const NEW_SURVEY: NewSurvey = {
+export const NEW_SURVEY: NewSurvey = {
id: 'new',
name: '',
description: '',
@@ -76,73 +76,6 @@ export const surveyEventName = 'survey sent'
const SURVEY_RESPONSE_PROPERTY = '$survey_response'
-export const getSurveyDataQuery = (survey: Survey): DataTableNode => {
- const surveyDataQuery: DataTableNode = {
- kind: NodeKind.DataTableNode,
- source: {
- kind: NodeKind.EventsQuery,
- select: ['*', `properties.${SURVEY_RESPONSE_PROPERTY}`, 'timestamp', 'person'],
- orderBy: ['timestamp DESC'],
- where: [`event == 'survey sent' or event == '${survey.name} survey sent'`],
- after: survey.created_at,
- properties: [
- {
- type: PropertyFilterType.Event,
- key: '$survey_id',
- operator: PropertyOperator.Exact,
- value: survey.id,
- },
- ],
- },
- propertiesViaUrl: true,
- showExport: true,
- showReload: true,
- showEventFilter: true,
- showPropertyFilter: true,
- }
- return surveyDataQuery
-}
-
-export const getSurveyMetricsQueries = (surveyId: string): SurveyMetricsQueries => {
- const surveysShownHogqlQuery = `select count(distinct person.id) as 'survey shown' from events where event == 'survey shown' and properties.$survey_id == '${surveyId}'`
- const surveysDismissedHogqlQuery = `select count(distinct person.id) as 'survey dismissed' from events where event == 'survey dismissed' and properties.$survey_id == '${surveyId}'`
- return {
- surveysShown: {
- kind: NodeKind.DataTableNode,
- source: { kind: NodeKind.HogQLQuery, query: surveysShownHogqlQuery },
- },
- surveysDismissed: {
- kind: NodeKind.DataTableNode,
- source: { kind: NodeKind.HogQLQuery, query: surveysDismissedHogqlQuery },
- },
- }
-}
-
-export const getSurveyDataVizQuery = (survey: Survey): InsightVizNode => {
- return {
- kind: NodeKind.InsightVizNode,
- source: {
- kind: NodeKind.TrendsQuery,
- dateRange: {
- date_from: dayjs(survey.created_at).format('YYYY-MM-DD'),
- date_to: dayjs().format('YYYY-MM-DD'),
- },
- properties: [
- {
- type: PropertyFilterType.Event,
- key: '$survey_id',
- operator: PropertyOperator.Exact,
- value: survey.id,
- },
- ],
- series: [{ event: surveyEventName, kind: NodeKind.EventsNode }],
- trendsFilter: { display: ChartDisplayType.ActionsBarValue },
- breakdown: { breakdown: '$survey_response', breakdown_type: 'event' },
- },
- showTable: true,
- }
-}
-
export interface SurveyLogicProps {
id: string | 'new'
}
@@ -153,9 +86,9 @@ export interface SurveyMetricsQueries {
}
export const surveyLogic = kea<surveyLogicType>([
- path(['scenes', 'surveys', 'surveyLogic']),
props({} as SurveyLogicProps),
key(({ id }) => id),
+ path((key) => ['scenes', 'surveys', 'surveyLogic', key]),
connect(() => ({
actions: [
surveysLogic,
@@ -179,10 +112,6 @@ export const surveyLogic = kea<surveyLogicType>([
stopSurvey: true,
archiveSurvey: true,
resumeSurvey: true,
- setDataTableQuery: (query: DataTableNode) => ({ query }),
- setSurveyMetricsQueries: (surveyMetricsQueries: SurveyMetricsQueries) => ({ surveyMetricsQueries }),
- setSurveyDataVizQuery: (surveyDataVizQuery: InsightVizNode) => ({ surveyDataVizQuery }),
- setHasTargetingFlag: (hasTargetingFlag: boolean) => ({ hasTargetingFlag }),
}),
loaders(({ props, actions }) => ({
survey: {
@@ -213,16 +142,6 @@ export const surveyLogic = kea([
},
})),
listeners(({ actions }) => ({
- loadSurveySuccess: ({ survey }) => {
- if (survey.start_date && survey.id !== 'new') {
- actions.setDataTableQuery(getSurveyDataQuery(survey as Survey))
- actions.setSurveyMetricsQueries(getSurveyMetricsQueries(survey.id))
- actions.setSurveyDataVizQuery(getSurveyDataVizQuery(survey as Survey))
- }
- if (survey.targeting_flag) {
- actions.setHasTargetingFlag(true)
- }
- },
createSurveySuccess: ({ survey }) => {
lemonToast.success(<>Survey {survey.name} created>)
actions.loadSurveys()
@@ -237,8 +156,6 @@ export const surveyLogic = kea<surveyLogicType>([
},
launchSurveySuccess: ({ survey }) => {
lemonToast.success(<>Survey {survey.name} launched>)
- actions.setSurveyMetricsQueries(getSurveyMetricsQueries(survey.id))
- actions.setDataTableQuery(getSurveyDataQuery(survey))
actions.loadSurveys()
actions.reportSurveyLaunched(survey)
},
@@ -261,30 +178,6 @@ export const surveyLogic = kea<surveyLogicType>([
editingSurvey: (_, { editing }) => editing,
},
],
- dataTableQuery: [
- null as DataTableNode | null,
- {
- setDataTableQuery: (_, { query }) => query,
- },
- ],
- surveyMetricsQueries: [
- null as SurveyMetricsQueries | null,
- {
- setSurveyMetricsQueries: (_, { surveyMetricsQueries }) => surveyMetricsQueries,
- },
- ],
- surveyDataVizQuery: [
- null as InsightVizNode | null,
- {
- setSurveyDataVizQuery: (_, { surveyDataVizQuery }) => surveyDataVizQuery,
- },
- ],
- hasTargetingFlag: [
- false,
- {
- setHasTargetingFlag: (_, { hasTargetingFlag }) => hasTargetingFlag,
- },
- ],
}),
selectors({
isSurveyRunning: [
@@ -320,6 +213,139 @@ export const surveyLogic = kea<surveyLogicType>([
)
},
],
+ dataTableQuery: [
+ (s) => [s.survey],
+ (survey): DataTableNode | null => {
+ if (survey.id === 'new') {
+ return null
+ }
+ const createdAt = (survey as Survey).created_at
+
+ return {
+ kind: NodeKind.DataTableNode,
+ source: {
+ kind: NodeKind.EventsQuery,
+ select: ['*', `properties.${SURVEY_RESPONSE_PROPERTY}`, 'timestamp', 'person'],
+ orderBy: ['timestamp DESC'],
+ where: [`event == 'survey sent' or event == '${survey.name} survey sent'`],
+ after: createdAt,
+ properties: [
+ {
+ type: PropertyFilterType.Event,
+ key: '$survey_id',
+ operator: PropertyOperator.Exact,
+ value: survey.id,
+ },
+ ],
+ },
+ propertiesViaUrl: true,
+ showExport: true,
+ showReload: true,
+ showEventFilter: true,
+ showPropertyFilter: true,
+ showTimings: false,
+ }
+ },
+ ],
+ surveyMetricsQueries: [
+ (s) => [s.survey],
+ (survey): SurveyMetricsQueries | null => {
+ const surveyId = survey.id
+ if (surveyId === 'new') {
+ return null
+ }
+ const createdAt = (survey as Survey).created_at
+
+ const surveysShownHogqlQuery = `select count(distinct person.id) as 'survey shown' from events where event == 'survey shown' and properties.$survey_id == '${surveyId}'`
+ const surveysDismissedHogqlQuery = `select count(distinct person.id) as 'survey dismissed' from events where event == 'survey dismissed' and properties.$survey_id == '${surveyId}'`
+ return {
+ surveysShown: {
+ kind: NodeKind.DataTableNode,
+ source: {
+ kind: NodeKind.HogQLQuery,
+ query: surveysShownHogqlQuery,
+ filters: { dateRange: { date_from: dayjs(createdAt).format('YYYY-MM-DD') } },
+ },
+ showTimings: false,
+ },
+ surveysDismissed: {
+ kind: NodeKind.DataTableNode,
+ source: {
+ kind: NodeKind.HogQLQuery,
+ query: surveysDismissedHogqlQuery,
+ filters: { dateRange: { date_from: dayjs(createdAt).format('YYYY-MM-DD') } },
+ },
+ showTimings: false,
+ },
+ }
+ },
+ ],
+ surveyRatingQuery: [
+ (s) => [s.survey],
+ (survey): InsightVizNode | null => {
+ if (survey.id === 'new') {
+ return null
+ }
+ const createdAt = (survey as Survey).created_at
+
+ return {
+ kind: NodeKind.InsightVizNode,
+ source: {
+ kind: NodeKind.TrendsQuery,
+ dateRange: {
+ date_from: dayjs(createdAt).format('YYYY-MM-DD'),
+ date_to: dayjs().format('YYYY-MM-DD'),
+ },
+ properties: [
+ {
+ type: PropertyFilterType.Event,
+ key: '$survey_id',
+ operator: PropertyOperator.Exact,
+ value: survey.id,
+ },
+ ],
+ series: [{ event: surveyEventName, kind: NodeKind.EventsNode }],
+ trendsFilter: { display: ChartDisplayType.ActionsBarValue },
+ breakdown: { breakdown: '$survey_response', breakdown_type: 'event' },
+ },
+ showTable: true,
+ }
+ },
+ ],
+ surveyMultipleChoiceQuery: [
+ (s) => [s.survey],
+ (survey): DataTableNode | null => {
+ if (survey.id === 'new') {
+ return null
+ }
+ const createdAt = (survey as Survey).created_at
+
+ const singleChoiceQuery = `select count(), properties.$survey_response as choice from events where event == 'survey sent' and properties.$survey_id == '${survey.id}' group by choice order by count() desc`
+ const multipleChoiceQuery = `select count(), arrayJoin(JSONExtractArrayRaw(properties, '$survey_response')) as choice from events where event == 'survey sent' and properties.$survey_id == '${survey.id}' group by choice order by count() desc`
+ return {
+ kind: NodeKind.DataTableNode,
+ source: {
+ kind: NodeKind.HogQLQuery,
+ query:
+ survey.questions[0].type === SurveyQuestionType.SingleChoice
+ ? singleChoiceQuery
+ : multipleChoiceQuery,
+ filters: {
+ dateRange: {
+ date_from: dayjs(createdAt).format('YYYY-MM-DD'),
+ },
+ },
+ },
+ showTimings: false,
+ }
+ },
+ ],
+ hasTargetingFlag: [
+ (s) => [s.survey],
+ (survey): boolean => {
+ return !!survey.targeting_flag || !!(survey.id === 'new' && survey.targeting_flag_filters)
+ },
+ ],
}),
forms(({ actions, props, values }) => ({
survey: {
diff --git a/frontend/src/scenes/urls.ts b/frontend/src/scenes/urls.ts
index 2450187f8303d..d933d06f4e196 100644
--- a/frontend/src/scenes/urls.ts
+++ b/frontend/src/scenes/urls.ts
@@ -185,5 +185,4 @@ export const urls = {
tab: 'notebooks',
}).url,
notebook: (shortId: string): string => `/notebooks/${shortId}`,
- notebookEdit: (shortId: string): string => `/notebooks/${shortId}/edit`,
}
diff --git a/frontend/src/styles/utilities.scss b/frontend/src/styles/utilities.scss
index 126d981427e89..745375f1c3f57 100644
--- a/frontend/src/styles/utilities.scss
+++ b/frontend/src/styles/utilities.scss
@@ -919,6 +919,13 @@ $decorations: underline, overline, line-through, no-underline;
}
}
+.list-inside {
+ list-style-position: inside;
+}
+.list-outside {
+ list-style-position: outside;
+}
+
.shadow {
box-shadow: var(--shadow-elevation);
}
diff --git a/frontend/src/types.ts b/frontend/src/types.ts
index cf091c4c88296..f4413d151bcdc 100644
--- a/frontend/src/types.ts
+++ b/frontend/src/types.ts
@@ -2191,6 +2191,7 @@ export interface FeatureFlagType extends Omit
+ type: NotebookNodeType
+}
+
export enum NotebookTarget {
Popover = 'popover',
Auto = 'auto',
@@ -3093,6 +3097,8 @@ export type BatchExportDestinationS3 = {
aws_secret_access_key: string
exclude_events: string[]
compression: string | null
+ encryption: string | null
+ kms_key_id: string | null
}
}
diff --git a/latest_migrations.manifest b/latest_migrations.manifest
index 84d604bfc1357..233b3d446d5cb 100644
--- a/latest_migrations.manifest
+++ b/latest_migrations.manifest
@@ -5,7 +5,7 @@ contenttypes: 0002_remove_content_type_name
ee: 0015_add_verified_properties
otp_static: 0002_throttling
otp_totp: 0002_auto_20190420_0723
-posthog: 0347_add_bigquery_export_type
+posthog: 0350_add_notebook_text_content
sessions: 0001_initial
social_django: 0010_uid_db_index
two_factor: 0007_auto_20201201_1019
diff --git a/package.json b/package.json
index 1fee283b008d2..3f8131541f4c5 100644
--- a/package.json
+++ b/package.json
@@ -73,7 +73,7 @@
"@monaco-editor/react": "4.4.6",
"@posthog/plugin-scaffold": "^1.3.2",
"@react-hook/size": "^2.1.2",
- "@rrweb/types": "^2.0.0-alpha.9",
+ "@rrweb/types": "^2.0.0-alpha.11",
"@sentry/react": "7.22.0",
"@testing-library/dom": ">=7.21.4",
"@tiptap/core": "^2.1.0-rc.12",
@@ -125,7 +125,8 @@
"kea-window-values": "^3.0.0",
"md5": "^2.3.0",
"monaco-editor": "^0.39.0",
- "posthog-js": "1.78.1",
+ "papaparse": "^5.4.1",
+ "posthog-js": "1.78.5",
"posthog-js-lite": "2.0.0-alpha5",
"prettier": "^2.8.8",
"prop-types": "^15.7.2",
@@ -154,7 +155,7 @@
"react-virtualized": "^9.22.5",
"require-from-string": "^2.0.2",
"resize-observer-polyfill": "^1.5.1",
- "rrweb": "^2.0.0-alpha.9",
+ "rrweb": "^2.0.0-alpha.11",
"sass": "^1.26.2",
"use-debounce": "^9.0.3",
"use-resize-observer": "^8.0.0",
@@ -206,6 +207,7 @@
"@types/jest-image-snapshot": "^6.1.0",
"@types/md5": "^2.3.0",
"@types/node": "^18.11.9",
+ "@types/papaparse": "^5.3.8",
"@types/pixelmatch": "^5.2.4",
"@types/pngjs": "^6.0.1",
"@types/query-selector-shadow-dom": "^1.0.0",
diff --git a/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Hidden-Mobile-1-chromium-linux.png b/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Hidden-Mobile-1-chromium-linux.png
index 0e709cd227beb..ab79c58ee2eaf 100644
Binary files a/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Hidden-Mobile-1-chromium-linux.png and b/playwright/e2e-vrt/layout/Navigation.spec.ts-snapshots/Navigation-App-Page-With-Side-Bar-Hidden-Mobile-1-chromium-linux.png differ
diff --git a/plugin-server/functional_tests/webhooks.test.ts b/plugin-server/functional_tests/webhooks.test.ts
index 0fb7155790034..82f1bfe9bf186 100644
--- a/plugin-server/functional_tests/webhooks.test.ts
+++ b/plugin-server/functional_tests/webhooks.test.ts
@@ -199,6 +199,7 @@ test.concurrent(`webhooks: fires zapier REST webhook`, async () => {
properties: {
$creator_event_uuid: eventUuid,
$initial_current_url: 'http://localhost:8000',
+ $current_url: 'http://localhost:8000',
email: 't@t.com',
},
uuid: expect.any(String),
@@ -208,6 +209,7 @@ test.concurrent(`webhooks: fires zapier REST webhook`, async () => {
$sent_at: expect.any(String),
$set: {
email: 't@t.com',
+ $current_url: 'http://localhost:8000',
},
$set_once: {
$initial_current_url: 'http://localhost:8000',
diff --git a/plugin-server/src/config/config.ts b/plugin-server/src/config/config.ts
index ef98937b81bff..9cecab54d8c4a 100644
--- a/plugin-server/src/config/config.ts
+++ b/plugin-server/src/config/config.ts
@@ -44,6 +44,7 @@ export function getDefaultConfig(): PluginsServerConfig {
KAFKA_SASL_PASSWORD: undefined,
KAFKA_CLIENT_RACK: undefined,
KAFKA_CONSUMPTION_USE_RDKAFKA: false, // Transitional setting, ignored for consumers that only support one library
+ KAFKA_CONSUMPTION_RDKAFKA_COOPERATIVE_REBALANCE: true, // If true, use the cooperative rebalance strategy, otherwise uses the default ('range,roundrobin')
KAFKA_CONSUMPTION_MAX_BYTES: 10_485_760, // Default value for kafkajs
KAFKA_CONSUMPTION_MAX_BYTES_PER_PARTITION: 1_048_576, // Default value for kafkajs, must be bigger than message size
KAFKA_CONSUMPTION_MAX_WAIT_MS: 1_000, // Down from the 5s default for kafkajs
@@ -116,6 +117,7 @@ export function getDefaultConfig(): PluginsServerConfig {
OBJECT_STORAGE_SECRET_ACCESS_KEY: 'object_storage_root_password',
OBJECT_STORAGE_BUCKET: 'posthog',
PLUGIN_SERVER_MODE: null,
+ PLUGIN_LOAD_SEQUENTIALLY: false,
KAFKAJS_LOG_LEVEL: 'WARN',
HISTORICAL_EXPORTS_ENABLED: true,
HISTORICAL_EXPORTS_MAX_RETRY_COUNT: 15,
@@ -126,6 +128,12 @@ export function getDefaultConfig(): PluginsServerConfig {
USE_KAFKA_FOR_SCHEDULED_TASKS: true,
CLOUD_DEPLOYMENT: 'default', // Used as a Sentry tag
+ STARTUP_PROFILE_DURATION_SECONDS: 300, // 5 minutes
+ STARTUP_PROFILE_CPU: false,
+ STARTUP_PROFILE_HEAP: false,
+ STARTUP_PROFILE_HEAP_INTERVAL: 512 * 1024, // default v8 value
+ STARTUP_PROFILE_HEAP_DEPTH: 16, // default v8 value
+
SESSION_RECORDING_KAFKA_HOSTS: undefined,
SESSION_RECORDING_KAFKA_SECURITY_PROTOCOL: undefined,
SESSION_RECORDING_KAFKA_BATCH_SIZE: 500,
diff --git a/plugin-server/src/kafka/batch-consumer.ts b/plugin-server/src/kafka/batch-consumer.ts
index a82aed8861098..03c9e2de6db37 100644
--- a/plugin-server/src/kafka/batch-consumer.ts
+++ b/plugin-server/src/kafka/batch-consumer.ts
@@ -1,11 +1,12 @@
import { GlobalConfig, KafkaConsumer, Message } from 'node-rdkafka-acosom'
-import { exponentialBuckets, Histogram } from 'prom-client'
+import { exponentialBuckets, Gauge, Histogram } from 'prom-client'
import { status } from '../utils/status'
import { createAdminClient, ensureTopicExists } from './admin'
import {
commitOffsetsForMessages,
consumeMessages,
+ countPartitionsPerTopic,
createKafkaConsumer,
disconnectConsumer,
instrumentConsumerMetrics,
@@ -32,6 +33,7 @@ export const startBatchConsumer = async ({
topicCreationTimeoutMs,
eachBatch,
autoCommit = true,
+ cooperativeRebalance = true,
queuedMinMessages = 100000,
}: {
connectionConfig: GlobalConfig
@@ -47,6 +49,7 @@ export const startBatchConsumer = async ({
topicCreationTimeoutMs: number
eachBatch: (messages: Message[]) => Promise<void>
autoCommit?: boolean
+ cooperativeRebalance?: boolean
queuedMinMessages?: number
}): Promise<BatchConsumer> => {
// Starts consuming from `topic` in batches of `fetchBatchSize` messages,
@@ -112,12 +115,12 @@ export const startBatchConsumer = async ({
// https://www.confluent.io/en-gb/blog/incremental-cooperative-rebalancing-in-kafka/
// for details on the advantages of this rebalancing strategy as well as
// how it works.
- 'partition.assignment.strategy': 'cooperative-sticky',
+ 'partition.assignment.strategy': cooperativeRebalance ? 'cooperative-sticky' : 'range,roundrobin',
rebalance_cb: true,
offset_commit_cb: true,
})
- instrumentConsumerMetrics(consumer, groupId)
+ instrumentConsumerMetrics(consumer, groupId, cooperativeRebalance)
let isShuttingDown = false
let lastLoopTime = Date.now()
@@ -181,6 +184,10 @@ export const startBatchConsumer = async ({
continue
}
+ for (const [topic, count] of countPartitionsPerTopic(consumer.assignments())) {
+ kafkaAbsolutePartitionCount.labels({ topic }).set(count)
+ }
+
status.debug('🔁', 'main_loop_consumed', { messagesLength: messages.length })
if (!messages.length) {
status.debug('🔁', 'main_loop_empty_batch', { cause: 'empty' })
@@ -278,3 +285,9 @@ const consumedMessageSizeBytes = new Histogram({
labelNames: ['topic', 'groupId', 'messageType'],
buckets: exponentialBuckets(1, 8, 4).map((bucket) => bucket * 1024),
})
+
+const kafkaAbsolutePartitionCount = new Gauge({
+ name: 'kafka_absolute_partition_count',
+ help: 'Number of partitions assigned to this consumer. (Absolute value from the consumer state.)',
+ labelNames: ['topic'],
+})
diff --git a/plugin-server/src/kafka/consumer.ts b/plugin-server/src/kafka/consumer.ts
index f3b3a91d2be44..62b8e951ebc9f 100644
--- a/plugin-server/src/kafka/consumer.ts
+++ b/plugin-server/src/kafka/consumer.ts
@@ -1,4 +1,5 @@
import {
+ Assignment,
ClientMetrics,
CODES,
ConsumerGlobalConfig,
@@ -9,7 +10,7 @@ import {
TopicPartitionOffset,
} from 'node-rdkafka-acosom'
-import { latestOffsetTimestampGauge } from '../main/ingestion-queues/metrics'
+import { kafkaRebalancePartitionCount, latestOffsetTimestampGauge } from '../main/ingestion-queues/metrics'
import { status } from '../utils/status'
export const createKafkaConsumer = async (config: ConsumerGlobalConfig) => {
@@ -54,7 +55,24 @@ export const createKafkaConsumer = async (config: ConsumerGlobalConfig) => {
})
})
}
-export const instrumentConsumerMetrics = (consumer: RdKafkaConsumer, groupId: string) => {
+
+export function countPartitionsPerTopic(assignments: Assignment[]): Map<string, number> {
+ const partitionsPerTopic = new Map()
+ for (const assignment of assignments) {
+ if (partitionsPerTopic.has(assignment.topic)) {
+ partitionsPerTopic.set(assignment.topic, partitionsPerTopic.get(assignment.topic) + 1)
+ } else {
+ partitionsPerTopic.set(assignment.topic, 1)
+ }
+ }
+ return partitionsPerTopic
+}
+
+export const instrumentConsumerMetrics = (
+ consumer: RdKafkaConsumer,
+ groupId: string,
+ cooperativeRebalance: boolean
+) => {
// For each message consumed, we record the latest timestamp processed for
// each partition assigned to this consumer group member. This consumer
// should only provide metrics for the partitions that are assigned to it,
@@ -79,6 +97,7 @@ export const instrumentConsumerMetrics = (consumer: RdKafkaConsumer, groupId: st
//
// TODO: add other relevant metrics here
// TODO: expose the internal librdkafka metrics as well.
+ const strategyString = cooperativeRebalance ? 'cooperative' : 'eager'
consumer.on('rebalance', (error: LibrdKafkaError, assignments: TopicPartition[]) => {
/**
* see https://github.com/Blizzard/node-rdkafka#rebalancing errors are used to signal
@@ -88,9 +107,23 @@ export const instrumentConsumerMetrics = (consumer: RdKafkaConsumer, groupId: st
* And when the balancing is completed the new assignments are received with ERR__ASSIGN_PARTITIONS
*/
if (error.code === CODES.ERRORS.ERR__ASSIGN_PARTITIONS) {
- status.info('📝️', 'librdkafka rebalance, partitions assigned', { assignments })
+ status.info('📝️', `librdkafka ${strategyString} rebalance, partitions assigned`, { assignments })
+ for (const [topic, count] of countPartitionsPerTopic(assignments)) {
+ if (cooperativeRebalance) {
+ kafkaRebalancePartitionCount.labels({ topic: topic }).inc(count)
+ } else {
+ kafkaRebalancePartitionCount.labels({ topic: topic }).set(count)
+ }
+ }
} else if (error.code === CODES.ERRORS.ERR__REVOKE_PARTITIONS) {
- status.info('📝️', 'librdkafka rebalance started, partitions revoked', { assignments })
+ status.info('📝️', `librdkafka ${strategyString} rebalance started, partitions revoked`, { assignments })
+ for (const [topic, count] of countPartitionsPerTopic(assignments)) {
+ if (cooperativeRebalance) {
+ kafkaRebalancePartitionCount.labels({ topic: topic }).dec(count)
+ } else {
+ kafkaRebalancePartitionCount.labels({ topic: topic }).set(count)
+ }
+ }
} else {
// We had a "real" error
status.error('⚠️', 'rebalance_error', { error })
diff --git a/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-onevent.ts b/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-onevent.ts
index a97d034778ac4..4d12925f0ce6b 100644
--- a/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-onevent.ts
+++ b/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-onevent.ts
@@ -4,20 +4,18 @@ import { EachBatchPayload, KafkaMessage } from 'kafkajs'
import { RawClickHouseEvent } from '../../../types'
import { convertToIngestionEvent } from '../../../utils/event'
import { status } from '../../../utils/status'
-import { groupIntoBatches } from '../../../utils/utils'
import { runInstrumentedFunction } from '../../utils'
import { KafkaJSIngestionConsumer } from '../kafka-queue'
import { eventDroppedCounter, latestOffsetTimestampGauge } from '../metrics'
+import { eachBatchHandlerHelper } from './each-batch-webhooks'
// Must require as `tsc` strips unused `import` statements and just requiring this seems to init some globals
require('@sentry/tracing')
export async function eachMessageAppsOnEventHandlers(
- message: KafkaMessage,
+ clickHouseEvent: RawClickHouseEvent,
queue: KafkaJSIngestionConsumer
): Promise<void> {
- const clickHouseEvent = JSON.parse(message.value!.toString()) as RawClickHouseEvent
-
const pluginConfigs = queue.pluginsServer.pluginConfigsPerTeam.get(clickHouseEvent.team_id)
if (pluginConfigs) {
// Elements parsing can be extremely slow, so we skip it for some plugins
@@ -50,7 +48,14 @@ export async function eachBatchAppsOnEventHandlers(
payload: EachBatchPayload,
queue: KafkaJSIngestionConsumer
): Promise<void> {
- await eachBatch(payload, queue, eachMessageAppsOnEventHandlers, groupIntoBatches, 'async_handlers_on_event')
+ await eachBatchHandlerHelper(
+ payload,
+ (teamId) => queue.pluginsServer.pluginConfigsPerTeam.has(teamId),
+ (event) => eachMessageAppsOnEventHandlers(event, queue),
+ queue.pluginsServer.statsd,
+ queue.pluginsServer.WORKER_CONCURRENCY * queue.pluginsServer.TASKS_PER_WORKER,
+ 'on_event'
+ )
}
export async function eachBatch(
diff --git a/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-webhooks.ts b/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-webhooks.ts
index 427297a613b1b..fb671f0cd9633 100644
--- a/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-webhooks.ts
+++ b/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-webhooks.ts
@@ -17,10 +17,10 @@ import { eventDroppedCounter, latestOffsetTimestampGauge } from '../metrics'
require('@sentry/tracing')
// exporting only for testing
-export function groupIntoBatchesWebhooks(
+export function groupIntoBatchesByUsage(
array: KafkaMessage[],
batchSize: number,
- actionMatcher: ActionMatcher
+ shouldProcess: (teamId: number) => boolean
): { eventBatch: RawClickHouseEvent[]; lastOffset: string; lastTimestamp: string }[] {
// Most events will not trigger a webhook call, so we want to filter them out as soon as possible
// to achieve the highest effective concurrency when executing the actual HTTP calls.
@@ -32,7 +32,7 @@ export function groupIntoBatchesWebhooks(
let currentCount = 0
array.forEach((message, index) => {
const clickHouseEvent = JSON.parse(message.value!.toString()) as RawClickHouseEvent
- if (actionMatcher.hasWebhooks(clickHouseEvent.team_id)) {
+ if (shouldProcess(clickHouseEvent.team_id)) {
currentBatch.push(clickHouseEvent)
currentCount++
} else {
@@ -58,18 +58,36 @@ export async function eachBatchWebhooksHandlers(
hookCannon: HookCommander,
statsd: StatsD | undefined,
concurrency: number
+): Promise<void> {
+ await eachBatchHandlerHelper(
+ payload,
+ (teamId) => actionMatcher.hasWebhooks(teamId),
+ (event) => eachMessageWebhooksHandlers(event, actionMatcher, hookCannon, statsd),
+ statsd,
+ concurrency,
+ 'webhooks'
+ )
+}
+
+export async function eachBatchHandlerHelper(
+ payload: EachBatchPayload,
+ shouldProcess: (teamId: number) => boolean,
+ eachMessageHandler: (event: RawClickHouseEvent) => Promise<void>,
+ statsd: StatsD | undefined,
+ concurrency: number,
+ stats_key: string
): Promise<void> {
// similar to eachBatch function in each-batch.ts, but without the dependency on the KafkaJSIngestionConsumer
// & handling the different batching return type
- const key = 'async_handlers_webhooks'
+ const key = `async_handlers_${stats_key}`
const batchStartTimer = new Date()
const loggingKey = `each_batch_${key}`
const { batch, resolveOffset, heartbeat, commitOffsetsIfNecessary, isRunning, isStale }: EachBatchPayload = payload
- const transaction = Sentry.startTransaction({ name: `eachBatchWebhooks` })
+ const transaction = Sentry.startTransaction({ name: `eachBatch${stats_key}` })
try {
- const batchesWithOffsets = groupIntoBatchesWebhooks(batch.messages, concurrency, actionMatcher)
+ const batchesWithOffsets = groupIntoBatchesByUsage(batch.messages, concurrency, shouldProcess)
statsd?.histogram('ingest_event_batching.input_length', batch.messages.length, { key: key })
statsd?.histogram('ingest_event_batching.batch_count', batchesWithOffsets.length, { key: key })
@@ -88,9 +106,7 @@ export async function eachBatchWebhooksHandlers(
}
await Promise.all(
- eventBatch.map((event: RawClickHouseEvent) =>
- eachMessageWebhooksHandlers(event, actionMatcher, hookCannon, statsd).finally(() => heartbeat())
- )
+ eventBatch.map((event: RawClickHouseEvent) => eachMessageHandler(event).finally(() => heartbeat()))
)
resolveOffset(lastOffset)
diff --git a/plugin-server/src/main/ingestion-queues/kafka-queue.ts b/plugin-server/src/main/ingestion-queues/kafka-queue.ts
index da51173e0507f..7989efd4b356a 100644
--- a/plugin-server/src/main/ingestion-queues/kafka-queue.ts
+++ b/plugin-server/src/main/ingestion-queues/kafka-queue.ts
@@ -255,6 +255,7 @@ export class IngestionConsumer {
consumerMaxWaitMs: this.pluginsServer.KAFKA_CONSUMPTION_MAX_WAIT_MS,
fetchBatchSize: 500,
topicCreationTimeoutMs: this.pluginsServer.KAFKA_TOPIC_CREATION_TIMEOUT_MS,
+ cooperativeRebalance: this.pluginsServer.KAFKA_CONSUMPTION_RDKAFKA_COOPERATIVE_REBALANCE,
eachBatch: (payload) => this.eachBatchConsumer(payload),
})
this.consumerReady = true
diff --git a/plugin-server/src/main/ingestion-queues/metrics.ts b/plugin-server/src/main/ingestion-queues/metrics.ts
index 97188247cbefa..099832e1ea14c 100644
--- a/plugin-server/src/main/ingestion-queues/metrics.ts
+++ b/plugin-server/src/main/ingestion-queues/metrics.ts
@@ -2,6 +2,12 @@
import { Counter, Gauge } from 'prom-client'
+export const kafkaRebalancePartitionCount = new Gauge({
+ name: 'kafka_rebalance_partition_count',
+ help: 'Number of partitions assigned to this consumer. (Calculated during rebalance events.)',
+ labelNames: ['topic'],
+})
+
export const latestOffsetTimestampGauge = new Gauge({
name: 'latest_processed_timestamp_ms',
help: 'Timestamp of the latest offset that has been committed.',
diff --git a/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts b/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts
index bf0a242496fd3..c9dacf1fabdef 100644
--- a/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts
+++ b/plugin-server/src/main/ingestion-queues/session-recording/services/replay-events-ingester.ts
@@ -105,16 +105,6 @@ export class ReplayEventsIngester {
return drop('producer_not_ready')
}
- if (event.replayIngestionConsumer !== 'v2') {
- eventDroppedCounter
- .labels({
- event_type: 'session_recordings_replay_events',
- drop_cause: 'not_target_consumer',
- })
- .inc()
- return
- }
-
if (
await this.offsetHighWaterMarker.isBelowHighWaterMark(
event.metadata,
diff --git a/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v2.ts b/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v2.ts
index 3a6e9b291c602..8e0473df357fe 100644
--- a/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v2.ts
+++ b/plugin-server/src/main/ingestion-queues/session-recording/session-recordings-consumer-v2.ts
@@ -279,7 +279,6 @@ export class SessionRecordingIngesterV2 {
session_id: event.properties?.$session_id,
window_id: event.properties?.$window_id,
events: event.properties.$snapshot_items,
- replayIngestionConsumer: event.properties?.$snapshot_consumer ?? 'v1',
}
return recordingMessage
diff --git a/plugin-server/src/main/ingestion-queues/session-recording/types.ts b/plugin-server/src/main/ingestion-queues/session-recording/types.ts
index c29c1ad81f1db..6bff13bbde468 100644
--- a/plugin-server/src/main/ingestion-queues/session-recording/types.ts
+++ b/plugin-server/src/main/ingestion-queues/session-recording/types.ts
@@ -14,8 +14,6 @@ export type IncomingRecordingMessage = {
session_id: string
window_id?: string
events: RRWebEvent[]
- // NOTE: This is only for migrating from one consumer to the other
- replayIngestionConsumer: 'v1' | 'v2'
}
// This is the incoming message from Kafka
diff --git a/plugin-server/src/main/pluginsServer.ts b/plugin-server/src/main/pluginsServer.ts
index a89355cb51c98..6d3f32638ce64 100644
--- a/plugin-server/src/main/pluginsServer.ts
+++ b/plugin-server/src/main/pluginsServer.ts
@@ -1,10 +1,12 @@
import * as Sentry from '@sentry/node'
+import fs from 'fs'
import { Server } from 'http'
import { CompressionCodecs, CompressionTypes, Consumer, KafkaJSProtocolError } from 'kafkajs'
// @ts-expect-error no type definitions
import SnappyCodec from 'kafkajs-snappy'
import * as schedule from 'node-schedule'
import { Counter } from 'prom-client'
+import v8Profiler from 'v8-profiler-next'
import { getPluginServerCapabilities } from '../capabilities'
import { defaultConfig, sessionRecordingConsumerConfig } from '../config/config'
@@ -63,6 +65,7 @@ export async function startPluginsServer(
status.updatePrompt(serverConfig.PLUGIN_SERVER_MODE)
status.info('ℹ️', `${serverConfig.WORKER_CONCURRENCY} workers, ${serverConfig.TASKS_PER_WORKER} tasks per worker`)
+ runStartupProfiles(serverConfig)
// Structure containing initialized clients for Postgres, Kafka, Redis, etc.
let hub: Hub | undefined
@@ -508,3 +511,26 @@ const kafkaProtocolErrors = new Counter({
help: 'Kafka protocol errors encountered, by type',
labelNames: ['type', 'code'],
})
+
+function runStartupProfiles(config: PluginsServerConfig) {
+ if (config.STARTUP_PROFILE_CPU) {
+ status.info('🩺', `Collecting cpu profile...`)
+ v8Profiler.setGenerateType(1)
+ v8Profiler.startProfiling('startup', true)
+ setTimeout(() => {
+ const profile = v8Profiler.stopProfiling('startup')
+ fs.writeFileSync('./startup.cpuprofile', JSON.stringify(profile))
+ status.info('🩺', `Wrote cpu profile to disk`)
+ profile.delete()
+ }, config.STARTUP_PROFILE_DURATION_SECONDS * 1000)
+ }
+ if (config.STARTUP_PROFILE_HEAP) {
+ status.info('🩺', `Collecting heap profile...`)
+ v8Profiler.startSamplingHeapProfiling(config.STARTUP_PROFILE_HEAP_INTERVAL, config.STARTUP_PROFILE_HEAP_DEPTH)
+ setTimeout(() => {
+ const profile = v8Profiler.stopSamplingHeapProfiling()
+ fs.writeFileSync('./startup.heapprofile', JSON.stringify(profile))
+ status.info('🩺', `Wrote heap profile to disk`)
+ }, config.STARTUP_PROFILE_DURATION_SECONDS * 1000)
+ }
+}
diff --git a/plugin-server/src/main/services/http-server.ts b/plugin-server/src/main/services/http-server.ts
index bccee47d21e2f..0d84c9815f5cf 100644
--- a/plugin-server/src/main/services/http-server.ts
+++ b/plugin-server/src/main/services/http-server.ts
@@ -155,8 +155,13 @@ function exportProfile(req: IncomingMessage, res: ServerResponse) {
}, durationSeconds * 1000)
break
case 'heap':
+ // Additional params for sampling heap profile, higher precision means bigger profile.
+ // Defaults are taken from https://v8.github.io/api/head/classv8_1_1HeapProfiler.html
+ const interval = url.searchParams.get('interval') ? parseInt(url.searchParams.get('interval')!) : 512 * 1024
+ const depth = url.searchParams.get('depth') ? parseInt(url.searchParams.get('depth')!) : 16
+
sendHeaders('heapprofile')
- v8Profiler.startSamplingHeapProfiling()
+ v8Profiler.startSamplingHeapProfiling(interval, depth)
setTimeout(() => {
outputProfileResult(res, type, v8Profiler.stopSamplingHeapProfiling())
}, durationSeconds * 1000)
diff --git a/plugin-server/src/types.ts b/plugin-server/src/types.ts
index 62463957ad249..9a0e0f4ebfe52 100644
--- a/plugin-server/src/types.ts
+++ b/plugin-server/src/types.ts
@@ -21,7 +21,7 @@ import { VM } from 'vm2'
import { ObjectStorage } from './main/services/object_storage'
import { DB } from './utils/db/db'
import { KafkaProducerWrapper } from './utils/db/kafka-producer-wrapper'
-import { PostgresRouter } from './utils/db/postgres' /** Re-export Element from scaffolding, for backwards compat. */
+import { PostgresRouter } from './utils/db/postgres'
import { UUID } from './utils/utils'
import { AppMetrics } from './worker/ingestion/app-metrics'
import { EventPipelineResult } from './worker/ingestion/event-pipeline/runner'
@@ -33,8 +33,7 @@ import { RootAccessManager } from './worker/vm/extensions/helpers/root-acess-man
import { LazyPluginVM } from './worker/vm/lazy'
import { PromiseManager } from './worker/vm/promise-manager'
-/** Re-export Element from scaffolding, for backwards compat. */
-export { Element } from '@posthog/plugin-scaffold'
+export { Element } from '@posthog/plugin-scaffold' // Re-export Element from scaffolding, for backwards compat.
type Brand<K, T> = K & { __brand: T }
@@ -130,6 +129,7 @@ export interface PluginsServerConfig {
KAFKA_SASL_PASSWORD: string | undefined
KAFKA_CLIENT_RACK: string | undefined
KAFKA_CONSUMPTION_USE_RDKAFKA: boolean
+ KAFKA_CONSUMPTION_RDKAFKA_COOPERATIVE_REBALANCE: boolean
KAFKA_CONSUMPTION_MAX_BYTES: number
KAFKA_CONSUMPTION_MAX_BYTES_PER_PARTITION: number
KAFKA_CONSUMPTION_MAX_WAIT_MS: number // fetch.wait.max.ms rdkafka parameter
@@ -189,6 +189,7 @@ export interface PluginsServerConfig {
OBJECT_STORAGE_SECRET_ACCESS_KEY: string
OBJECT_STORAGE_BUCKET: string // the object storage bucket name
PLUGIN_SERVER_MODE: PluginServerMode | null
+ PLUGIN_LOAD_SEQUENTIALLY: boolean // could help with reducing memory usage spikes on startup
KAFKAJS_LOG_LEVEL: 'NOTHING' | 'DEBUG' | 'INFO' | 'WARN' | 'ERROR'
HISTORICAL_EXPORTS_ENABLED: boolean // enables historical exports for export apps
HISTORICAL_EXPORTS_MAX_RETRY_COUNT: number
@@ -201,6 +202,13 @@ export interface PluginsServerConfig {
EVENT_OVERFLOW_BUCKET_REPLENISH_RATE: number
CLOUD_DEPLOYMENT: string
+ // dump profiles to disk, covering the first N seconds of runtime
+ STARTUP_PROFILE_DURATION_SECONDS: number
+ STARTUP_PROFILE_CPU: boolean
+ STARTUP_PROFILE_HEAP: boolean
+ STARTUP_PROFILE_HEAP_INTERVAL: number
+ STARTUP_PROFILE_HEAP_DEPTH: number
+
// local directory might be a volume mount or a directory on disk (e.g. in local dev)
SESSION_RECORDING_LOCAL_DIRECTORY: string
SESSION_RECORDING_MAX_BUFFER_AGE_SECONDS: number
diff --git a/plugin-server/src/utils/db/hub.ts b/plugin-server/src/utils/db/hub.ts
index 710a163752a6b..4e37d8a5cd715 100644
--- a/plugin-server/src/utils/db/hub.ts
+++ b/plugin-server/src/utils/db/hub.ts
@@ -91,7 +91,6 @@ export async function createHub(
: undefined,
rejectUnauthorized: serverConfig.CLICKHOUSE_CA ? false : undefined,
})
- await clickhouse.querying('SELECT 1') // test that the connection works
status.info('👍', `ClickHouse ready`)
status.info('🤔', `Connecting to Kafka...`)
diff --git a/plugin-server/src/utils/db/utils.ts b/plugin-server/src/utils/db/utils.ts
index 49db8914194f6..9e4eb0a3c11b7 100644
--- a/plugin-server/src/utils/db/utils.ts
+++ b/plugin-server/src/utils/db/utils.ts
@@ -39,7 +39,22 @@ export function timeoutGuard(
}, timeout)
}
-const campaignParams = new Set([
+const eventToPersonProperties = new Set([
+ // mobile params
+ '$app_build',
+ '$app_name',
+ '$app_namespace',
+ '$app_version',
+ // web params
+ '$browser',
+ '$browser_version',
+ '$device_type',
+ '$current_url',
+ '$pathname',
+ '$os',
+ '$referring_domain',
+ '$referrer',
+ // campaign params
'utm_source',
'utm_medium',
'utm_campaign',
@@ -50,31 +65,29 @@ const campaignParams = new Set([
'fbclid',
'msclkid',
])
-const initialParams = new Set([
- '$browser',
- '$browser_version',
- '$device_type',
- '$current_url',
- '$pathname',
- '$os',
- '$referring_domain',
- '$referrer',
-])
-const combinedParams = new Set([...campaignParams, ...initialParams])
/** If we get new UTM params, make sure we set those **/
export function personInitialAndUTMProperties(properties: Properties): Properties {
const propertiesCopy = { ...properties }
- const maybeSet = Object.entries(properties).filter(([key]) => campaignParams.has(key))
- const maybeSetInitial = Object.entries(properties)
- .filter(([key]) => combinedParams.has(key))
- .map(([key, value]) => [`$initial_${key.replace('$', '')}`, value])
- if (Object.keys(maybeSet).length > 0) {
+ const propertiesForPerson: [string, any][] = Object.entries(properties).filter(([key]) =>
+ eventToPersonProperties.has(key)
+ )
+
+ // all potential params are checked for $initial_ values and added to $set_once
+ const maybeSetOnce: [string, any][] = propertiesForPerson.map(([key, value]) => [
+ `$initial_${key.replace('$', '')}`,
+ value,
+ ])
+
+ // all found are also then added to $set
+ const maybeSet: [string, any][] = propertiesForPerson
+
+ if (maybeSet.length > 0) {
propertiesCopy.$set = { ...(properties.$set || {}), ...Object.fromEntries(maybeSet) }
}
- if (Object.keys(maybeSetInitial).length > 0) {
- propertiesCopy.$set_once = { ...(properties.$set_once || {}), ...Object.fromEntries(maybeSetInitial) }
+ if (maybeSetOnce.length > 0) {
+ propertiesCopy.$set_once = { ...(properties.$set_once || {}), ...Object.fromEntries(maybeSetOnce) }
}
return propertiesCopy
}
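For reference, a minimal standalone sketch of the behaviour the refactored personInitialAndUTMProperties converges on; the property set is abridged here, and the full list lives in plugin-server/src/utils/db/utils.ts.

```ts
type Properties = Record<string, any>

// abridged: the real set also covers the mobile app, web, and campaign parameters listed above
const eventToPersonProperties = new Set(['$browser', '$current_url', 'utm_source'])

function personInitialAndUTMPropertiesSketch(properties: Properties): Properties {
    const result = { ...properties }
    const forPerson = Object.entries(properties).filter(([key]) => eventToPersonProperties.has(key))
    if (forPerson.length > 0) {
        // every matching property is written to $set as-is...
        result.$set = { ...(properties.$set || {}), ...Object.fromEntries(forPerson) }
        // ...and to $set_once under an $initial_ key (with the leading "$" stripped)
        result.$set_once = {
            ...(properties.$set_once || {}),
            ...Object.fromEntries(forPerson.map(([key, value]) => [`$initial_${key.replace('$', '')}`, value])),
        }
    }
    return result
}

// e.g. { $browser: 'Chrome' } gains $set: { $browser: 'Chrome' } and $set_once: { $initial_browser: 'Chrome' }
```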
diff --git a/plugin-server/src/utils/utils.ts b/plugin-server/src/utils/utils.ts
index 69c56640bf886..aace016721449 100644
--- a/plugin-server/src/utils/utils.ts
+++ b/plugin-server/src/utils/utils.ts
@@ -312,14 +312,6 @@ export function escapeClickHouseString(string: string): string {
return string.replace(/\\/g, '\\\\').replace(/'/g, "\\'")
}
-export function groupIntoBatches<T>(array: T[], batchSize: number): T[][] {
- const batches = []
- for (let i = 0; i < array.length; i += batchSize) {
- batches.push(array.slice(i, i + batchSize))
- }
- return batches
-}
-
/** Standardize JS code used internally to form without extraneous indentation. Template literal function. */
export function code(strings: TemplateStringsArray): string {
const stringsConcat = strings.join('…')
diff --git a/plugin-server/src/worker/ingestion/person-state.ts b/plugin-server/src/worker/ingestion/person-state.ts
index e5f1327895cfa..72a82a07d0aec 100644
--- a/plugin-server/src/worker/ingestion/person-state.ts
+++ b/plugin-server/src/worker/ingestion/person-state.ts
@@ -17,9 +17,17 @@ import { castTimestampOrNow, UUIDT } from '../../utils/utils'
import { captureIngestionWarning } from './utils'
const MAX_FAILED_PERSON_MERGE_ATTEMPTS = 3
+
+export const mergeFinalFailuresCounter = new Counter({
+ name: 'person_merge_final_failure_total',
+ help: 'Number of person merge final failures.',
+})
+
// used to prevent identify from being used with generic IDs
// that we can safely assume stem from a bug or mistake
-const CASE_INSENSITIVE_ILLEGAL_IDS = new Set([
+const BARE_CASE_INSENSITIVE_ILLEGAL_IDS = [
'anonymous',
'guest',
'distinctid',
@@ -30,17 +38,34 @@ const CASE_INSENSITIVE_ILLEGAL_IDS = new Set([
'undefined',
'true',
'false',
-])
-
-export const mergeFinalFailuresCounter = new Counter({
- name: 'person_merge_final_failure_total',
- help: 'Number of person merge final failures.',
-})
-
-const CASE_SENSITIVE_ILLEGAL_IDS = new Set(['[object Object]', 'NaN', 'None', 'none', 'null', '0', 'undefined'])
+]
+
+const BARE_CASE_SENSITIVE_ILLEGAL_IDS = ['[object Object]', 'NaN', 'None', 'none', 'null', '0', 'undefined']
+
+// we have seen illegal ids received but wrapped in double quotes
+// to protect ourselves from this we'll add the single- and double-quoted versions of the illegal ids
+const singleQuoteIds = (ids: string[]) => ids.map((id) => `'${id}'`)
+const doubleQuoteIds = (ids: string[]) => ids.map((id) => `"${id}"`)
+
+// some ids are illegal regardless of casing,
+// while others are illegal only in their exact casing
+// so, for example, we want to forbid `NaN` but not `nan`,
+// but we will forbid both `uNdEfInEd` and `undefined`
+const CASE_INSENSITIVE_ILLEGAL_IDS = new Set(
+ BARE_CASE_INSENSITIVE_ILLEGAL_IDS.concat(singleQuoteIds(BARE_CASE_INSENSITIVE_ILLEGAL_IDS)).concat(
+ doubleQuoteIds(BARE_CASE_INSENSITIVE_ILLEGAL_IDS)
+ )
+)
+
+const CASE_SENSITIVE_ILLEGAL_IDS = new Set(
+ BARE_CASE_SENSITIVE_ILLEGAL_IDS.concat(singleQuoteIds(BARE_CASE_SENSITIVE_ILLEGAL_IDS)).concat(
+ doubleQuoteIds(BARE_CASE_SENSITIVE_ILLEGAL_IDS)
+ )
+)
const isDistinctIdIllegal = (id: string): boolean => {
- return id.trim() === '' || CASE_INSENSITIVE_ILLEGAL_IDS.has(id.toLowerCase()) || CASE_SENSITIVE_ILLEGAL_IDS.has(id)
+ const trimmed = id.trim()
+ return trimmed === '' || CASE_INSENSITIVE_ILLEGAL_IDS.has(id.toLowerCase()) || CASE_SENSITIVE_ILLEGAL_IDS.has(id)
}
// This class is responsible for creating/updating a single person through the process-event pipeline
@@ -245,7 +270,7 @@ export class PersonState {
this.teamId,
this.timestamp
)
- } else if (this.event.event === '$identify' && this.eventProperties['$anon_distinct_id']) {
+ } else if (this.event.event === '$identify' && '$anon_distinct_id' in this.eventProperties) {
return await this.merge(
String(this.eventProperties['$anon_distinct_id']),
this.distinctId,
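A self-contained sketch of the illegal-ID construction added above: the bare lists are expanded with their single- and double-quoted variants before lookup. The lists are abridged and the names are illustrative; the real sets live in person-state.ts.

```ts
const BARE_ILLEGAL_IDS = ['anonymous', 'undefined', 'null'] // abridged

const withQuotedVariants = (ids: string[]): Set<string> =>
    new Set([...ids, ...ids.map((id) => `'${id}'`), ...ids.map((id) => `"${id}"`)])

const ILLEGAL_IDS = withQuotedVariants(BARE_ILLEGAL_IDS)

// e.g. all of `undefined`, `'undefined'` and `"undefined"` are rejected, but `user-42` is not
const isIllegalSketch = (id: string): boolean => id.trim() === '' || ILLEGAL_IDS.has(id.toLowerCase())
```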
diff --git a/plugin-server/src/worker/ingestion/process-event.ts b/plugin-server/src/worker/ingestion/process-event.ts
index 82de215adf5e2..44327a6a8bfd5 100644
--- a/plugin-server/src/worker/ingestion/process-event.ts
+++ b/plugin-server/src/worker/ingestion/process-event.ts
@@ -272,7 +272,7 @@ export interface SummarizedSessionRecordingEvent {
team_id: number
distinct_id: string
session_id: string
- first_url: string | undefined
+ first_url: string | null
click_count: number
keypress_count: number
mouse_activity_count: number
@@ -281,6 +281,8 @@ export interface SummarizedSessionRecordingEvent {
console_warn_count: number
console_error_count: number
size: number
+ event_count: number
+ message_count: number
}
export const createSessionReplayEvent = (
@@ -311,7 +313,7 @@ export const createSessionReplayEvent = (
let consoleLogCount = 0
let consoleWarnCount = 0
let consoleErrorCount = 0
- let url: string | undefined = undefined
+ let url: string | null = null
events.forEach((event) => {
if (event.type === 3) {
mouseActivity += 1
@@ -322,7 +324,7 @@ export const createSessionReplayEvent = (
keypressCount += 1
}
}
- if (!!event.data?.href?.trim().length && url === undefined) {
+ if (url === null && !!event.data?.href?.trim().length) {
url = event.data.href
}
if (event.type === 6 && event.data?.plugin === 'rrweb/console@1') {
@@ -339,22 +341,26 @@ export const createSessionReplayEvent = (
const activeTime = activeMilliseconds(events)
+ // NB: forces types to be correct, e.g. by truncating or rounding,
+ // so that we don't send floats when we should send an integer
const data: SummarizedSessionRecordingEvent = {
uuid,
team_id: team_id,
- distinct_id: distinct_id,
+ distinct_id: String(distinct_id),
session_id: session_id,
first_timestamp: timestamps[0],
last_timestamp: timestamps[timestamps.length - 1],
- click_count: clickCount,
- keypress_count: keypressCount,
- mouse_activity_count: mouseActivity,
+ click_count: Math.trunc(clickCount),
+ keypress_count: Math.trunc(keypressCount),
+ mouse_activity_count: Math.trunc(mouseActivity),
first_url: url,
- active_milliseconds: activeTime,
- console_log_count: consoleLogCount,
- console_warn_count: consoleWarnCount,
- console_error_count: consoleErrorCount,
- size: Buffer.byteLength(JSON.stringify(events), 'utf8'),
+ active_milliseconds: Math.round(activeTime),
+ console_log_count: Math.trunc(consoleLogCount),
+ console_warn_count: Math.trunc(consoleWarnCount),
+ console_error_count: Math.trunc(consoleErrorCount),
+ size: Math.trunc(Buffer.byteLength(JSON.stringify(events), 'utf8')),
+ event_count: Math.trunc(events.length),
+ message_count: 1,
}
return data
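The Math.trunc/Math.round wrapping above exists so that ClickHouse integer columns never receive floats; a tiny illustration with hypothetical raw aggregates:

```ts
// hypothetical raw aggregates gathered while walking the rrweb events
const raw = { clickCount: 3, activeTime: 1234.56, payloadBytes: 73 }

const row = {
    click_count: Math.trunc(raw.clickCount), // 3 - counts are truncated
    active_milliseconds: Math.round(raw.activeTime), // 1235 - durations are rounded
    size: Math.trunc(raw.payloadBytes), // 73 - sizes are truncated
}
```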
diff --git a/plugin-server/src/worker/plugins/setup.ts b/plugin-server/src/worker/plugins/setup.ts
index 2ff72c9a899aa..4d2d2e33e8807 100644
--- a/plugin-server/src/worker/plugins/setup.ts
+++ b/plugin-server/src/worker/plugins/setup.ts
@@ -26,8 +26,11 @@ export async function setupPlugins(hub: Hub): Promise<void> {
pluginConfig.vm = statelessVms[plugin.id]
} else {
pluginConfig.vm = new LazyPluginVM(hub, pluginConfig)
- pluginVMLoadPromises.push(loadPlugin(hub, pluginConfig))
-
+ if (hub.PLUGIN_LOAD_SEQUENTIALLY) {
+ await loadPlugin(hub, pluginConfig)
+ } else {
+ pluginVMLoadPromises.push(loadPlugin(hub, pluginConfig))
+ }
if (prevConfig) {
void teardownPlugins(hub, prevConfig)
}
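A hedged sketch of the two loading strategies the PLUGIN_LOAD_SEQUENTIALLY flag switches between; the helper below is illustrative only, not the plugin server's API.

```ts
async function loadAll<T>(items: T[], sequentially: boolean, load: (item: T) => Promise<void>): Promise<void> {
    if (sequentially) {
        // one VM initialises at a time: slower startup, flatter memory profile
        for (const item of items) {
            await load(item)
        }
    } else {
        // all VMs initialise concurrently: fastest wall-clock time, highest peak memory
        await Promise.all(items.map(load))
    }
}
```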
diff --git a/plugin-server/tests/main/ingestion-queues/each-batch.test.ts b/plugin-server/tests/main/ingestion-queues/each-batch.test.ts
index 617978884fe29..0580f53d2724b 100644
--- a/plugin-server/tests/main/ingestion-queues/each-batch.test.ts
+++ b/plugin-server/tests/main/ingestion-queues/each-batch.test.ts
@@ -9,13 +9,10 @@ import {
eachBatchLegacyIngestion,
splitKafkaJSIngestionBatch,
} from '../../../src/main/ingestion-queues/batch-processing/each-batch-ingestion-kafkajs'
-import {
- eachBatch,
- eachBatchAppsOnEventHandlers,
-} from '../../../src/main/ingestion-queues/batch-processing/each-batch-onevent'
+import { eachBatchAppsOnEventHandlers } from '../../../src/main/ingestion-queues/batch-processing/each-batch-onevent'
import {
eachBatchWebhooksHandlers,
- groupIntoBatchesWebhooks,
+ groupIntoBatchesByUsage,
} from '../../../src/main/ingestion-queues/batch-processing/each-batch-webhooks'
import {
ClickHouseTimestamp,
@@ -24,7 +21,6 @@ import {
PostIngestionEvent,
RawClickHouseEvent,
} from '../../../src/types'
-import { groupIntoBatches } from '../../../src/utils/utils'
import { ActionManager } from '../../../src/worker/ingestion/action-manager'
import { ActionMatcher } from '../../../src/worker/ingestion/action-matcher'
import { HookCommander } from '../../../src/worker/ingestion/hooks'
@@ -150,26 +146,6 @@ describe('eachBatchX', () => {
}
})
- describe('eachBatch', () => {
- it('calls eachMessage with the correct arguments', async () => {
- const eachMessage = jest.fn(() => Promise.resolve())
- const batch = createKafkaJSBatch(event)
- await eachBatch(batch, queue, eachMessage, groupIntoBatches, 'key')
-
- expect(eachMessage).toHaveBeenCalledWith({ value: JSON.stringify(event) }, queue)
- })
-
- it('tracks metrics based on the key', async () => {
- const eachMessage = jest.fn(() => Promise.resolve())
- await eachBatch(createKafkaJSBatch(event), queue, eachMessage, groupIntoBatches, 'my_key')
-
- expect(queue.pluginsServer.statsd.timing).toHaveBeenCalledWith(
- 'kafka_queue.each_batch_my_key',
- expect.any(Date)
- )
- })
- })
-
describe('eachBatchAppsOnEventHandlers', () => {
it('calls runAppsOnEventPipeline when useful', async () => {
queue.pluginsServer.pluginConfigsPerTeam.set(2, [pluginConfig39])
@@ -333,11 +309,9 @@ describe('eachBatchX', () => {
kafkaTimestamp: '2020-02-23 00:10:00.00' as ClickHouseTimestamp,
},
])
- const actionManager = new ActionManager(queue.pluginsServer.postgres)
- const actionMatcher = new ActionMatcher(queue.pluginsServer.postgres, actionManager)
- // mock hasWebhooks 10 calls, 1,3,10 should return false, others true
- actionMatcher.hasWebhooks = jest.fn((teamId) => teamId !== 1 && teamId !== 3 && teamId !== 10)
- const result = groupIntoBatchesWebhooks(batch.batch.messages, 5, actionMatcher)
+ // teamIDs 1,3,10 should return false, others true
+ const toProcess = jest.fn((teamId) => teamId !== 1 && teamId !== 3 && teamId !== 10)
+ const result = groupIntoBatchesByUsage(batch.batch.messages, 5, toProcess)
expect(result).toEqual([
{
eventBatch: expect.arrayContaining([
@@ -375,8 +349,7 @@ describe('eachBatchX', () => {
])
// make sure that if the last message would start a new batch and is going to be excluded, we
// still get the last batch as empty with the right offset and timestamp
- actionMatcher.hasWebhooks = jest.fn((teamId) => teamId !== 1 && teamId !== 3 && teamId !== 10)
- const result2 = groupIntoBatchesWebhooks(batch.batch.messages, 7, actionMatcher)
+ const result2 = groupIntoBatchesByUsage(batch.batch.messages, 7, toProcess)
expect(result2).toEqual([
{
eventBatch: expect.arrayContaining([
diff --git a/plugin-server/tests/main/ingestion-queues/kafka-queue.test.ts b/plugin-server/tests/main/ingestion-queues/kafka-queue.test.ts
index c0912a2ca499b..31dc19d000f3b 100644
--- a/plugin-server/tests/main/ingestion-queues/kafka-queue.test.ts
+++ b/plugin-server/tests/main/ingestion-queues/kafka-queue.test.ts
@@ -1,4 +1,7 @@
+import { Assignment } from 'node-rdkafka-acosom'
+
import { KAFKA_EVENTS_PLUGIN_INGESTION } from '../../../src/config/kafka-topics'
+import { countPartitionsPerTopic } from '../../../src/kafka/consumer'
import { ServerInstance, startPluginsServer } from '../../../src/main/pluginsServer'
import { LogLevel, PluginsServerConfig } from '../../../src/types'
import { Hub } from '../../../src/types'
@@ -79,3 +82,22 @@ describe.skip('IngestionConsumer', () => {
expect(bufferCalls.length).toEqual(1)
})
})
+
+describe('countPartitionsPerTopic', () => {
+ it('should correctly count the number of partitions per topic', () => {
+ const assignments: Assignment[] = [
+ { topic: 'topic1', partition: 0 },
+ { topic: 'topic1', partition: 1 },
+ { topic: 'topic2', partition: 0 },
+ { topic: 'topic2', partition: 1 },
+ { topic: 'topic2', partition: 2 },
+ { topic: 'topic3', partition: 0 },
+ ]
+
+ const result = countPartitionsPerTopic(assignments)
+ expect(result.get('topic1')).toBe(2)
+ expect(result.get('topic2')).toBe(3)
+ expect(result.get('topic3')).toBe(1)
+ expect(result.size).toBe(3)
+ })
+})
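The new test pins down the expected shape of countPartitionsPerTopic; one plausible implementation consistent with it (the real one lives in src/kafka/consumer.ts) is:

```ts
interface Assignment {
    topic: string
    partition: number
}

export function countPartitionsPerTopic(assignments: Assignment[]): Map<string, number> {
    const counts = new Map<string, number>()
    for (const { topic } of assignments) {
        counts.set(topic, (counts.get(topic) ?? 0) + 1)
    }
    return counts
}
```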
diff --git a/plugin-server/tests/main/ingestion-queues/session-recording/fixtures.ts b/plugin-server/tests/main/ingestion-queues/session-recording/fixtures.ts
index 7bdb5d749e778..105ebf249e2ba 100644
--- a/plugin-server/tests/main/ingestion-queues/session-recording/fixtures.ts
+++ b/plugin-server/tests/main/ingestion-queues/session-recording/fixtures.ts
@@ -16,7 +16,6 @@ export function createIncomingRecordingMessage(
session_id: 'session_id_1',
window_id: 'window_id_1',
events: [{ ...jsonFullSnapshot }],
- replayIngestionConsumer: 'v2',
...partialIncomingMessage,
metadata: {
diff --git a/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer-v2.test.ts b/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer-v2.test.ts
index 87b66a7210fc1..c792c9b1947e5 100644
--- a/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer-v2.test.ts
+++ b/plugin-server/tests/main/ingestion-queues/session-recording/session-recordings-consumer-v2.test.ts
@@ -211,7 +211,6 @@ describe('ingester', () => {
timestamp: 1,
topic: 'the_topic',
},
- replayIngestionConsumer: 'v2',
session_id: '018a47c2-2f4a-70a8-b480-5e51d8b8d070',
team_id: 1,
window_id: '018a47c2-2f4a-70a8-b480-5e52f5480448',
diff --git a/plugin-server/tests/main/process-event.test.ts b/plugin-server/tests/main/process-event.test.ts
index 25251874e2086..94505831b8452 100644
--- a/plugin-server/tests/main/process-event.test.ts
+++ b/plugin-server/tests/main/process-event.test.ts
@@ -315,7 +315,7 @@ test('capture new person', async () => {
let persons = await hub.db.fetchPersons()
expect(persons[0].version).toEqual(0)
expect(persons[0].created_at).toEqual(now)
- let expectedProps = {
+ let expectedProps: Record<string, any> = {
$creator_event_uuid: uuid,
$initial_browser: 'Chrome',
$initial_browser_version: '95',
@@ -329,6 +329,12 @@ test('capture new person', async () => {
msclkid: 'BING ADS ID',
$initial_referrer: 'https://google.com/?q=posthog',
$initial_referring_domain: 'https://google.com',
+ $browser: 'Chrome',
+ $browser_version: '95',
+ $current_url: 'https://test.com',
+ $os: 'Mac OS X',
+ $referrer: 'https://google.com/?q=posthog',
+ $referring_domain: 'https://google.com',
}
expect(persons[0].properties).toEqual(expectedProps)
@@ -343,7 +349,17 @@ test('capture new person', async () => {
expect(events[0].properties).toEqual({
$ip: '127.0.0.1',
$os: 'Mac OS X',
- $set: { utm_medium: 'twitter', gclid: 'GOOGLE ADS ID', msclkid: 'BING ADS ID' },
+ $set: {
+ utm_medium: 'twitter',
+ gclid: 'GOOGLE ADS ID',
+ msclkid: 'BING ADS ID',
+ $browser: 'Chrome',
+ $browser_version: '95',
+ $current_url: 'https://test.com',
+ $os: 'Mac OS X',
+ $referrer: 'https://google.com/?q=posthog',
+ $referring_domain: 'https://google.com',
+ },
token: 'THIS IS NOT A TOKEN FOR TEAM 2',
$browser: 'Chrome',
$set_once: {
@@ -412,6 +428,12 @@ test('capture new person', async () => {
msclkid: 'BING ADS ID',
$initial_referrer: 'https://google.com/?q=posthog',
$initial_referring_domain: 'https://google.com',
+ $browser: 'Firefox',
+ $browser_version: 80,
+ $current_url: 'https://test.com/pricing',
+ $os: 'Mac OS X',
+ $referrer: 'https://google.com/?q=posthog',
+ $referring_domain: 'https://google.com',
}
expect(persons[0].properties).toEqual(expectedProps)
@@ -425,6 +447,9 @@ test('capture new person', async () => {
expect(events[1].properties.$set).toEqual({
utm_medium: 'instagram',
+ $browser: 'Firefox',
+ $browser_version: 80,
+ $current_url: 'https://test.com/pricing',
})
expect(events[1].properties.$set_once).toEqual({
$initial_browser: 'Firefox',
@@ -481,6 +506,9 @@ test('capture new person', async () => {
expect(persons[0].version).toEqual(1)
expect(events[2].properties.$set).toEqual({
+ $browser: 'Firefox',
+ $current_url: 'https://test.com/pricing',
utm_medium: 'instagram',
})
expect(events[2].properties.$set_once).toEqual({
@@ -1236,6 +1264,8 @@ const sessionReplayEventTestCases: {
| 'console_warn_count'
| 'console_error_count'
| 'size'
+ | 'event_count'
+ | 'message_count'
>
}[] = [
{
@@ -1244,7 +1274,7 @@ const sessionReplayEventTestCases: {
click_count: 1,
keypress_count: 0,
mouse_activity_count: 1,
- first_url: undefined,
+ first_url: null,
first_timestamp: '2023-04-25 18:58:13.469',
last_timestamp: '2023-04-25 18:58:13.469',
active_milliseconds: 1, // one event, but it's active, so active time is 1ms not 0
@@ -1252,6 +1282,8 @@ const sessionReplayEventTestCases: {
console_warn_count: 0,
console_error_count: 0,
size: 73,
+ event_count: 1,
+ message_count: 1,
},
},
{
@@ -1260,7 +1292,7 @@ const sessionReplayEventTestCases: {
click_count: 0,
keypress_count: 1,
mouse_activity_count: 1,
- first_url: undefined,
+ first_url: null,
first_timestamp: '2023-04-25 18:58:13.469',
last_timestamp: '2023-04-25 18:58:13.469',
active_milliseconds: 1, // one event, but it's active, so active time is 1ms not 0
@@ -1268,6 +1300,8 @@ const sessionReplayEventTestCases: {
console_warn_count: 0,
console_error_count: 0,
size: 73,
+ event_count: 1,
+ message_count: 1,
},
},
{
@@ -1316,7 +1350,7 @@ const sessionReplayEventTestCases: {
click_count: 0,
keypress_count: 1,
mouse_activity_count: 1,
- first_url: undefined,
+ first_url: null,
first_timestamp: '2023-04-25 18:58:13.469',
last_timestamp: '2023-04-25 18:58:13.469',
active_milliseconds: 1, // one event, but it's active, so active time is 1ms not 0
@@ -1324,6 +1358,8 @@ const sessionReplayEventTestCases: {
console_warn_count: 3,
console_error_count: 1,
size: 762,
+ event_count: 7,
+ message_count: 1,
},
},
{
@@ -1362,6 +1398,8 @@ const sessionReplayEventTestCases: {
console_warn_count: 0,
console_error_count: 0,
size: 213,
+ event_count: 2,
+ message_count: 1,
},
},
{
@@ -1381,7 +1419,7 @@ const sessionReplayEventTestCases: {
click_count: 6,
keypress_count: 0,
mouse_activity_count: 6,
- first_url: undefined,
+ first_url: null,
first_timestamp: '2023-04-25 18:58:13.000',
last_timestamp: '2023-04-25 18:58:19.000',
active_milliseconds: 6000, // can sum up the activity across windows
@@ -1389,6 +1427,8 @@ const sessionReplayEventTestCases: {
console_warn_count: 0,
console_error_count: 0,
size: 433,
+ event_count: 6,
+ message_count: 1,
},
},
]
diff --git a/plugin-server/tests/utils/db/utils.test.ts b/plugin-server/tests/utils/db/utils.test.ts
index 5201b8e60b803..420c645472ff3 100644
--- a/plugin-server/tests/utils/db/utils.test.ts
+++ b/plugin-server/tests/utils/db/utils.test.ts
@@ -17,41 +17,74 @@ describe('personInitialAndUTMProperties()', () => {
{ tag_name: 'a', nth_child: 1, nth_of_type: 2, attr__class: 'btn btn-sm' },
{ tag_name: 'div', nth_child: 1, nth_of_type: 2, $el_text: '💻' },
],
+ $app_build: 2,
+ $app_name: 'my app',
+ $app_namespace: 'com.posthog.myapp',
+ $app_version: '1.2.3',
}
- expect(personInitialAndUTMProperties(properties)).toEqual({
- distinct_id: 2,
- $browser: 'Chrome',
- $current_url: 'https://test.com',
- $os: 'Mac OS X',
- $browser_version: '95',
- $referring_domain: 'https://google.com',
- $referrer: 'https://google.com/?q=posthog',
- utm_medium: 'twitter',
- gclid: 'GOOGLE ADS ID',
- msclkid: 'BING ADS ID',
- $elements: [
- {
- tag_name: 'a',
- nth_child: 1,
- nth_of_type: 2,
- attr__class: 'btn btn-sm',
+ expect(personInitialAndUTMProperties(properties)).toMatchInlineSnapshot(`
+ Object {
+ "$app_build": 2,
+ "$app_name": "my app",
+ "$app_namespace": "com.posthog.myapp",
+ "$app_version": "1.2.3",
+ "$browser": "Chrome",
+ "$browser_version": "95",
+ "$current_url": "https://test.com",
+ "$elements": Array [
+ Object {
+ "attr__class": "btn btn-sm",
+ "nth_child": 1,
+ "nth_of_type": 2,
+ "tag_name": "a",
},
- { tag_name: 'div', nth_child: 1, nth_of_type: 2, $el_text: '💻' },
- ],
- $set: { utm_medium: 'twitter', gclid: 'GOOGLE ADS ID', msclkid: 'BING ADS ID' },
- $set_once: {
- $initial_browser: 'Chrome',
- $initial_current_url: 'https://test.com',
- $initial_os: 'Mac OS X',
- $initial_browser_version: '95',
- $initial_utm_medium: 'twitter',
- $initial_gclid: 'GOOGLE ADS ID',
- $initial_msclkid: 'BING ADS ID',
- $initial_referring_domain: 'https://google.com',
- $initial_referrer: 'https://google.com/?q=posthog',
- },
- })
+ Object {
+ "$el_text": "💻",
+ "nth_child": 1,
+ "nth_of_type": 2,
+ "tag_name": "div",
+ },
+ ],
+ "$os": "Mac OS X",
+ "$referrer": "https://google.com/?q=posthog",
+ "$referring_domain": "https://google.com",
+ "$set": Object {
+ "$app_build": 2,
+ "$app_name": "my app",
+ "$app_namespace": "com.posthog.myapp",
+ "$app_version": "1.2.3",
+ "$browser": "Chrome",
+ "$browser_version": "95",
+ "$current_url": "https://test.com",
+ "$os": "Mac OS X",
+ "$referrer": "https://google.com/?q=posthog",
+ "$referring_domain": "https://google.com",
+ "gclid": "GOOGLE ADS ID",
+ "msclkid": "BING ADS ID",
+ "utm_medium": "twitter",
+ },
+ "$set_once": Object {
+ "$initial_app_build": 2,
+ "$initial_app_name": "my app",
+ "$initial_app_namespace": "com.posthog.myapp",
+ "$initial_app_version": "1.2.3",
+ "$initial_browser": "Chrome",
+ "$initial_browser_version": "95",
+ "$initial_current_url": "https://test.com",
+ "$initial_gclid": "GOOGLE ADS ID",
+ "$initial_msclkid": "BING ADS ID",
+ "$initial_os": "Mac OS X",
+ "$initial_referrer": "https://google.com/?q=posthog",
+ "$initial_referring_domain": "https://google.com",
+ "$initial_utm_medium": "twitter",
+ },
+ "distinct_id": 2,
+ "gclid": "GOOGLE ADS ID",
+ "msclkid": "BING ADS ID",
+ "utm_medium": "twitter",
+ }
+ `)
})
it('initial current domain regression test', () => {
@@ -62,6 +95,7 @@ describe('personInitialAndUTMProperties()', () => {
expect(personInitialAndUTMProperties(properties)).toEqual({
$current_url: 'https://test.com',
$set_once: { $initial_current_url: 'https://test.com' },
+ $set: { $current_url: 'https://test.com' },
})
})
})
diff --git a/plugin-server/tests/worker/ingestion/event-pipeline/event-pipeline-integration.test.ts b/plugin-server/tests/worker/ingestion/event-pipeline/event-pipeline-integration.test.ts
index 837079da765eb..343826d81a4f2 100644
--- a/plugin-server/tests/worker/ingestion/event-pipeline/event-pipeline-integration.test.ts
+++ b/plugin-server/tests/worker/ingestion/event-pipeline/event-pipeline-integration.test.ts
@@ -105,6 +105,7 @@ describe('Event Pipeline integration test', () => {
$set: {
personProp: 'value',
anotherValue: 2,
+ $browser: 'Chrome',
},
$set_once: {
$initial_browser: 'Chrome',
@@ -118,6 +119,7 @@ describe('Event Pipeline integration test', () => {
expect(persons[0].properties).toEqual({
$creator_event_uuid: event.uuid,
$initial_browser: 'Chrome',
+ $browser: 'Chrome',
personProp: 'value',
anotherValue: 2,
})
diff --git a/plugin-server/tests/worker/ingestion/event-pipeline/processPersonsStep.test.ts b/plugin-server/tests/worker/ingestion/event-pipeline/processPersonsStep.test.ts
index 71d495bcf9bce..d2ce3aa76e383 100644
--- a/plugin-server/tests/worker/ingestion/event-pipeline/processPersonsStep.test.ts
+++ b/plugin-server/tests/worker/ingestion/event-pipeline/processPersonsStep.test.ts
@@ -85,6 +85,7 @@ describe.each([[true], [false]])('processPersonsStep()', (poEEmbraceJoin) => {
$browser: 'Chrome',
$set: {
someProp: 'value',
+ $browser: 'Chrome',
},
$set_once: {
$initial_browser: 'Chrome',
@@ -95,7 +96,12 @@ describe.each([[true], [false]])('processPersonsStep()', (poEEmbraceJoin) => {
expect.objectContaining({
id: expect.any(Number),
uuid: expect.any(String),
- properties: { $initial_browser: 'Chrome', someProp: 'value', $creator_event_uuid: expect.any(String) },
+ properties: {
+ $initial_browser: 'Chrome',
+ someProp: 'value',
+ $creator_event_uuid: expect.any(String),
+ $browser: 'Chrome',
+ },
version: 0,
is_identified: false,
})
diff --git a/plugin-server/tests/worker/ingestion/person-state.test.ts b/plugin-server/tests/worker/ingestion/person-state.test.ts
index b44f60e8d2dda..66fa35976d274 100644
--- a/plugin-server/tests/worker/ingestion/person-state.test.ts
+++ b/plugin-server/tests/worker/ingestion/person-state.test.ts
@@ -25,17 +25,20 @@ describe('PersonState.update()', () => {
let uuid2: UUIDT
let teamId: number
let poEEmbraceJoin: boolean
+ let organizationId: string
beforeAll(async () => {
;[hub, closeHub] = await createHub({})
await hub.db.clickhouseQuery('SYSTEM STOP MERGES')
+
+ organizationId = await createOrganization(hub.db.postgres)
})
beforeEach(async () => {
poEEmbraceJoin = false
uuid = new UUIDT()
uuid2 = new UUIDT()
- const organizationId = await createOrganization(hub.db.postgres)
+
teamId = await createTeam(hub.db.postgres, organizationId)
jest.spyOn(hub.db, 'fetchPerson')
@@ -1078,10 +1081,11 @@ describe('PersonState.update()', () => {
hub.statsd = { increment: jest.fn() } as any
})
- it('stops $identify if current distinct_id is illegal', async () => {
+ const illegalIds = ['', ' ', 'null', 'undefined', '"undefined"', '[object Object]', '"[object Object]"']
+ it.each(illegalIds)('stops $identify if current distinct_id is illegal: `%s`', async (illegalId: string) => {
const person = await personState({
event: '$identify',
- distinct_id: '[object Object]',
+ distinct_id: illegalId,
properties: {
$anon_distinct_id: 'anonymous_id',
},
@@ -1092,16 +1096,16 @@ describe('PersonState.update()', () => {
expect(persons.length).toEqual(0)
expect(hub.statsd!.increment).toHaveBeenCalledWith('illegal_distinct_ids.total', {
- distinctId: '[object Object]',
+ distinctId: illegalId,
})
})
- it('stops $identify if $anon_distinct_id is illegal', async () => {
+ it.each(illegalIds)('stops $identify if $anon_distinct_id is illegal: `%s`', async (illegalId: string) => {
const person = await personState({
event: '$identify',
distinct_id: 'some_distinct_id',
properties: {
- $anon_distinct_id: 'undefined',
+ $anon_distinct_id: illegalId,
},
}).handleIdentifyOrAlias()
@@ -1110,7 +1114,7 @@ describe('PersonState.update()', () => {
expect(persons.length).toEqual(0)
expect(hub.statsd!.increment).toHaveBeenCalledWith('illegal_distinct_ids.total', {
- distinctId: 'undefined',
+ distinctId: illegalId,
})
})
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 52699fa9c3fb3..301601dff1e49 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -1,4 +1,4 @@
-lockfileVersion: '6.1'
+lockfileVersion: '6.0'
settings:
autoInstallPeers: true
@@ -39,8 +39,8 @@ dependencies:
specifier: ^2.1.2
version: 2.1.2(react@16.14.0)
'@rrweb/types':
- specifier: ^2.0.0-alpha.9
- version: 2.0.0-alpha.9
+ specifier: ^2.0.0-alpha.11
+ version: 2.0.0-alpha.11
'@sentry/react':
specifier: 7.22.0
version: 7.22.0(react@16.14.0)
@@ -194,9 +194,12 @@ dependencies:
monaco-editor:
specifier: ^0.39.0
version: 0.39.0
+ papaparse:
+ specifier: ^5.4.1
+ version: 5.4.1
posthog-js:
- specifier: 1.78.1
- version: 1.78.1
+ specifier: 1.78.5
+ version: 1.78.5
posthog-js-lite:
specifier: 2.0.0-alpha5
version: 2.0.0-alpha5
@@ -282,8 +285,8 @@ dependencies:
specifier: ^1.5.1
version: 1.5.1
rrweb:
- specifier: ^2.0.0-alpha.9
- version: 2.0.0-alpha.9
+ specifier: ^2.0.0-alpha.11
+ version: 2.0.0-alpha.11
sass:
specifier: ^1.26.2
version: 1.56.0
@@ -432,6 +435,9 @@ devDependencies:
'@types/node':
specifier: ^18.11.9
version: 18.11.9
+ '@types/papaparse':
+ specifier: ^5.3.8
+ version: 5.3.8
'@types/pixelmatch':
specifier: ^5.2.4
version: 5.2.4
@@ -608,7 +614,7 @@ devDependencies:
version: 7.3.1
storybook-addon-pseudo-states:
specifier: 2.1.0
- version: 2.1.0(@storybook/components@7.3.1)(@storybook/core-events@7.3.1)(@storybook/manager-api@7.4.0)(@storybook/preview-api@7.4.0)(@storybook/theming@7.3.1)(react-dom@16.14.0)(react@16.14.0)
+ version: 2.1.0(@storybook/components@7.3.1)(@storybook/core-events@7.3.1)(@storybook/manager-api@7.4.1)(@storybook/preview-api@7.4.1)(@storybook/theming@7.3.1)(react-dom@16.14.0)(react@16.14.0)
style-loader:
specifier: ^2.0.0
version: 2.0.0(webpack@5.88.2)
@@ -978,7 +984,7 @@ packages:
engines: {node: '>=6.0.0'}
hasBin: true
dependencies:
- '@babel/types': 7.22.15
+ '@babel/types': 7.22.17
dev: true
/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@7.22.5(@babel/core@7.22.10):
@@ -2083,8 +2089,8 @@ packages:
'@babel/helper-validator-identifier': 7.22.5
to-fast-properties: 2.0.0
- /@babel/types@7.22.15:
- resolution: {integrity: sha512-X+NLXr0N8XXmN5ZsaQdm9U2SSC3UbIYq/doL++sueHOTisgZHoKaQtZxGuV2cUPQHMfjKEfg/g6oy7Hm6SKFtA==}
+ /@babel/types@7.22.17:
+ resolution: {integrity: sha512-YSQPHLFtQNE5xN9tHuZnzu8vPr61wVTBZdfv1meex1NBosa4iT05k/Jw06ddJugi4bk7The/oSwQGFcksmEJQg==}
engines: {node: '>=6.9.0'}
dependencies:
'@babel/helper-string-parser': 7.22.5
@@ -3928,10 +3934,10 @@ packages:
type-fest: 2.19.0
dev: false
- /@rrweb/types@2.0.0-alpha.9:
- resolution: {integrity: sha512-yS2KghLSmSSxo6H7tHrJ6u+nWJA9zCXaKFyc79rUSX8RHHSImRqocTqJ8jz794kCIWA90rvaQayRONdHO+vB0Q==}
+ /@rrweb/types@2.0.0-alpha.11:
+ resolution: {integrity: sha512-8ccocIkT5J/bfNRQY85qR/g6p5YQFpgFO2cMt4+Ex7w31Lq0yqZBRaoYEsawQKpLrn5KOHkdn2UTUrna7WMQuA==}
dependencies:
- rrweb-snapshot: 2.0.0-alpha.9
+ rrweb-snapshot: 2.0.0-alpha.11
dev: false
/@sentry/browser@7.22.0:
@@ -4519,11 +4525,11 @@ packages:
tiny-invariant: 1.3.1
dev: true
- /@storybook/channels@7.4.0:
- resolution: {integrity: sha512-/1CU0s3npFumzVHLGeubSyPs21O3jNqtSppOjSB9iDTyV2GtQrjh5ntVwebfKpCkUSitx3x7TkCb9dylpEZ8+w==}
+ /@storybook/channels@7.4.1:
+ resolution: {integrity: sha512-gnE1mNrRF+9oCVRMq6MS/tLXJbYmf9P02PCC3KpMLcSsABdH5jcrACejzJVo/kE223knFH7NJc4BBj7+5h0uXA==}
dependencies:
- '@storybook/client-logger': 7.4.0
- '@storybook/core-events': 7.4.0
+ '@storybook/client-logger': 7.4.1
+ '@storybook/core-events': 7.4.1
'@storybook/global': 5.0.0
qs: 6.11.2
telejson: 7.2.0
@@ -4587,8 +4593,8 @@ packages:
'@storybook/global': 5.0.0
dev: true
- /@storybook/client-logger@7.4.0:
- resolution: {integrity: sha512-4pBnf7+df1wXEVcF1civqxbrtccGGHQkfWQkJo49s53RXvF7SRTcif6XTx0V3cQV0v7I1C5mmLm0LNlmjPRP1Q==}
+ /@storybook/client-logger@7.4.1:
+ resolution: {integrity: sha512-2j0DQlKlPNY8XAaEZv+mUYEUm4dOWg6/Q92UNbvYPRK5qbXUvbMiQco5nmvg4LvMT6y99LhRSW2xrwEx5xKAKw==}
dependencies:
'@storybook/global': 5.0.0
dev: true
@@ -4679,8 +4685,8 @@ packages:
resolution: {integrity: sha512-7Pkgwmj/9B7Z3NNSn2swnviBrg9L1VeYSFw6JJKxtQskt8QoY8LxAsPzVMlHjqRmO6sO7lHo9FgpzIFxdmFaAA==}
dev: true
- /@storybook/core-events@7.4.0:
- resolution: {integrity: sha512-JavEo4dw7TQdF5pSKjk4RtqLgsG2R/eWRI8vZ3ANKa0ploGAnQR/eMTfSxf6TUH3ElBWLJhi+lvUCkKXPQD+dw==}
+ /@storybook/core-events@7.4.1:
+ resolution: {integrity: sha512-F1tGb32XZ4FRfbtXdi4b+zdzWUjFz5rn3TF18mSuBGGXvxKU+4tywgjGQ3dKGdvuP754czn3poSdz2ZW08bLsQ==}
dependencies:
ts-dedent: 2.2.0
dev: true
@@ -4845,20 +4851,20 @@ packages:
ts-dedent: 2.2.0
dev: true
- /@storybook/manager-api@7.4.0(react-dom@16.14.0)(react@16.14.0):
- resolution: {integrity: sha512-sBfkkt0eZGTozeKrbzMtWLEOQrgqdk24OUJlkc2IDaucR1CBNjoCMjNeYg7cLDw0rXE8W3W3AdWtJnfsUbLMAQ==}
+ /@storybook/manager-api@7.4.1(react-dom@16.14.0)(react@16.14.0):
+ resolution: {integrity: sha512-nzYasETW20uDWpfST6JFf6c/GSFB/dj7xVtg5EpvAYF8GkErCk9TvNKdLNroRrIYm5VJxHWC2V+CJ07RuX3Glw==}
peerDependencies:
react: ^16.8.0 || ^17.0.0 || ^18.0.0
react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0
dependencies:
- '@storybook/channels': 7.4.0
- '@storybook/client-logger': 7.4.0
- '@storybook/core-events': 7.4.0
+ '@storybook/channels': 7.4.1
+ '@storybook/client-logger': 7.4.1
+ '@storybook/core-events': 7.4.1
'@storybook/csf': 0.1.1
'@storybook/global': 5.0.0
- '@storybook/router': 7.4.0(react-dom@16.14.0)(react@16.14.0)
- '@storybook/theming': 7.4.0(react-dom@16.14.0)(react@16.14.0)
- '@storybook/types': 7.4.0
+ '@storybook/router': 7.4.1(react-dom@16.14.0)(react@16.14.0)
+ '@storybook/theming': 7.4.1(react-dom@16.14.0)(react@16.14.0)
+ '@storybook/types': 7.4.1
dequal: 2.0.3
lodash: 4.17.21
memoizerific: 1.11.3
@@ -4954,15 +4960,15 @@ packages:
util-deprecate: 1.0.2
dev: true
- /@storybook/preview-api@7.4.0:
- resolution: {integrity: sha512-ndXO0Nx+eE7ktVE4EqHpQZ0guX7yYBdruDdJ7B739C0+OoPWsJN7jAzUqq0NXaBcYrdaU5gTy+KnWJUt8R+OyA==}
+ /@storybook/preview-api@7.4.1:
+ resolution: {integrity: sha512-swmosWK73lP0CXDKMOwYIaaId28+muPDYX2V/0JmIOA+45HFXimeXZs3XsgVgQMutVF51QqnDA0pfrNgRofHgQ==}
dependencies:
- '@storybook/channels': 7.4.0
- '@storybook/client-logger': 7.4.0
- '@storybook/core-events': 7.4.0
+ '@storybook/channels': 7.4.1
+ '@storybook/client-logger': 7.4.1
+ '@storybook/core-events': 7.4.1
'@storybook/csf': 0.1.1
'@storybook/global': 5.0.0
- '@storybook/types': 7.4.0
+ '@storybook/types': 7.4.1
'@types/qs': 6.9.8
dequal: 2.0.3
lodash: 4.17.21
@@ -5097,13 +5103,13 @@ packages:
react-dom: 16.14.0(react@16.14.0)
dev: true
- /@storybook/router@7.4.0(react-dom@16.14.0)(react@16.14.0):
- resolution: {integrity: sha512-IATdtFL5C3ryjNQSwaQfrmiOZiVFoVNMevMoBGDC++g0laSW40TGiNK6fUjUDBKuOgbuDt4Svfbl29k21GefEg==}
+ /@storybook/router@7.4.1(react-dom@16.14.0)(react@16.14.0):
+ resolution: {integrity: sha512-7tE1B18jb+5+ujXd3BHcub85QnytIVBNA0iAo+o8MNwArISyodqp12y2D3w+QpXkg0GtPhAp/CMhzpyxotPhRQ==}
peerDependencies:
react: ^16.8.0 || ^17.0.0 || ^18.0.0
react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0
dependencies:
- '@storybook/client-logger': 7.4.0
+ '@storybook/client-logger': 7.4.1
memoizerific: 1.11.3
qs: 6.11.2
react: 16.14.0
@@ -5196,14 +5202,14 @@ packages:
react-dom: 16.14.0(react@16.14.0)
dev: true
- /@storybook/theming@7.4.0(react-dom@16.14.0)(react@16.14.0):
- resolution: {integrity: sha512-eLjEf6G3cqlegfutF/iUrec9LrUjKDj7K4ZhGdACWrf7bQcODs99EK62e9/d8GNKr4b+QMSEuM6XNGaqdPnuzQ==}
+ /@storybook/theming@7.4.1(react-dom@16.14.0)(react@16.14.0):
+ resolution: {integrity: sha512-a4QajZbnYumq8ovtn7nW7BeNrk/TaWyKmUrIz4w08I6ghzESJA4aCWZ6394awbrruiIOzCCKOUq4mfWEsc8W6A==}
peerDependencies:
react: ^16.8.0 || ^17.0.0 || ^18.0.0
react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0
dependencies:
'@emotion/use-insertion-effect-with-fallbacks': 1.0.1(react@16.14.0)
- '@storybook/client-logger': 7.4.0
+ '@storybook/client-logger': 7.4.1
'@storybook/global': 5.0.0
memoizerific: 1.11.3
react: 16.14.0
@@ -5219,13 +5225,12 @@ packages:
file-system-cache: 2.3.0
dev: true
- /@storybook/types@7.4.0:
- resolution: {integrity: sha512-XyzYkmeklywxvElPrIWLczi/PWtEdgTL6ToT3++FVxptsC2LZKS3Ue+sBcQ9xRZhkRemw4HQHwed5EW3dO8yUg==}
+ /@storybook/types@7.4.1:
+ resolution: {integrity: sha512-bjt1YDG9AocFBhIFRvGGbYZPlD223p+qAFcFgYdezU16fFE4ZGFUzUuq2ERkOofL7a2+OzLTCQ/SKe1jFkXCxQ==}
dependencies:
- '@storybook/channels': 7.4.0
+ '@storybook/channels': 7.4.1
'@types/babel__core': 7.20.1
'@types/express': 4.17.17
- '@types/react': 16.14.34
file-system-cache: 2.3.0
dev: true
@@ -5721,7 +5726,7 @@ packages:
resolution: {integrity: sha512-aACu/U/omhdk15O4Nfb+fHgH/z3QsfQzpnvRZhYhThms83ZnAOZz7zZAWO7mn2yyNQaA4xTO8GLK3uqFU4bYYw==}
dependencies:
'@babel/parser': 7.22.16
- '@babel/types': 7.22.15
+ '@babel/types': 7.22.17
'@types/babel__generator': 7.6.4
'@types/babel__template': 7.4.1
'@types/babel__traverse': 7.20.1
@@ -5749,7 +5754,7 @@ packages:
/@types/babel__traverse@7.20.1:
resolution: {integrity: sha512-MitHFXnhtgwsGZWtT68URpOvLN4EREih1u3QtQiN4VdAxWKRVvGCSvw/Qth0M0Qq3pJpnGOu5JaM/ydK7OGbqg==}
dependencies:
- '@babel/types': 7.22.15
+ '@babel/types': 7.22.17
dev: true
/@types/body-parser@1.19.2:
@@ -6213,6 +6218,12 @@ packages:
resolution: {integrity: sha512-sn7L+qQ6RLPdXRoiaE7bZ/Ek+o4uICma/lBFPyJEKDTPTBP1W8u0c4baj3EiS4DiqLs+Hk+KUGvMVJtAw3ePJg==}
dev: false
+ /@types/papaparse@5.3.8:
+ resolution: {integrity: sha512-ArKIEOOWULbhi53wkAiRy1ze4wvrTfhpAj7Yfzva+EkmX2sV8PpFB+xqzJfzXNzK4me95FJH9QZt5NXFVGzOoQ==}
+ dependencies:
+ '@types/node': 18.11.9
+ dev: true
+
/@types/parse-json@4.0.0:
resolution: {integrity: sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==}
dev: true
@@ -12994,7 +13005,7 @@ packages:
dependencies:
universalify: 2.0.0
optionalDependencies:
- graceful-fs: 4.2.10
+ graceful-fs: 4.2.11
/jsprim@2.0.2:
resolution: {integrity: sha512-gqXddjPqQ6G40VdnI6T6yObEC+pDNvyP95wdQhkWkg7crHH3km5qP1FsOXEkzEQwnz6gz5qGTn1c2Y52wP3OyQ==}
@@ -14266,6 +14277,10 @@ packages:
resolution: {integrity: sha512-NUcwaKxUxWrZLpDG+z/xZaCgQITkA/Dv4V/T6bw7VON6l1Xz/VnrBqrYjZQ12TamKHzITTfOEIYUj48y2KXImA==}
dev: true
+ /papaparse@5.4.1:
+ resolution: {integrity: sha512-HipMsgJkZu8br23pW15uvo6sib6wne/4woLZPlFf3rpDyMe9ywEXUsuD7+6K9PRkJlVT51j/sCOYDKGGS3ZJrw==}
+ dev: false
+
/param-case@3.0.4:
resolution: {integrity: sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==}
dependencies:
@@ -14909,8 +14924,8 @@ packages:
resolution: {integrity: sha512-tlkBdypJuvK/s00n4EiQjwYVfuuZv6vt8BF3g1ooIQa2Gz9Vz80p8q3qsPLZ0V5ErGRy6i3Q4fWC9TDzR7GNRQ==}
dev: false
- /posthog-js@1.78.1:
- resolution: {integrity: sha512-5tJoF56gGg4B4CSlLbWHuTpi7Ch7wksjCkPonHlQAc61ZZRymTB63tRheCvkcf+Omf8PBkO+2NJ0XEgrkRHE0A==}
+ /posthog-js@1.78.5:
+ resolution: {integrity: sha512-UUipML52LEyks7Pbx/3dpBJc2iPJrW+Ss6Y0BiIygn+QZoBjIe1WjE4Ep+Fnz7+cX1axex/ZiYholBnW7E4Aug==}
dependencies:
fflate: 0.4.8
dev: false
@@ -16608,27 +16623,27 @@ packages:
resolution: {integrity: sha512-85aZYCxweiD5J8yTEbw+E6A27zSnLPNDL0WfPdw3YYodq7WjnTKo0q4dtyQ2gz23iPT8Q9CUyJtAaUNcTxRf5Q==}
dev: false
- /rrdom@2.0.0-alpha.9:
- resolution: {integrity: sha512-jfaZ8tHi098P4GpPEtkOwnkucyKA5eGanAVHGPklzCqAeEq1Yx+9/y8AeOtF3yiobqKKkW8lLvFH2KrBH1CZlQ==}
+ /rrdom@2.0.0-alpha.11:
+ resolution: {integrity: sha512-U37m0t4jTz63wnVRcOQ5qFzSTrI5RdNgeXnHAha2Fmh9+1K+XuCx421a8D1wZk3WcDc2sFz/04FVdM0OD2caHg==}
dependencies:
- rrweb-snapshot: 2.0.0-alpha.9
+ rrweb-snapshot: 2.0.0-alpha.11
dev: false
- /rrweb-snapshot@2.0.0-alpha.9:
- resolution: {integrity: sha512-mHg1uUE2iUf0MXLE//4r5cMynkbduwmaOEis4gC7EuqkUAC1pYoLpcYYVt9lD6dgYIF6BmK6dgLLzMpD/tTyyA==}
+ /rrweb-snapshot@2.0.0-alpha.11:
+ resolution: {integrity: sha512-N0dzeJA2VhrlSOadkKwCVmV/DuNOwBH+Lhx89hAf9PQK4lCS8AP4AaylhqUdZOYHqwVjqsYel/uZ4hN79vuLhw==}
dev: false
- /rrweb@2.0.0-alpha.9:
- resolution: {integrity: sha512-8E2yiLY7IrFjDcVUZ7AcQtdBNFuTIsBrlCMpbyLua6X64dGRhOZ+IUDXLnAbNj5oymZgFtZu2UERG9rmV2VAng==}
+ /rrweb@2.0.0-alpha.11:
+ resolution: {integrity: sha512-vJ2gNvF+pUG9C2aaau7iSNqhWBSc4BwtUO4FpegOtDObuH4PIaxNJOlgHz82+WxKr9XPm93ER0LqmNpy0KYdKg==}
dependencies:
- '@rrweb/types': 2.0.0-alpha.9
+ '@rrweb/types': 2.0.0-alpha.11
'@types/css-font-loading-module': 0.0.7
'@xstate/fsm': 1.6.5
base64-arraybuffer: 1.0.2
fflate: 0.4.8
mitt: 3.0.0
- rrdom: 2.0.0-alpha.9
- rrweb-snapshot: 2.0.0-alpha.9
+ rrdom: 2.0.0-alpha.11
+ rrweb-snapshot: 2.0.0-alpha.11
dev: false
/rtl-css-js@1.16.0:
@@ -17135,7 +17150,7 @@ packages:
resolution: {integrity: sha512-siT1RiqlfQnGqgT/YzXVUNsom9S0H1OX+dpdGN1xkyYATo4I6sep5NmsRD/40s3IIOvlCq6akxkqG82urIZW1w==}
dev: true
- /storybook-addon-pseudo-states@2.1.0(@storybook/components@7.3.1)(@storybook/core-events@7.3.1)(@storybook/manager-api@7.4.0)(@storybook/preview-api@7.4.0)(@storybook/theming@7.3.1)(react-dom@16.14.0)(react@16.14.0):
+ /storybook-addon-pseudo-states@2.1.0(@storybook/components@7.3.1)(@storybook/core-events@7.3.1)(@storybook/manager-api@7.4.1)(@storybook/preview-api@7.4.1)(@storybook/theming@7.3.1)(react-dom@16.14.0)(react@16.14.0):
resolution: {integrity: sha512-AwbCL1OiZ16aIeXSP/IOovkMwXy7NTZqmjkz+UM2guSGjvogHNA95NhuVyWoqieE+QWUpGO48+MrBGMeeJcHOQ==}
peerDependencies:
'@storybook/components': ^7.0.0
@@ -17153,8 +17168,8 @@ packages:
dependencies:
'@storybook/components': 7.3.1(@types/react-dom@16.9.17)(@types/react@16.14.34)(react-dom@16.14.0)(react@16.14.0)
'@storybook/core-events': 7.3.1
- '@storybook/manager-api': 7.4.0(react-dom@16.14.0)(react@16.14.0)
- '@storybook/preview-api': 7.4.0
+ '@storybook/manager-api': 7.4.1(react-dom@16.14.0)(react@16.14.0)
+ '@storybook/preview-api': 7.4.1
'@storybook/theming': 7.3.1(react-dom@16.14.0)(react@16.14.0)
react: 16.14.0
react-dom: 16.14.0(react@16.14.0)
diff --git a/posthog/api/feature_flag.py b/posthog/api/feature_flag.py
index bdd8ecf3ed555..f61543e14f5cb 100644
--- a/posthog/api/feature_flag.py
+++ b/posthog/api/feature_flag.py
@@ -1,9 +1,8 @@
import json
from typing import Any, Dict, List, Optional, cast
-from django.db.models import QuerySet
+from django.db.models import QuerySet, Q
from django.conf import settings
-from django.db.models.query_utils import Q
from rest_framework import authentication, exceptions, request, serializers, status, viewsets
from rest_framework.decorators import action
from rest_framework.permissions import SAFE_METHODS, BasePermission, IsAuthenticated
@@ -70,6 +69,7 @@ class FeatureFlagSerializer(TaggedItemSerializerMixin, serializers.HyperlinkedMo
rollout_percentage = serializers.SerializerMethodField()
experiment_set: serializers.PrimaryKeyRelatedField = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
+ surveys: serializers.SerializerMethodField = serializers.SerializerMethodField()
features: serializers.SerializerMethodField = serializers.SerializerMethodField()
usage_dashboard: serializers.PrimaryKeyRelatedField = serializers.PrimaryKeyRelatedField(read_only=True)
analytics_dashboards = serializers.PrimaryKeyRelatedField(
@@ -100,6 +100,7 @@ class Meta:
"rollout_percentage",
"ensure_experience_continuity",
"experiment_set",
+ "surveys",
"features",
"rollback_conditions",
"performed_rollback",
@@ -129,6 +130,12 @@ def get_features(self, feature_flag: FeatureFlag) -> Dict:
return MinimalEarlyAccessFeatureSerializer(feature_flag.features, many=True).data
+ def get_surveys(self, feature_flag: FeatureFlag) -> Dict:
+ from posthog.api.survey import SurveyAPISerializer
+
+ return SurveyAPISerializer(feature_flag.surveys_linked_flag, many=True).data # type: ignore
+ # ignoring type because mypy doesn't know about the surveys_linked_flag `related_name` relationship
+
def get_rollout_percentage(self, feature_flag: FeatureFlag) -> Optional[int]:
if self.get_is_simple_flag(feature_flag):
return feature_flag.conditions[0].get("rollout_percentage")
@@ -343,7 +350,9 @@ def get_queryset(self) -> QuerySet:
.prefetch_related("experiment_set")
.prefetch_related("features")
.prefetch_related("analytics_dashboards")
+ .prefetch_related("surveys_linked_flag")
)
+
survey_targeting_flags = Survey.objects.filter(team=self.team, targeting_flag__isnull=False).values_list(
"targeting_flag_id", flat=True
)
@@ -434,6 +443,7 @@ def my_flags(self, request: request.Request, **kwargs):
.prefetch_related("experiment_set")
.prefetch_related("features")
.prefetch_related("analytics_dashboards")
+ .prefetch_related("surveys_linked_flag")
.select_related("created_by")
.order_by("-created_at")
)
diff --git a/posthog/api/notebook.py b/posthog/api/notebook.py
index 7f3cfae9be957..5c25efe42815d 100644
--- a/posthog/api/notebook.py
+++ b/posthog/api/notebook.py
@@ -1,5 +1,5 @@
from typing import Dict, List, Optional, Any
-
+from django.db.models import Q
import structlog
from django.db import transaction
from django.db.models import QuerySet
@@ -74,6 +74,7 @@ class Meta:
"short_id",
"title",
"content",
+ "text_content",
"version",
"deleted",
"created_at",
@@ -250,8 +251,13 @@ def _filter_request(self, request: request.Request, queryset: QuerySet) -> Query
queryset = queryset.filter(
last_modified_at__lt=relative_date_parse(request.GET["date_to"], self.team.timezone_info)
)
- elif key == "s":
- queryset = queryset.filter(title__icontains=request.GET["s"])
+ elif key == "search":
+ queryset = queryset.filter(
+ # some notebooks have no text_content until they are next saved, so we need to check the title too
+ # TODO this can be removed once all/most notebooks have text_content
+ Q(title__search=request.GET["search"])
+ | Q(text_content__search=request.GET["search"])
+ )
elif key == "contains":
contains = request.GET["contains"]
match_pairs = contains.replace(",", " ").split(" ")
diff --git a/posthog/api/query.py b/posthog/api/query.py
index f6c9e871d0c6d..5e4e14c34f999 100644
--- a/posthog/api/query.py
+++ b/posthog/api/query.py
@@ -25,6 +25,7 @@
from posthog.hogql.errors import HogQLException
from posthog.hogql.metadata import get_hogql_metadata
from posthog.hogql.query import execute_hogql_query
+from posthog.hogql_queries.lifecycle_hogql_query import run_lifecycle_query
from posthog.models import Team
from posthog.models.event.events_query import run_events_query
from posthog.models.user import User
@@ -32,7 +33,7 @@
from posthog.queries.time_to_see_data.serializers import SessionEventsQuerySerializer, SessionsQuerySerializer
from posthog.queries.time_to_see_data.sessions import get_session_events, get_sessions
from posthog.rate_limit import AIBurstRateThrottle, AISustainedRateThrottle, TeamRateThrottle
-from posthog.schema import EventsQuery, HogQLQuery, HogQLMetadata
+from posthog.schema import EventsQuery, HogQLQuery, HogQLMetadata, LifecycleQuery
class QueryThrottle(TeamRateThrottle):
@@ -203,22 +204,26 @@ def process_query(team: Team, query_json: Dict, default_limit: Optional[int] = N
if query_kind == "EventsQuery":
events_query = EventsQuery.parse_obj(query_json)
- response = run_events_query(query=events_query, team=team, default_limit=default_limit)
- return _unwrap_pydantic_dict(response)
+ events_response = run_events_query(query=events_query, team=team, default_limit=default_limit)
+ return _unwrap_pydantic_dict(events_response)
elif query_kind == "HogQLQuery":
hogql_query = HogQLQuery.parse_obj(query_json)
- response = execute_hogql_query(
+ hogql_response = execute_hogql_query(
query_type="HogQLQuery",
query=hogql_query.query,
team=team,
filters=hogql_query.filters,
default_limit=default_limit,
)
- return _unwrap_pydantic_dict(response)
+ return _unwrap_pydantic_dict(hogql_response)
elif query_kind == "HogQLMetadata":
metadata_query = HogQLMetadata.parse_obj(query_json)
- response = get_hogql_metadata(query=metadata_query, team=team)
- return _unwrap_pydantic_dict(response)
+ metadata_response = get_hogql_metadata(query=metadata_query, team=team)
+ return _unwrap_pydantic_dict(metadata_response)
+ elif query_kind == "LifecycleQuery":
+ lifecycle_query = LifecycleQuery.parse_obj(query_json)
+ lifecycle_response = run_lifecycle_query(query=lifecycle_query, team=team)
+ return _unwrap_pydantic_dict(lifecycle_response)
elif query_kind == "DatabaseSchemaQuery":
database = create_hogql_database(team.pk)
return serialize_database(database)
diff --git a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr
index af0efd4023fe7..245b0ceb08720 100644
--- a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr
+++ b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr
@@ -40,6 +40,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -51,6 +52,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -157,6 +159,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -168,6 +171,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -267,6 +271,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -278,6 +283,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -468,6 +474,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -479,6 +486,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -626,6 +634,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -637,6 +646,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -795,6 +805,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -806,6 +817,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -951,6 +963,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -962,6 +975,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -1181,6 +1195,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -1192,6 +1207,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -1230,6 +1246,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -1241,6 +1258,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -1376,6 +1394,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -1387,6 +1406,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -1478,6 +1498,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -1489,6 +1510,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -1527,6 +1549,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -1538,6 +1561,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -1671,6 +1695,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -1682,6 +1707,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -1789,6 +1815,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -1800,6 +1827,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -2040,6 +2068,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -2051,6 +2080,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -2272,6 +2302,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -2283,6 +2314,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -2390,6 +2422,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -2401,6 +2434,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -2501,6 +2535,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -2512,6 +2547,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -2612,6 +2648,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -2623,6 +2660,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -2703,6 +2741,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -2714,6 +2753,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -2845,6 +2885,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -2856,6 +2897,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -2933,6 +2975,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -2944,6 +2987,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -3048,6 +3092,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -3059,6 +3104,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -3163,6 +3209,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -3174,6 +3221,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -3289,6 +3337,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -3300,6 +3349,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -3600,6 +3650,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -3611,6 +3662,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -3750,6 +3802,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -3761,6 +3814,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -3874,6 +3928,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -3885,6 +3940,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -3951,6 +4007,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -3962,6 +4019,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -4104,6 +4162,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -4115,6 +4174,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -4153,6 +4213,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -4164,6 +4225,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -4268,6 +4330,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -4279,6 +4342,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -4409,6 +4473,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -4420,6 +4485,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -4825,6 +4891,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -4836,6 +4903,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -4956,6 +5024,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -4967,6 +5036,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -5033,6 +5103,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -5044,6 +5115,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -5148,6 +5220,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -5159,6 +5232,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -5224,6 +5298,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -5235,6 +5310,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -5273,6 +5349,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -5284,6 +5361,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -5388,6 +5466,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -5399,6 +5478,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -5520,6 +5600,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -5531,6 +5612,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -5674,6 +5756,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -5685,6 +5768,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -6072,6 +6156,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -6083,6 +6168,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -6211,6 +6297,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -6222,6 +6309,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -6383,6 +6471,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -6394,6 +6483,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -6541,6 +6631,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -6552,6 +6643,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -6671,6 +6763,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -6682,6 +6775,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -6752,6 +6846,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -6763,6 +6858,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -6908,6 +7004,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -6919,6 +7016,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -7538,6 +7636,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -7549,6 +7648,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -7780,6 +7880,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -7791,6 +7892,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -7933,6 +8035,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -7944,6 +8047,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -7982,6 +8086,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -7993,6 +8098,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -8097,6 +8203,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -8108,6 +8215,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -8238,6 +8346,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -8249,6 +8358,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -8353,6 +8463,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -8364,6 +8475,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -8480,6 +8592,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -8491,6 +8604,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -8612,6 +8726,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -8623,6 +8738,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -8912,6 +9028,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -8923,6 +9040,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -9058,6 +9176,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -9069,6 +9188,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -9153,6 +9273,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -9164,6 +9285,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -9274,6 +9396,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -9285,6 +9408,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -9392,6 +9516,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -9403,6 +9528,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -9513,6 +9639,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -9524,6 +9651,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -9685,6 +9813,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -9696,6 +9825,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -9834,6 +9964,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -9845,6 +9976,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -9929,6 +10061,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -9940,6 +10073,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -10081,6 +10215,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -10092,6 +10227,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -10248,6 +10384,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -10259,6 +10396,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -10350,6 +10488,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -10361,6 +10500,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -10502,6 +10642,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -10513,6 +10654,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -10631,6 +10773,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -10642,6 +10785,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -10831,6 +10975,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -10842,6 +10987,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
diff --git a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr
index 396f5103c7ec3..299074ec3d44b 100644
--- a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr
+++ b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr
@@ -40,6 +40,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -51,6 +52,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -76,6 +78,7 @@
"posthog_notebook"."team_id",
"posthog_notebook"."title",
"posthog_notebook"."content",
+ "posthog_notebook"."text_content",
"posthog_notebook"."deleted",
"posthog_notebook"."version",
"posthog_notebook"."created_at",
@@ -94,6 +97,7 @@
"posthog_notebook"."team_id",
"posthog_notebook"."title",
"posthog_notebook"."content",
+ "posthog_notebook"."text_content",
"posthog_notebook"."deleted",
"posthog_notebook"."version",
"posthog_notebook"."created_at",
@@ -120,6 +124,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -131,6 +136,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -169,6 +175,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -180,6 +187,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -277,6 +285,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -288,6 +297,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -479,6 +489,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -490,6 +501,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -555,6 +567,7 @@
"posthog_notebook"."team_id",
"posthog_notebook"."title",
"posthog_notebook"."content",
+ "posthog_notebook"."text_content",
"posthog_notebook"."deleted",
"posthog_notebook"."version",
"posthog_notebook"."created_at",
@@ -572,6 +585,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -583,6 +597,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -671,6 +686,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -682,6 +698,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
diff --git a/posthog/api/test/notebooks/test_notebook.py b/posthog/api/test/notebooks/test_notebook.py
index 3f49024d708e9..1b7f36ae54ce3 100644
--- a/posthog/api/test/notebooks/test_notebook.py
+++ b/posthog/api/test/notebooks/test_notebook.py
@@ -1,4 +1,4 @@
-from typing import List, Dict, Optional
+from typing import List, Dict
from unittest import mock
from freezegun import freeze_time
@@ -67,17 +67,20 @@ def test_cannot_list_deleted_notebook(self) -> None:
@parameterized.expand(
[
- ("without_content", None),
- ("with_content", {"some": "kind", "of": "tip", "tap": "content"}),
+ ("without_content", None, None),
+ ("with_content", {"some": "kind", "of": "tip", "tap": "content"}, "some kind of tip tap content"),
]
)
- def test_create_a_notebook(self, _, content: Optional[Dict]) -> None:
- response = self.client.post(f"/api/projects/{self.team.id}/notebooks", data={"content": content})
+ def test_create_a_notebook(self, _, content: Dict | None, text_content: str | None) -> None:
+ response = self.client.post(
+ f"/api/projects/{self.team.id}/notebooks", data={"content": content, "text_content": text_content}
+ )
assert response.status_code == status.HTTP_201_CREATED
assert response.json() == {
"id": response.json()["id"],
"short_id": response.json()["short_id"],
"content": content,
+ "text_content": text_content,
"title": None,
"version": 0,
"created_at": mock.ANY,
diff --git a/posthog/api/test/notebooks/test_notebook_filtering.py b/posthog/api/test/notebooks/test_notebook_filtering.py
index 4e9f9370c178d..5f634de548fc7 100644
--- a/posthog/api/test/notebooks/test_notebook_filtering.py
+++ b/posthog/api/test/notebooks/test_notebook_filtering.py
@@ -42,7 +42,7 @@
},
}
-BASIC_TEXT = lambda text: {"type": "paragraph", "content": [{"text": text, "type": "text"}]}
+BASIC_TEXT = lambda text: {"type": "paragraph", "content": [{"text": text, "type": "text"}], "text_content": text}
class TestNotebooksFiltering(APIBaseTest, QueryMatchingTest):
@@ -62,20 +62,22 @@ def _create_notebook_with_content(self, inner_content: List[Dict[str, Any]], tit
@parameterized.expand(
[
- ["some text", [0]],
- ["other text", [1]],
- ["text", [0, 1]],
+ ["i ride", [0]],
+ ["pony", [0]],
+ ["ponies", [0]],
+ ["my hobby", [1]],
+ ["around", [0, 1]],
["random", []],
]
)
def test_filters_based_on_title(self, search_text: str, expected_match_indexes: List[int]) -> None:
notebook_ids = [
- self._create_notebook_with_content([BASIC_TEXT("my important notes")], title="some text"),
- self._create_notebook_with_content([BASIC_TEXT("my important notes")], title="other text"),
+ self._create_notebook_with_content([BASIC_TEXT("my important notes")], title="i ride around on a pony"),
+ self._create_notebook_with_content([BASIC_TEXT("my important notes")], title="my hobby is to fish around"),
]
response = self.client.get(
- f"/api/projects/{self.team.id}/notebooks?s={search_text}",
+ f"/api/projects/{self.team.id}/notebooks?search={search_text}",
)
assert response.status_code == status.HTTP_200_OK
@@ -83,6 +85,32 @@ def test_filters_based_on_title(self, search_text: str, expected_match_indexes:
assert len(results) == len(expected_match_indexes)
assert sorted([r["id"] for r in results]) == sorted([notebook_ids[i] for i in expected_match_indexes])
+ @parameterized.expand(
+ [
+ ["pony", [0]],
+ ["pOnY", [0]],
+ ["ponies", [0]],
+ ["goat", [1]],
+ ["ride", [0, 1]],
+ ["neither", []],
+ ]
+ )
+ def test_filters_based_on_text_content(self, search_text: str, expected_match_indexes: List[int]) -> None:
+ notebook_ids = [
+ # will match both pony and ponies
+ self._create_notebook_with_content([BASIC_TEXT("you may ride a pony")], title="never matches"),
+ self._create_notebook_with_content([BASIC_TEXT("but may not ride a goat")], title="never matches"),
+ ]
+
+ response = self.client.get(
+ f"/api/projects/{self.team.id}/notebooks?search={search_text}",
+ )
+ assert response.status_code == status.HTTP_200_OK
+
+ results = response.json()["results"]
+ assert len(results) == len(expected_match_indexes)
+ assert sorted([r["id"] for r in results]) == sorted([notebook_ids[i] for i in expected_match_indexes])
+
def test_filters_based_on_params(self) -> None:
other_user = User.objects.create_and_join(self.organization, "other@posthog.com", "password")
notebook_one = Notebook.objects.create(team=self.team, created_by=self.user)
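The new text-content cases above (case-insensitive "pOnY", and "ponies" matching a notebook that only says "pony") imply stemmed full-text search over text_content, while the title cases behave like plain substring matching. A hedged sketch of the kind of queryset that produces the stemming behaviour, using Django's Postgres search helpers rather than the actual PostHog implementation:

    from django.contrib.postgres.search import SearchQuery, SearchVector
    from posthog.models import Notebook  # assumed import path, matching the model used in the tests above

    # With an English text-search config, "ponies" and "pony" reduce to the same lexeme,
    # which is what lets the "ponies" case match the notebook whose text_content is
    # "you may ride a pony".
    matches = Notebook.objects.annotate(search=SearchVector("text_content")).filter(
        search=SearchQuery("ponies")
    )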
diff --git a/posthog/api/test/test_annotation.py b/posthog/api/test/test_annotation.py
index 82089a5c7ea6b..c559411f607d5 100644
--- a/posthog/api/test/test_annotation.py
+++ b/posthog/api/test/test_annotation.py
@@ -1,7 +1,7 @@
from datetime import datetime
from unittest.mock import patch
-import pytz
+from zoneinfo import ZoneInfo
from django.utils.timezone import now
from rest_framework import status
@@ -111,7 +111,7 @@ def test_creating_annotation(self, mock_capture):
"team": team2.pk, # make sure this is set automatically
},
)
- date_marker: datetime = datetime(2020, 1, 1, 0, 0, 0).replace(tzinfo=pytz.UTC)
+ date_marker: datetime = datetime(2020, 1, 1, 0, 0, 0).replace(tzinfo=ZoneInfo("UTC"))
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
instance = Annotation.objects.get(pk=response.json()["id"])
self.assertEqual(instance.content, "Marketing campaign")
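The pytz-to-zoneinfo swap applied in this and the following test files is mechanical: pytz.UTC becomes the standard-library ZoneInfo("UTC"). A minimal sketch of the equivalence these changes rely on (standard library only, Python 3.9+):

    from datetime import datetime
    from zoneinfo import ZoneInfo

    # Same instant as datetime(2020, 1, 1).replace(tzinfo=pytz.UTC), without the pytz dependency.
    dt = datetime(2020, 1, 1, tzinfo=ZoneInfo("UTC"))
    assert dt.isoformat() == "2020-01-01T00:00:00+00:00"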
diff --git a/posthog/api/test/test_event.py b/posthog/api/test/test_event.py
index 8679485030bf6..b7f746c84a473 100644
--- a/posthog/api/test/test_event.py
+++ b/posthog/api/test/test_event.py
@@ -3,7 +3,7 @@
from unittest.mock import patch
from urllib.parse import unquote, urlencode
-import pytz
+from zoneinfo import ZoneInfo
from dateutil import parser
from dateutil.relativedelta import relativedelta
from django.utils import timezone
@@ -168,7 +168,6 @@ def test_custom_event_values(self):
@also_test_with_materialized_columns(["random_prop"])
@snapshot_clickhouse_queries
def test_event_property_values(self):
-
with freeze_time("2020-01-10"):
_create_event(
distinct_id="bla",
@@ -346,8 +345,8 @@ def test_pagination_bounded_date_range(self):
with freeze_time("2021-10-10T12:03:03.829294Z"):
_create_person(team=self.team, distinct_ids=["1"])
now = timezone.now() - relativedelta(months=11)
- after = (now).astimezone(pytz.utc).isoformat()
- before = (now + relativedelta(days=23)).astimezone(pytz.utc).isoformat()
+ after = (now).astimezone(ZoneInfo("UTC")).isoformat()
+ before = (now + relativedelta(days=23)).astimezone(ZoneInfo("UTC")).isoformat()
params = {"distinct_id": "1", "after": after, "before": before, "limit": 10}
params_string = urlencode(params)
for idx in range(0, 25):
diff --git a/posthog/api/test/test_feature_flag.py b/posthog/api/test/test_feature_flag.py
index 9efebf97b878b..b0d6f73c87ebb 100644
--- a/posthog/api/test/test_feature_flag.py
+++ b/posthog/api/test/test_feature_flag.py
@@ -939,7 +939,7 @@ def test_my_flags_is_not_nplus1(self) -> None:
format="json",
).json()
- with self.assertNumQueries(9):
+ with self.assertNumQueries(10):
response = self.client.get(f"/api/projects/{self.team.id}/feature_flags/my_flags")
self.assertEqual(response.status_code, status.HTTP_200_OK)
@@ -950,7 +950,7 @@ def test_my_flags_is_not_nplus1(self) -> None:
format="json",
).json()
- with self.assertNumQueries(9):
+ with self.assertNumQueries(10):
response = self.client.get(f"/api/projects/{self.team.id}/feature_flags/my_flags")
self.assertEqual(response.status_code, status.HTTP_200_OK)
diff --git a/posthog/api/test/test_insight.py b/posthog/api/test/test_insight.py
index a9697252901f4..8becdf0ac7e60 100644
--- a/posthog/api/test/test_insight.py
+++ b/posthog/api/test/test_insight.py
@@ -5,7 +5,7 @@
from unittest.case import skip
from unittest.mock import patch
-import pytz
+from zoneinfo import ZoneInfo
from django.test import override_settings
from django.utils import timezone
from freezegun import freeze_time
@@ -1860,7 +1860,7 @@ def test_create_insight_viewed(self) -> None:
self.assertEqual(created_insight_viewed.user, self.user)
self.assertEqual(
created_insight_viewed.last_viewed_at,
- datetime(2022, 3, 22, 0, 0, tzinfo=pytz.UTC),
+ datetime(2022, 3, 22, 0, 0, tzinfo=ZoneInfo("UTC")),
)
def test_update_insight_viewed(self) -> None:
@@ -1882,7 +1882,7 @@ def test_update_insight_viewed(self) -> None:
updated_insight_viewed = InsightViewed.objects.all()[0]
self.assertEqual(
updated_insight_viewed.last_viewed_at,
- datetime(2022, 3, 23, 0, 0, tzinfo=pytz.UTC),
+ datetime(2022, 3, 23, 0, 0, tzinfo=ZoneInfo("UTC")),
)
def test_cant_view_insight_viewed_for_insight_in_another_team(self) -> None:
diff --git a/posthog/api/test/test_organization_domain.py b/posthog/api/test/test_organization_domain.py
index fe4a4e5afa12a..2615880cbff20 100644
--- a/posthog/api/test/test_organization_domain.py
+++ b/posthog/api/test/test_organization_domain.py
@@ -4,7 +4,7 @@
import dns.resolver
import dns.rrset
import pytest
-import pytz
+from zoneinfo import ZoneInfo
from django.utils import timezone
from freezegun import freeze_time
from rest_framework import status
@@ -133,7 +133,7 @@ def test_creating_domain_on_self_hosted_is_automatically_verified(self):
instance = OrganizationDomain.objects.get(id=response_data["id"])
self.assertEqual(instance.domain, "the.posthog.com")
- self.assertEqual(instance.verified_at, datetime.datetime(2021, 8, 8, 20, 20, 8, tzinfo=pytz.UTC))
+ self.assertEqual(instance.verified_at, datetime.datetime(2021, 8, 8, 20, 20, 8, tzinfo=ZoneInfo("UTC")))
self.assertEqual(instance.last_verification_retry, None)
self.assertEqual(instance.sso_enforcement, "")
@@ -200,7 +200,7 @@ def test_can_request_verification_for_unverified_domains(self, mock_dns_query):
self.assertEqual(response_data["verified_at"], self.domain.verified_at.strftime("%Y-%m-%dT%H:%M:%SZ"))
self.assertEqual(response_data["is_verified"], True)
- self.assertEqual(self.domain.verified_at, datetime.datetime(2021, 8, 8, 20, 20, 8, tzinfo=pytz.UTC))
+ self.assertEqual(self.domain.verified_at, datetime.datetime(2021, 8, 8, 20, 20, 8, tzinfo=ZoneInfo("UTC")))
self.assertEqual(self.domain.is_verified, True)
@patch("posthog.models.organization_domain.dns.resolver.resolve")
@@ -220,7 +220,7 @@ def test_domain_is_not_verified_with_missing_challenge(self, mock_dns_query):
self.assertEqual(response_data["verified_at"], None)
self.assertEqual(self.domain.verified_at, None)
self.assertEqual(
- self.domain.last_verification_retry, datetime.datetime(2021, 10, 10, 10, 10, 10, tzinfo=pytz.UTC)
+ self.domain.last_verification_retry, datetime.datetime(2021, 10, 10, 10, 10, 10, tzinfo=ZoneInfo("UTC"))
)
@patch("posthog.models.organization_domain.dns.resolver.resolve")
@@ -240,7 +240,7 @@ def test_domain_is_not_verified_with_missing_domain(self, mock_dns_query):
self.assertEqual(response_data["verified_at"], None)
self.assertEqual(self.domain.verified_at, None)
self.assertEqual(
- self.domain.last_verification_retry, datetime.datetime(2021, 10, 10, 10, 10, 10, tzinfo=pytz.UTC)
+ self.domain.last_verification_retry, datetime.datetime(2021, 10, 10, 10, 10, 10, tzinfo=ZoneInfo("UTC"))
)
@patch("posthog.models.organization_domain.dns.resolver.resolve")
@@ -262,7 +262,7 @@ def test_domain_is_not_verified_with_incorrect_challenge(self, mock_dns_query):
self.assertEqual(response_data["verified_at"], None)
self.assertEqual(self.domain.verified_at, None)
self.assertEqual(
- self.domain.last_verification_retry, datetime.datetime(2021, 10, 10, 10, 10, 10, tzinfo=pytz.UTC)
+ self.domain.last_verification_retry, datetime.datetime(2021, 10, 10, 10, 10, 10, tzinfo=ZoneInfo("UTC"))
)
def test_cannot_request_verification_for_verified_domains(self):
diff --git a/posthog/api/test/test_plugin.py b/posthog/api/test/test_plugin.py
index d393b00910ab3..c9ae3b26c359a 100644
--- a/posthog/api/test/test_plugin.py
+++ b/posthog/api/test/test_plugin.py
@@ -5,7 +5,7 @@
from unittest import mock
from unittest.mock import ANY, patch
-import pytz
+from zoneinfo import ZoneInfo
from django.core.files.uploadedfile import SimpleUploadedFile
from freezegun import freeze_time
from rest_framework import status
@@ -269,7 +269,7 @@ def test_update_plugin_auth(self, mock_sync_from_plugin_archive, mock_get, mock_
plugin = Plugin.objects.get(id=response.json()["id"])
- fake_date = datetime(2022, 1, 1, 0, 0).replace(tzinfo=pytz.UTC)
+ fake_date = datetime(2022, 1, 1, 0, 0).replace(tzinfo=ZoneInfo("UTC"))
self.assertNotEqual(plugin.updated_at, fake_date)
with freeze_time(fake_date.isoformat()):
@@ -715,7 +715,7 @@ def test_install_plugin_on_multiple_orgs(self, mock_get, mock_reload):
name="FooBar2", plugins_access_level=Organization.PluginsAccessLevel.INSTALL
)
- fake_date = datetime(2022, 1, 1, 0, 0).replace(tzinfo=pytz.UTC)
+ fake_date = datetime(2022, 1, 1, 0, 0).replace(tzinfo=ZoneInfo("UTC"))
with freeze_time(fake_date.isoformat()):
response = self.client.post(
f"/api/organizations/{my_org.id}/plugins/", {"url": "https://github.com/PostHog/helloworldplugin"}
@@ -1281,7 +1281,7 @@ def test_check_for_updates_plugins_reload_not_called(self, _, mock_reload):
plugin_id = response.json()["id"]
plugin = Plugin.objects.get(id=plugin_id)
- fake_date = datetime(2022, 1, 1, 0, 0).replace(tzinfo=pytz.UTC)
+ fake_date = datetime(2022, 1, 1, 0, 0).replace(tzinfo=ZoneInfo("UTC"))
self.assertNotEqual(plugin.latest_tag_checked_at, fake_date)
with freeze_time(fake_date.isoformat()):
diff --git a/posthog/api/test/test_signup.py b/posthog/api/test/test_signup.py
index c4439a8913b31..82e89c7805c5c 100644
--- a/posthog/api/test/test_signup.py
+++ b/posthog/api/test/test_signup.py
@@ -5,7 +5,7 @@
from unittest.mock import ANY, patch
import pytest
-import pytz
+from zoneinfo import ZoneInfo
from django.core import mail
from django.urls.base import reverse
from django.utils import timezone
@@ -733,7 +733,7 @@ def test_api_invite_sign_up_prevalidate_expired_invite(self):
invite: OrganizationInvite = OrganizationInvite.objects.create(
target_email="test+59@posthog.com", organization=self.organization
)
- invite.created_at = datetime.datetime(2020, 12, 1, tzinfo=pytz.UTC)
+ invite.created_at = datetime.datetime(2020, 12, 1, tzinfo=ZoneInfo("UTC"))
invite.save()
response = self.client.get(f"/api/signup/{invite.id}/")
@@ -1132,7 +1132,7 @@ def test_cant_claim_expired_invite(self):
invite: OrganizationInvite = OrganizationInvite.objects.create(
target_email="test+799@posthog.com", organization=self.organization
)
- invite.created_at = datetime.datetime(2020, 3, 3, tzinfo=pytz.UTC)
+ invite.created_at = datetime.datetime(2020, 3, 3, tzinfo=ZoneInfo("UTC"))
invite.save()
response = self.client.post(f"/api/signup/{invite.id}/", {"first_name": "Charlie", "password": "test_password"})
diff --git a/posthog/api/test/test_survey.py b/posthog/api/test/test_survey.py
index 45e13024c1a0b..f393e5cec4379 100644
--- a/posthog/api/test/test_survey.py
+++ b/posthog/api/test/test_survey.py
@@ -77,6 +77,80 @@ def test_can_create_survey_with_linked_flag_and_targeting(self):
{"type": "open", "question": "What would you want to improve from notebooks?"}
]
+ def test_used_in_survey_is_populated_correctly_for_feature_flag_list(self) -> None:
+ self.maxDiff = None
+
+ ff_key = "notebooks"
+ notebooks_flag = FeatureFlag.objects.create(team=self.team, key=ff_key, created_by=self.user)
+
+ response = self.client.post(
+ f"/api/projects/{self.team.id}/surveys/",
+ data={
+ "name": "Notebooks power users survey",
+ "type": "popover",
+ "questions": [{"type": "open", "question": "What would you want to improve from notebooks?"}],
+ "linked_flag_id": notebooks_flag.id,
+ "targeting_flag_filters": {
+ "groups": [
+ {
+ "variant": None,
+ "rollout_percentage": None,
+ "properties": [
+ {"key": "billing_plan", "value": ["cloud"], "operator": "exact", "type": "person"}
+ ],
+ }
+ ]
+ },
+ "conditions": {"url": "https://app.posthog.com/notebooks"},
+ },
+ format="json",
+ )
+
+ response_data = response.json()
+ assert response.status_code == status.HTTP_201_CREATED, response_data
+ assert response_data["linked_flag"]["id"] == notebooks_flag.id
+ assert FeatureFlag.objects.filter(id=response_data["targeting_flag"]["id"]).exists()
+
+ created_survey1 = response.json()["id"]
+
+ response = self.client.post(
+ f"/api/projects/{self.team.id}/surveys/",
+ data={
+ "name": "Notebooks random survey",
+ "type": "popover",
+ "questions": [{"type": "open", "question": "What would you want to improve from notebooks?"}],
+ "linked_flag_id": notebooks_flag.id,
+ "conditions": {"url": "https://app.posthog.com/notebooks"},
+ },
+ format="json",
+ )
+
+ response_data = response.json()
+ assert response.status_code == status.HTTP_201_CREATED, response_data
+ assert response_data["linked_flag"]["id"] == notebooks_flag.id
+ assert response_data["targeting_flag"] is None
+
+ created_survey2 = response.json()["id"]
+
+ # add another random feature flag
+ self.client.post(
+ f"/api/projects/{self.team.id}/feature_flags/",
+ data={"name": f"flag", "key": f"flag_0", "filters": {"groups": [{"rollout_percentage": 5}]}},
+ format="json",
+ ).json()
+
+ with self.assertNumQueries(12):
+ response = self.client.get(f"/api/projects/{self.team.id}/feature_flags")
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ result = response.json()
+
+ self.assertEqual(result["count"], 2)
+
+ self.assertEqual(
+ [(res["key"], [survey["id"] for survey in res["surveys"]]) for res in result["results"]],
+ [("flag_0", []), (ff_key, [created_survey1, created_survey2])],
+ )
+
def test_updating_survey_with_targeting_creates_or_updates_targeting_flag(self):
survey_with_targeting = self.client.post(
f"/api/projects/{self.team.id}/surveys/",
diff --git a/posthog/batch_exports/service.py b/posthog/batch_exports/service.py
index 5aa0fa7d18e22..b5eb182e68a70 100644
--- a/posthog/batch_exports/service.py
+++ b/posthog/batch_exports/service.py
@@ -52,6 +52,8 @@ class S3BatchExportInputs:
data_interval_end: str | None = None
compression: str | None = None
exclude_events: list[str] | None = None
+ encryption: str | None = None
+ kms_key_id: str | None = None
@dataclass
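The two optional inputs added to S3BatchExportInputs default to None so existing callers keep working. A hedged sketch (not the actual batch export worker) of how such inputs would typically map onto S3's server-side encryption parameters:

    def s3_encryption_kwargs(encryption: str | None, kms_key_id: str | None) -> dict:
        # ServerSideEncryption / SSEKMSKeyId are the standard S3 PutObject parameters;
        # both are simply omitted when the corresponding input is None.
        kwargs: dict = {}
        if encryption:
            kwargs["ServerSideEncryption"] = encryption  # e.g. "AES256" or "aws:kms"
        if kms_key_id:
            kwargs["SSEKMSKeyId"] = kms_key_id
        return kwargs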
diff --git a/posthog/caching/test/test_should_refresh_insight.py b/posthog/caching/test/test_should_refresh_insight.py
index 26fcfaf01531a..12fb385ef2926 100644
--- a/posthog/caching/test/test_should_refresh_insight.py
+++ b/posthog/caching/test/test_should_refresh_insight.py
@@ -1,9 +1,9 @@
from datetime import datetime, timedelta
from time import sleep
from unittest.mock import patch
+from zoneinfo import ZoneInfo
from django.http import HttpRequest
-import pytz
from freezegun import freeze_time
from rest_framework.request import Request
from posthog.caching.calculate_results import CLICKHOUSE_MAX_EXECUTION_TIME
@@ -25,7 +25,7 @@ def __init__(self, *args, **kwargs) -> None:
def test_should_return_true_if_refresh_not_requested(self):
insight, _, _ = _create_insight(self.team, {"events": [{"id": "$autocapture"}], "interval": "month"}, {})
InsightCachingState.objects.filter(team=self.team, insight_id=insight.pk).update(
- last_refresh=datetime.now(tz=pytz.timezone("UTC")) - timedelta(days=1)
+ last_refresh=datetime.now(tz=ZoneInfo("UTC")) - timedelta(days=1)
)
# .GET["refresh"] is absent in the request below!
@@ -47,7 +47,7 @@ def test_should_return_true_if_refresh_not_requested(self):
def test_should_return_true_if_refresh_requested(self):
insight, _, _ = _create_insight(self.team, {"events": [{"id": "$autocapture"}], "interval": "month"}, {})
InsightCachingState.objects.filter(team=self.team, insight_id=insight.pk).update(
- last_refresh=datetime.now(tz=pytz.timezone("UTC")) - timedelta(days=1)
+ last_refresh=datetime.now(tz=ZoneInfo("UTC")) - timedelta(days=1)
)
should_refresh_now, refresh_frequency = should_refresh_insight(insight, None, request=self.refresh_request)
@@ -67,7 +67,7 @@ def test_should_return_true_if_insight_does_not_have_last_refresh(self):
def test_shared_insights_can_be_refreshed_less_often(self):
insight, _, _ = _create_insight(self.team, {"events": [{"id": "$autocapture"}], "interval": "month"}, {})
InsightCachingState.objects.filter(team=self.team, insight_id=insight.pk).update(
- last_refresh=datetime.now(tz=pytz.timezone("UTC")) - timedelta(days=1)
+ last_refresh=datetime.now(tz=ZoneInfo("UTC")) - timedelta(days=1)
)
should_refresh_now, refresh_frequency = should_refresh_insight(
@@ -130,7 +130,7 @@ def test_dashboard_filters_should_override_insight_filters_when_deciding_on_refr
def test_should_return_true_if_was_recently_refreshed(self):
insight, _, _ = _create_insight(self.team, {"events": [{"id": "$autocapture"}], "interval": "month"}, {})
InsightCachingState.objects.filter(team=self.team, insight_id=insight.pk).update(
- last_refresh=datetime.now(tz=pytz.timezone("UTC"))
+ last_refresh=datetime.now(tz=ZoneInfo("UTC"))
)
request = HttpRequest()
@@ -143,10 +143,10 @@ def test_should_return_true_if_was_recently_refreshed(self):
def test_should_return_true_if_refresh_just_about_to_time_out_elsewhere(self, mock_sleep):
insight, _, _ = _create_insight(self.team, {"events": [{"id": "$autocapture"}], "interval": "month"}, {})
InsightCachingState.objects.filter(team=self.team, insight_id=insight.pk).update(
- last_refresh=datetime.now(tz=pytz.timezone("UTC")) - timedelta(days=1),
+ last_refresh=datetime.now(tz=ZoneInfo("UTC")) - timedelta(days=1),
# This insight is being calculated _somewhere_, since it was last refreshed
# earlier than the recent refresh has been queued
- last_refresh_queued_at=datetime.now(tz=pytz.timezone("UTC"))
+ last_refresh_queued_at=datetime.now(tz=ZoneInfo("UTC"))
- timedelta(seconds=CLICKHOUSE_MAX_EXECUTION_TIME - 0.5), # Half a second before timeout
)
@@ -161,10 +161,10 @@ def test_should_return_true_if_refresh_just_about_to_time_out_elsewhere(self, mo
def test_should_return_true_if_refresh_timed_out_elsewhere_before(self):
insight, _, _ = _create_insight(self.team, {"events": [{"id": "$autocapture"}], "interval": "month"}, {})
InsightCachingState.objects.filter(team=self.team, insight_id=insight.pk).update(
- last_refresh=datetime.now(tz=pytz.timezone("UTC")) - timedelta(days=1),
+ last_refresh=datetime.now(tz=ZoneInfo("UTC")) - timedelta(days=1),
# last_refresh is earlier than last_refresh_queued_at BUT last_refresh_queued_at is more than
# CLICKHOUSE_MAX_EXECUTION_TIME seconds ago. This means the query CANNOT be running at this time.
- last_refresh_queued_at=datetime.now(tz=pytz.timezone("UTC")) - timedelta(seconds=500),
+ last_refresh_queued_at=datetime.now(tz=ZoneInfo("UTC")) - timedelta(seconds=500),
)
should_refresh_now, _ = should_refresh_insight(insight, None, request=self.refresh_request)
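Note: the pytz-to-ZoneInfo swap above (repeated across several files in this diff) is behaviour-preserving for UTC, and zoneinfo ships with the standard library since Python 3.9. A minimal sketch of the equivalence, with illustrative values:

from datetime import datetime
from zoneinfo import ZoneInfo

# Both pytz.UTC and ZoneInfo("UTC") yield an aware datetime with a zero offset;
# pytz's localize() caveat only matters for zones with DST, which UTC never has.
now_utc = datetime.now(tz=ZoneInfo("UTC"))
assert now_utc.utcoffset().total_seconds() == 0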
diff --git a/posthog/clickhouse/migrations/0048_session_replay_events_count.py b/posthog/clickhouse/migrations/0048_session_replay_events_count.py
new file mode 100644
index 0000000000000..d4676e2794884
--- /dev/null
+++ b/posthog/clickhouse/migrations/0048_session_replay_events_count.py
@@ -0,0 +1,26 @@
+from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions
+from posthog.models.session_replay_event.migrations_sql import (
+ DROP_SESSION_REPLAY_EVENTS_TABLE_MV_SQL,
+ DROP_KAFKA_SESSION_REPLAY_EVENTS_TABLE_SQL,
+ ADD_EVENT_COUNT_WRITABLE_SESSION_REPLAY_EVENTS_TABLE_SQL,
+ ADD_EVENT_COUNT_DISTRIBUTED_SESSION_REPLAY_EVENTS_TABLE_SQL,
+ ADD_EVENT_COUNT_SESSION_REPLAY_EVENTS_TABLE_SQL,
+)
+from posthog.models.session_replay_event.sql import (
+ SESSION_REPLAY_EVENTS_TABLE_MV_SQL,
+ KAFKA_SESSION_REPLAY_EVENTS_TABLE_SQL,
+)
+
+operations = [
+ # we have to drop the materialized view first so that we're no longer pulling from kafka
+ # then we drop the kafka table
+ run_sql_with_exceptions(DROP_SESSION_REPLAY_EVENTS_TABLE_MV_SQL()),
+ run_sql_with_exceptions(DROP_KAFKA_SESSION_REPLAY_EVENTS_TABLE_SQL()),
+ # now we can alter the target tables
+ run_sql_with_exceptions(ADD_EVENT_COUNT_WRITABLE_SESSION_REPLAY_EVENTS_TABLE_SQL()),
+ run_sql_with_exceptions(ADD_EVENT_COUNT_DISTRIBUTED_SESSION_REPLAY_EVENTS_TABLE_SQL()),
+ run_sql_with_exceptions(ADD_EVENT_COUNT_SESSION_REPLAY_EVENTS_TABLE_SQL()),
+ # and then recreate the materialized views and kafka tables
+ run_sql_with_exceptions(KAFKA_SESSION_REPLAY_EVENTS_TABLE_SQL()),
+ run_sql_with_exceptions(SESSION_REPLAY_EVENTS_TABLE_MV_SQL()),
+]
diff --git a/posthog/clickhouse/system_status.py b/posthog/clickhouse/system_status.py
index 2f0924b080319..417525330a96c 100644
--- a/posthog/clickhouse/system_status.py
+++ b/posthog/clickhouse/system_status.py
@@ -1,7 +1,7 @@
from datetime import timedelta
from os.path import abspath, dirname, join
from typing import Dict, Generator, List, Tuple
-import pytz
+from zoneinfo import ZoneInfo
from dateutil.relativedelta import relativedelta
from django.utils import timezone
@@ -103,7 +103,7 @@ def system_status() -> Generator[SystemStatusRow, None, None]:
last_event_ingested_timestamp = sync_execute("SELECT max(_timestamp) FROM events")[0][0]
# Therefore we can confidently apply the UTC timezone
- last_event_ingested_timestamp_utc = last_event_ingested_timestamp.replace(tzinfo=pytz.UTC)
+ last_event_ingested_timestamp_utc = last_event_ingested_timestamp.replace(tzinfo=ZoneInfo("UTC"))
yield {
"key": "last_event_ingested_timestamp",
diff --git a/posthog/clickhouse/test/__snapshots__/test_schema.ambr b/posthog/clickhouse/test/__snapshots__/test_schema.ambr
index 36ab529259c77..ac21b1ac5989f 100644
--- a/posthog/clickhouse/test/__snapshots__/test_schema.ambr
+++ b/posthog/clickhouse/test/__snapshots__/test_schema.ambr
@@ -336,7 +336,9 @@
console_log_count Int64,
console_warn_count Int64,
console_error_count Int64,
- size Int64
+ size Int64,
+ event_count Int64,
+ message_count Int64
) ENGINE = Kafka('test.kafka.broker:9092', 'clickhouse_session_replay_events_test', 'group1', 'JSONEachRow')
'
@@ -922,7 +924,9 @@
console_log_count Int64,
console_warn_count Int64,
console_error_count Int64,
- size Int64
+ size Int64,
+ event_count Int64,
+ message_count Int64
) ENGINE = Kafka('kafka:9092', 'clickhouse_session_replay_events_test', 'group1', 'JSONEachRow')
'
@@ -1344,7 +1348,15 @@
console_warn_count SimpleAggregateFunction(sum, Int64),
console_error_count SimpleAggregateFunction(sum, Int64),
-- this column allows us to estimate the amount of data that is being ingested
- size SimpleAggregateFunction(sum, Int64)
+ size SimpleAggregateFunction(sum, Int64),
+ -- this allows us to count the number of messages received in a session
+ -- often very useful in incidents or debugging
+ message_count SimpleAggregateFunction(sum, Int64),
+ -- this allows us to count the number of snapshot events received in a session
+ -- often very useful in incidents or debugging
+ -- because we batch events we expect message_count to be lower than event_count
+ event_count SimpleAggregateFunction(sum, Int64),
+ _timestamp SimpleAggregateFunction(max, DateTime)
) ENGINE = Distributed('posthog', 'posthog_test', 'sharded_session_replay_events', sipHash64(distinct_id))
'
@@ -1377,7 +1389,11 @@
sum(console_log_count) as console_log_count,
sum(console_warn_count) as console_warn_count,
sum(console_error_count) as console_error_count,
- sum(size) as size
+ sum(size) as size,
+ -- we can count the number of kafka messages instead of sending it explicitly
+ sum(message_count) as message_count,
+ sum(event_count) as event_count,
+ max(_timestamp) as _timestamp
FROM posthog_test.kafka_session_replay_events
group by session_id, team_id
@@ -1608,7 +1624,15 @@
console_warn_count SimpleAggregateFunction(sum, Int64),
console_error_count SimpleAggregateFunction(sum, Int64),
-- this column allows us to estimate the amount of data that is being ingested
- size SimpleAggregateFunction(sum, Int64)
+ size SimpleAggregateFunction(sum, Int64),
+ -- this allows us to count the number of messages received in a session
+ -- often very useful in incidents or debugging
+ message_count SimpleAggregateFunction(sum, Int64),
+ -- this allows us to count the number of snapshot events received in a session
+ -- often very useful in incidents or debugging
+ -- because we batch events we expect message_count to be lower than event_count
+ event_count SimpleAggregateFunction(sum, Int64),
+ _timestamp SimpleAggregateFunction(max, DateTime)
) ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/77f1df52-4b43-11e9-910f-b8ca3a9b9f3e_{shard}/posthog.session_replay_events', '{replica}')
PARTITION BY toYYYYMM(min_first_timestamp)
@@ -2226,7 +2250,15 @@
console_warn_count SimpleAggregateFunction(sum, Int64),
console_error_count SimpleAggregateFunction(sum, Int64),
-- this column allows us to estimate the amount of data that is being ingested
- size SimpleAggregateFunction(sum, Int64)
+ size SimpleAggregateFunction(sum, Int64),
+ -- this allows us to count the number of messages received in a session
+ -- often very useful in incidents or debugging
+ message_count SimpleAggregateFunction(sum, Int64),
+ -- this allows us to count the number of snapshot events received in a session
+ -- often very useful in incidents or debugging
+ -- because we batch events we expect message_count to be lower than event_count
+ event_count SimpleAggregateFunction(sum, Int64),
+ _timestamp SimpleAggregateFunction(max, DateTime)
) ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/77f1df52-4b43-11e9-910f-b8ca3a9b9f3e_{shard}/posthog.session_replay_events', '{replica}')
PARTITION BY toYYYYMM(min_first_timestamp)
diff --git a/posthog/clickhouse/test/test_person_overrides.py b/posthog/clickhouse/test/test_person_overrides.py
index dd337d487aba7..f0d33c7d617f4 100644
--- a/posthog/clickhouse/test/test_person_overrides.py
+++ b/posthog/clickhouse/test/test_person_overrides.py
@@ -5,7 +5,7 @@
from uuid import UUID, uuid4
import pytest
-import pytz
+from zoneinfo import ZoneInfo
from kafka import KafkaProducer
from posthog.clickhouse.client import sync_execute
@@ -35,9 +35,9 @@ def test_can_insert_person_overrides():
old_person_id = uuid4()
override_person_id = uuid4()
oldest_event_string = "2020-01-01 00:00:00"
- oldest_event = datetime.fromisoformat(oldest_event_string).replace(tzinfo=pytz.UTC)
+ oldest_event = datetime.fromisoformat(oldest_event_string).replace(tzinfo=ZoneInfo("UTC"))
merged_at_string = "2020-01-02 00:00:00"
- merged_at = datetime.fromisoformat(merged_at_string).replace(tzinfo=pytz.UTC)
+ merged_at = datetime.fromisoformat(merged_at_string).replace(tzinfo=ZoneInfo("UTC"))
message = {
"team_id": 1,
"old_person_id": str(old_person_id),
@@ -82,7 +82,7 @@ def test_can_insert_person_overrides():
[result] = results
created_at, *the_rest = result
assert the_rest == [1, old_person_id, override_person_id, oldest_event, merged_at, 2]
- assert created_at > datetime.now(tz=pytz.UTC) - timedelta(seconds=10)
+ assert created_at > datetime.now(tz=ZoneInfo("UTC")) - timedelta(seconds=10)
finally:
producer.close()
diff --git a/posthog/datetime.py b/posthog/datetime.py
index 8dc6b6975fb8d..b8c4910e8b374 100644
--- a/posthog/datetime.py
+++ b/posthog/datetime.py
@@ -1,13 +1,22 @@
from datetime import datetime, timedelta
-def end_of_day(reference_date: datetime):
- return datetime(
- year=reference_date.year, month=reference_date.month, day=reference_date.day, tzinfo=reference_date.tzinfo
- ) + timedelta(days=1, microseconds=-1)
+def start_of_hour(dt: datetime) -> datetime:
+ return datetime(year=dt.year, month=dt.month, day=dt.day, hour=dt.hour, tzinfo=dt.tzinfo)
-def start_of_day(reference_date: datetime):
- return datetime(
- year=reference_date.year, month=reference_date.month, day=reference_date.day, tzinfo=reference_date.tzinfo
- )
+def start_of_day(dt: datetime):
+ return datetime(year=dt.year, month=dt.month, day=dt.day, tzinfo=dt.tzinfo)
+
+
+def end_of_day(dt: datetime):
+ return datetime(year=dt.year, month=dt.month, day=dt.day, tzinfo=dt.tzinfo) + timedelta(days=1, microseconds=-1)
+
+
+def start_of_week(dt: datetime) -> datetime:
+ # weeks start on sunday
+ return datetime(year=dt.year, month=dt.month, day=dt.day, tzinfo=dt.tzinfo) - timedelta(days=(dt.weekday() + 1) % 7)
+
+
+def start_of_month(dt: datetime) -> datetime:
+ return datetime(year=dt.year, month=dt.month, day=1, tzinfo=dt.tzinfo)
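Note: a short usage sketch of the new helpers (dates are illustrative; note the Sunday-based week start used above):

from datetime import datetime
from zoneinfo import ZoneInfo
from posthog.datetime import start_of_hour, start_of_day, start_of_week, start_of_month

dt = datetime(2023, 9, 13, 14, 35, tzinfo=ZoneInfo("UTC"))  # a Wednesday
start_of_hour(dt)   # 2023-09-13 14:00:00+00:00
start_of_day(dt)    # 2023-09-13 00:00:00+00:00
start_of_week(dt)   # 2023-09-10 00:00:00+00:00 (the preceding Sunday)
start_of_month(dt)  # 2023-09-01 00:00:00+00:00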
diff --git a/posthog/decorators.py b/posthog/decorators.py
index 2cefc1bb23f53..19b1bc33f98ae 100644
--- a/posthog/decorators.py
+++ b/posthog/decorators.py
@@ -1,7 +1,10 @@
+from datetime import datetime
from enum import Enum
from functools import wraps
from typing import Any, Callable, Dict, List, TypeVar, Union, cast
+from zoneinfo import ZoneInfo
+import posthoganalytics
from django.urls import resolve
from django.utils.timezone import now
from rest_framework.request import Request
@@ -9,8 +12,15 @@
from statshog.defaults.django import statsd
from posthog.clickhouse.query_tagging import tag_queries
+from posthog.cloud_utils import is_cloud
+from posthog.datetime import start_of_day, start_of_hour, start_of_month, start_of_week
from posthog.models import User
+from posthog.models.filters.filter import Filter
+from posthog.models.filters.path_filter import PathFilter
+from posthog.models.filters.retention_filter import RetentionFilter
+from posthog.models.filters.stickiness_filter import StickinessFilter
from posthog.models.filters.utils import get_filter
+from posthog.models.team.team import Team
from posthog.utils import refresh_requested_by_client
from .utils import generate_cache_key, get_safe_cache
@@ -74,9 +84,12 @@ def wrapper(self, request) -> T:
route = "unknown"
if cached_result_package and cached_result_package.get("result"):
- cached_result_package["is_cached"] = True
- statsd.incr("posthog_cached_function_cache_hit", tags={"route": route})
- return cached_result_package
+ if not is_stale(team, filter, cached_result_package):
+ cached_result_package["is_cached"] = True
+ statsd.incr("posthog_cached_function_cache_hit", tags={"route": route})
+ return cached_result_package
+ else:
+ statsd.incr("posthog_cached_function_cache_stale", tags={"route": route})
else:
statsd.incr("posthog_cached_function_cache_miss", tags={"route": route})
@@ -93,3 +106,49 @@ def wrapper(self, request) -> T:
return fresh_result_package
return wrapper
+
+
+def stale_cache_invalidation_disabled(team: Team) -> bool:
+ """Can be disabled temporarly to help in cases of service degradation."""
+ if is_cloud(): # on PostHog Cloud, use the feature flag
+ return not posthoganalytics.feature_enabled(
+ "stale-cache-invalidation-enabled",
+ str(team.uuid),
+ groups={"organization": str(team.organization.id)},
+ group_properties={
+ "organization": {"id": str(team.organization.id), "created_at": team.organization.created_at}
+ },
+ only_evaluate_locally=True,
+ send_feature_flag_events=False,
+ )
+ else:
+ return False
+
+
+def is_stale(team: Team, filter: Filter | RetentionFilter | StickinessFilter | PathFilter, cached_result: Any) -> bool:
+ """Indicates wether a cache item is obviously outdated based on filters,
+ i.e. the next time interval was entered since the last computation. For
+ example an insight with -7d date range that was last computed yesterday.
+ The same insight refreshed today wouldn't be marked as stale.
+ """
+
+ if stale_cache_invalidation_disabled(team):
+ return False
+
+ last_refresh = cached_result.get("last_refresh", None)
+ date_to = min([filter.date_to, datetime.now(tz=ZoneInfo("UTC"))]) # can't be later than now
+ interval = filter.period.lower() if isinstance(filter, RetentionFilter) else filter.interval
+
+ if last_refresh is None:
+ raise Exception("Cached results require a last_refresh")
+
+ if interval == "hour":
+ return start_of_hour(date_to) > start_of_hour(last_refresh)
+ elif interval == "day":
+ return start_of_day(date_to) > start_of_day(last_refresh)
+ elif interval == "week":
+ return start_of_week(date_to) > start_of_week(last_refresh)
+ elif interval == "month":
+ return start_of_month(date_to) > start_of_month(last_refresh)
+ else:
+ return False
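Note: a worked example of the staleness rule above for the "day" interval (values illustrative):

from datetime import datetime
from zoneinfo import ZoneInfo
from posthog.datetime import start_of_day

last_refresh = datetime(2023, 9, 12, 23, 50, tzinfo=ZoneInfo("UTC"))  # cached late yesterday
date_to = datetime(2023, 9, 13, 0, 5, tzinfo=ZoneInfo("UTC"))  # filter window ends today
# A new day has started since the cache was written, so the cached result counts as stale:
assert start_of_day(date_to) > start_of_day(last_refresh)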
diff --git a/posthog/demo/products/hedgebox/models.py b/posthog/demo/products/hedgebox/models.py
index 68272413aa2ba..132f3d6ac5f32 100644
--- a/posthog/demo/products/hedgebox/models.py
+++ b/posthog/demo/products/hedgebox/models.py
@@ -14,6 +14,7 @@
)
import pytz
+from zoneinfo import ZoneInfo
from posthog.demo.matrix.models import Effect, SimPerson, SimSessionIntent
@@ -673,7 +674,7 @@ def upgrade_plan(self):
if not self.account.was_billing_scheduled:
self.account.was_billing_scheduled = True
future_months = math.ceil(
- (self.cluster.end.astimezone(pytz.timezone(self.timezone)) - self.cluster.simulation_time).days / 30
+ (self.cluster.end.astimezone(ZoneInfo(self.timezone)) - self.cluster.simulation_time).days / 30
)
for i in range(future_months):
bill_timestamp = self.cluster.simulation_time + dt.timedelta(days=30 * i)
diff --git a/posthog/demo/test/test_matrix_manager.py b/posthog/demo/test/test_matrix_manager.py
index 27463e1bd692d..99f0451c5485d 100644
--- a/posthog/demo/test/test_matrix_manager.py
+++ b/posthog/demo/test/test_matrix_manager.py
@@ -2,7 +2,7 @@
from enum import auto
from typing import Optional
-import pytz
+from zoneinfo import ZoneInfo
from posthog.client import sync_execute
from posthog.demo.matrix.manager import MatrixManager
@@ -54,7 +54,9 @@ class TestMatrixManager(ClickhouseDestroyTablesMixin):
@classmethod
def setUpTestData(cls):
super().setUpTestData()
- cls.matrix = DummyMatrix(n_clusters=3, now=dt.datetime(2020, 1, 1, 0, 0, 0, 0, tzinfo=pytz.UTC), days_future=0)
+ cls.matrix = DummyMatrix(
+ n_clusters=3, now=dt.datetime(2020, 1, 1, 0, 0, 0, 0, tzinfo=ZoneInfo("UTC")), days_future=0
+ )
cls.matrix.simulate()
def test_reset_master(self):
diff --git a/posthog/errors.py b/posthog/errors.py
index 5cd3342f7a3fa..b2d34ed858448 100644
--- a/posthog/errors.py
+++ b/posthog/errors.py
@@ -206,7 +206,7 @@ def look_up_error_code_meta(error: ServerException) -> ErrorCodeMeta:
131: ErrorCodeMeta("TOO_LARGE_STRING_SIZE"),
133: ErrorCodeMeta("AGGREGATE_FUNCTION_DOESNT_ALLOW_PARAMETERS"),
134: ErrorCodeMeta("PARAMETERS_TO_AGGREGATE_FUNCTIONS_MUST_BE_LITERALS"),
- 135: ErrorCodeMeta("ZERO_ARRAY_OR_TUPLE_INDEX"),
+ 135: ErrorCodeMeta("ZERO_ARRAY_OR_TUPLE_INDEX", user_safe=True),
137: ErrorCodeMeta("UNKNOWN_ELEMENT_IN_CONFIG"),
138: ErrorCodeMeta("EXCESSIVE_ELEMENT_IN_CONFIG"),
139: ErrorCodeMeta("NO_ELEMENTS_IN_CONFIG"),
diff --git a/posthog/hogql/database/schema/session_replay_events.py b/posthog/hogql/database/schema/session_replay_events.py
index c4f1980df5491..b8d79e86d9780 100644
--- a/posthog/hogql/database/schema/session_replay_events.py
+++ b/posthog/hogql/database/schema/session_replay_events.py
@@ -31,6 +31,8 @@
"console_warn_count": IntegerDatabaseField(name="console_warn_count"),
"console_error_count": IntegerDatabaseField(name="console_error_count"),
"size": IntegerDatabaseField(name="size"),
+ "event_count": IntegerDatabaseField(name="event_count"),
+ "message_count": IntegerDatabaseField(name="message_count"),
"pdi": LazyJoin(
from_field="distinct_id",
join_table=PersonDistinctIdsTable(),
@@ -77,6 +79,8 @@ def select_from_session_replay_events_table(requested_fields: Dict[str, List[str
"console_error_count": ast.Call(name="sum", args=[ast.Field(chain=[table_name, "console_error_count"])]),
"distinct_id": ast.Call(name="any", args=[ast.Field(chain=[table_name, "distinct_id"])]),
"size": ast.Call(name="sum", args=[ast.Field(chain=[table_name, "size"])]),
+ "event_count": ast.Call(name="sum", args=[ast.Field(chain=[table_name, "event_count"])]),
+ "message_count": ast.Call(name="sum", args=[ast.Field(chain=[table_name, "message_count"])]),
}
select_fields: List[ast.Expr] = []
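Note: once exposed in the HogQL database schema, the new aggregates can be selected directly; a sketch (team is assumed to be an existing Team instance):

from posthog.hogql.query import execute_hogql_query

response = execute_hogql_query(
    query="select session_id, event_count, message_count from session_replay_events limit 10",
    team=team,  # an existing Team instance (assumption)
)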
diff --git a/posthog/hogql/database/test/__snapshots__/test_database.ambr b/posthog/hogql/database/test/__snapshots__/test_database.ambr
index 166391d344856..9e1413d84a0bf 100644
--- a/posthog/hogql/database/test/__snapshots__/test_database.ambr
+++ b/posthog/hogql/database/test/__snapshots__/test_database.ambr
@@ -276,6 +276,14 @@
"key": "size",
"type": "integer"
},
+ {
+ "key": "event_count",
+ "type": "integer"
+ },
+ {
+ "key": "message_count",
+ "type": "integer"
+ },
{
"key": "pdi",
"type": "lazy_table",
@@ -405,6 +413,14 @@
"key": "size",
"type": "integer"
},
+ {
+ "key": "event_count",
+ "type": "integer"
+ },
+ {
+ "key": "message_count",
+ "type": "integer"
+ },
{
"key": "pdi",
"type": "lazy_table",
@@ -849,6 +865,14 @@
"key": "size",
"type": "integer"
},
+ {
+ "key": "event_count",
+ "type": "integer"
+ },
+ {
+ "key": "message_count",
+ "type": "integer"
+ },
{
"key": "pdi",
"type": "lazy_table",
@@ -978,6 +1002,14 @@
"key": "size",
"type": "integer"
},
+ {
+ "key": "event_count",
+ "type": "integer"
+ },
+ {
+ "key": "message_count",
+ "type": "integer"
+ },
{
"key": "pdi",
"type": "lazy_table",
diff --git a/posthog/hogql/placeholders.py b/posthog/hogql/placeholders.py
index 670b98cfd45e5..bd63ce32754c0 100644
--- a/posthog/hogql/placeholders.py
+++ b/posthog/hogql/placeholders.py
@@ -32,7 +32,7 @@ def __init__(self, placeholders: Optional[Dict[str, ast.Expr]]):
def visit_placeholder(self, node):
if not self.placeholders:
raise HogQLException(f"Placeholders, such as {{{node.field}}}, are not supported in this context")
- if node.field in self.placeholders:
+ if node.field in self.placeholders and self.placeholders[node.field] is not None:
new_node = self.placeholders[node.field]
new_node.start = node.start
new_node.end = node.end
diff --git a/posthog/hogql/property.py b/posthog/hogql/property.py
index 3caa10d51f8f6..81efafc225a1f 100644
--- a/posthog/hogql/property.py
+++ b/posthog/hogql/property.py
@@ -15,7 +15,7 @@
from posthog.models.property import PropertyGroup
from posthog.models.property.util import build_selector_regex
from posthog.models.property_definition import PropertyType
-from posthog.schema import PropertyOperator
+from posthog.schema import PropertyOperator, PropertyGroupFilter, PropertyGroupFilterValue, FilterLogicalOperator
def has_aggregation(expr: AST) -> bool:
@@ -59,16 +59,30 @@ def property_to_expr(property: Union[BaseModel, PropertyGroup, Property, dict, l
return ast.And(exprs=properties)
elif isinstance(property, Property):
pass
- elif isinstance(property, PropertyGroup):
- if property.type != PropertyOperatorType.AND and property.type != PropertyOperatorType.OR:
+ elif (
+ isinstance(property, PropertyGroup)
+ or isinstance(property, PropertyGroupFilter)
+ or isinstance(property, PropertyGroupFilterValue)
+ ):
+ if (
+ isinstance(property, PropertyGroup)
+ and property.type != PropertyOperatorType.AND
+ and property.type != PropertyOperatorType.OR
+ ):
raise NotImplementedException(f'PropertyGroup of unknown type "{property.type}"')
+ if (
+ (isinstance(property, PropertyGroupFilter) or isinstance(property, PropertyGroupFilterValue))
+ and property.type != FilterLogicalOperator.AND
+ and property.type != FilterLogicalOperator.OR
+ ):
+ raise NotImplementedException(f'PropertyGroupFilter of unknown type "{property.type}"')
if len(property.values) == 0:
return ast.Constant(value=True)
if len(property.values) == 1:
return property_to_expr(property.values[0], team)
- if property.type == PropertyOperatorType.AND:
+ if property.type == PropertyOperatorType.AND or property.type == FilterLogicalOperator.AND:
return ast.And(exprs=[property_to_expr(p, team) for p in property.values])
else:
return ast.Or(exprs=[property_to_expr(p, team) for p in property.values])
diff --git a/posthog/hogql/test/test_query.py b/posthog/hogql/test/test_query.py
index e939d9ce8aef6..ed84eeaf4af6d 100644
--- a/posthog/hogql/test/test_query.py
+++ b/posthog/hogql/test/test_query.py
@@ -1,6 +1,6 @@
from uuid import UUID
-import pytz
+from zoneinfo import ZoneInfo
from django.test import override_settings
from django.utils import timezone
from freezegun import freeze_time
@@ -817,21 +817,21 @@ def test_window_functions_simple(self):
expected += [
(
f"person_{person}_{random_uuid}",
- datetime.datetime(2020, 1, 10, 00, 00, 00, tzinfo=pytz.UTC),
+ datetime.datetime(2020, 1, 10, 00, 00, 00, tzinfo=ZoneInfo("UTC")),
"random event",
[],
["random bla", "random boo"],
),
(
f"person_{person}_{random_uuid}",
- datetime.datetime(2020, 1, 10, 00, 10, 00, tzinfo=pytz.UTC),
+ datetime.datetime(2020, 1, 10, 00, 10, 00, tzinfo=ZoneInfo("UTC")),
"random bla",
["random event"],
["random boo"],
),
(
f"person_{person}_{random_uuid}",
- datetime.datetime(2020, 1, 10, 00, 20, 00, tzinfo=pytz.UTC),
+ datetime.datetime(2020, 1, 10, 00, 20, 00, tzinfo=ZoneInfo("UTC")),
"random boo",
["random event", "random bla"],
[],
@@ -902,7 +902,7 @@ def test_window_functions_with_window(self):
expected += [
(
f"person_{person}_{random_uuid}",
- datetime.datetime(2020, 1, 10, 00, 00, 00, tzinfo=pytz.UTC),
+ datetime.datetime(2020, 1, 10, 00, 00, 00, tzinfo=ZoneInfo("UTC")),
"random event",
[],
["random bla", "random boo"],
@@ -917,7 +917,7 @@ def test_window_functions_with_window(self):
),
(
f"person_{person}_{random_uuid}",
- datetime.datetime(2020, 1, 10, 00, 10, 00, tzinfo=pytz.UTC),
+ datetime.datetime(2020, 1, 10, 00, 10, 00, tzinfo=ZoneInfo("UTC")),
"random bla",
["random event"],
["random boo"],
@@ -932,7 +932,7 @@ def test_window_functions_with_window(self):
),
(
f"person_{person}_{random_uuid}",
- datetime.datetime(2020, 1, 10, 00, 20, 00, tzinfo=pytz.UTC),
+ datetime.datetime(2020, 1, 10, 00, 20, 00, tzinfo=ZoneInfo("UTC")),
"random boo",
["random event", "random bla"],
[],
@@ -1226,7 +1226,7 @@ def test_null_equality(self):
("null", "!~*", "null", 0),
]
- for (a, op, b, res) in expected:
+ for a, op, b, res in expected:
# works when selecting directly
query = f"select {a} {op} {b}"
response = execute_hogql_query(query, team=self.team)
diff --git a/posthog/hogql_queries/lifecycle_hogql_query.py b/posthog/hogql_queries/lifecycle_hogql_query.py
index 2df71a976d1a9..6b73034fdfcf3 100644
--- a/posthog/hogql_queries/lifecycle_hogql_query.py
+++ b/posthog/hogql_queries/lifecycle_hogql_query.py
@@ -1,43 +1,27 @@
-from typing import Dict, Any
+from typing import Optional
from django.utils.timezone import datetime
from posthog.hogql import ast
from posthog.hogql.parser import parse_expr, parse_select
+from posthog.hogql.property import property_to_expr, action_to_expr
from posthog.hogql.query import execute_hogql_query
+from posthog.hogql.timings import HogQLTimings
+from posthog.models import Team, Action
from posthog.hogql_queries.query_date_range import QueryDateRange
-from posthog.models import Team
-from posthog.schema import LifecycleQuery
+from posthog.schema import LifecycleQuery, ActionsNode, EventsNode, LifecycleQueryResponse
-def create_time_filter(date_range: QueryDateRange) -> ast.Expr:
- # don't need timezone here, as HogQL will use the project timezone automatically
- # :TRICKY: We fetch all data even for the period before the graph starts up until the end of the last period
- time_filter = parse_expr(
- """
- (timestamp >= dateTrunc({interval}, {date_from}) - {one_interval_period})
- AND
- (timestamp < dateTrunc({interval}, {date_to}) + {one_interval_period})
- """,
- placeholders={
- "date_from": date_range.date_from_as_hogql,
- "date_to": date_range.date_to_as_hogql,
- "one_interval_period": date_range.one_interval_period_as_hogql,
- "interval": date_range.interval_period_string_as_hogql,
- },
- )
-
- return time_filter
-
-
-def create_events_query(date_range: QueryDateRange, event_filter: ast.Expr):
- if not event_filter:
- event_filter = ast.Constant(value=True)
-
+def create_events_query(
+ query_date_range: QueryDateRange,
+ event_filter: Optional[ast.Expr],
+ timings: HogQLTimings,
+ sampling_factor: Optional[float] = None,
+):
placeholders = {
- "event_filter": event_filter,
- "interval": date_range.interval_period_string_as_hogql,
- "one_interval_period": date_range.one_interval_period_as_hogql,
+ "event_filter": event_filter or ast.Constant(value=True),
+ "interval": query_date_range.interval_period_string_as_hogql_constant(),
+ "one_interval_period": query_date_range.one_interval_period(),
}
events_query = parse_select(
@@ -61,105 +45,182 @@ def create_events_query(date_range: QueryDateRange, event_filter: ast.Expr):
GROUP BY person_id
""",
placeholders=placeholders,
+ timings=timings,
)
- return events_query
+ if sampling_factor is not None and isinstance(sampling_factor, float):
+ sample_expr = ast.SampleExpr(sample_value=ast.RatioExpr(left=ast.Constant(value=sampling_factor)))
+ events_query.select_from.sample = sample_expr
-def run_lifecycle_query(
- team: Team,
- query: LifecycleQuery,
-) -> Dict[str, Any]:
- now_dt = datetime.now()
+ return events_query
- query_date_range = QueryDateRange(date_range=query.dateRange, team=team, interval=query.interval, now=now_dt)
- interval = query_date_range.interval.name
- one_interval_period = query_date_range.one_interval_period_as_hogql
- number_interval_period = query_date_range.interval_periods_as_hogql("number")
+def run_lifecycle_query(team: Team, query: LifecycleQuery) -> LifecycleQueryResponse:
+ now_dt = datetime.now()
+ timings = HogQLTimings()
+
+ event_filter = []
+ with timings.measure("date_range"):
+ query_date_range = QueryDateRange(date_range=query.dateRange, team=team, interval=query.interval, now=now_dt)
+ event_filter.append(
+ parse_expr(
+ "timestamp >= dateTrunc({interval}, {date_from}) - {one_interval}",
+ {
+ "interval": query_date_range.interval_period_string_as_hogql_constant(),
+ "one_interval": query_date_range.one_interval_period(),
+ "date_from": query_date_range.date_from_as_hogql(),
+ },
+ timings=timings,
+ )
+ )
+ event_filter.append(
+ parse_expr(
+ "timestamp < dateTrunc({interval}, {date_to}) + {one_interval}",
+ {
+ "interval": query_date_range.interval_period_string_as_hogql_constant(),
+ "one_interval": query_date_range.one_interval_period(),
+ "date_to": query_date_range.date_to_as_hogql(),
+ },
+ timings=timings,
+ )
+ )
- time_filter = create_time_filter(query_date_range)
- event_filter = time_filter # TODO: add all other filters
+ with timings.measure("properties"):
+ if query.properties is not None and query.properties != []:
+ event_filter.append(property_to_expr(query.properties, team))
+
+ with timings.measure("series_filters"):
+ for serie in query.series or []:
+ if isinstance(serie, ActionsNode):
+ action = Action.objects.get(pk=int(serie.id), team=team)
+ event_filter.append(action_to_expr(action))
+ elif isinstance(serie, EventsNode):
+ if serie.event is not None:
+ event_filter.append(
+ ast.CompareOperation(
+ op=ast.CompareOperationOp.Eq,
+ left=ast.Field(chain=["event"]),
+ right=ast.Constant(value=str(serie.event)),
+ )
+ )
+ else:
+ raise ValueError(f"Invalid serie kind: {serie.kind}")
+ if serie.properties is not None and serie.properties != []:
+ event_filter.append(property_to_expr(serie.properties, team))
+
+ with timings.measure("test_account_filters"):
+ if (
+ query.filterTestAccounts
+ and isinstance(team.test_account_filters, list)
+ and len(team.test_account_filters) > 0
+ ):
+ for property in team.test_account_filters:
+ event_filter.append(property_to_expr(property, team))
+
+ if len(event_filter) == 0:
+ event_filter = ast.Constant(value=True)
+ elif len(event_filter) == 1:
+ event_filter = event_filter[0]
+ else:
+ event_filter = ast.And(exprs=event_filter)
placeholders = {
- "interval": ast.Constant(value=interval),
- "one_interval_period": one_interval_period,
- "number_interval_period": number_interval_period,
+ "interval": query_date_range.interval_period_string_as_hogql_constant(),
+ "one_interval_period": query_date_range.one_interval_period(),
+ "number_interval_period": query_date_range.number_interval_periods(),
"event_filter": event_filter,
- "date_from": query_date_range.date_from_as_hogql,
- "date_to": query_date_range.date_to_as_hogql,
+ "date_from": query_date_range.date_from_as_hogql(),
+ "date_to": query_date_range.date_to_as_hogql(),
}
- events_query = create_events_query(date_range=query_date_range, event_filter=event_filter)
+ with timings.measure("events_query"):
+ events_query = create_events_query(
+ query_date_range=query_date_range,
+ event_filter=event_filter,
+ sampling_factor=query.samplingFactor,
+ timings=timings,
+ )
- periods = parse_select(
- """
- SELECT (
- dateTrunc({interval}, {date_to}) - {number_interval_period}
- ) AS start_of_period
- FROM numbers(
- dateDiff(
- {interval},
- dateTrunc({interval}, {date_from}),
- dateTrunc({interval}, {date_to} + {one_interval_period})
+ with timings.measure("periods_query"):
+ periods = parse_select(
+ """
+ SELECT (
+ dateTrunc({interval}, {date_to}) - {number_interval_period}
+ ) AS start_of_period
+ FROM numbers(
+ dateDiff(
+ {interval},
+ dateTrunc({interval}, {date_from}),
+ dateTrunc({interval}, {date_to} + {one_interval_period})
+ )
)
- )
- """,
- placeholders=placeholders,
- )
+ """,
+ placeholders=placeholders,
+ timings=timings,
+ )
- lifecycle_sql = parse_select(
- """
- SELECT groupArray(start_of_period) AS date,
- groupArray(counts) AS total,
- status
- FROM (
- SELECT
- status = 'dormant' ? negate(sum(counts)) : negate(negate(sum(counts))) as counts,
- start_of_period,
- status
+ with timings.measure("lifecycle_query"):
+ lifecycle_sql = parse_select(
+ """
+ SELECT groupArray(start_of_period) AS date,
+ groupArray(counts) AS total,
+ status
FROM (
SELECT
- periods.start_of_period as start_of_period,
- 0 AS counts,
+ status = 'dormant' ? negate(sum(counts)) : negate(negate(sum(counts))) as counts,
+ start_of_period,
status
- FROM {periods} as periods
- CROSS JOIN (
- SELECT status
- FROM (SELECT 1)
- ARRAY JOIN ['new', 'returning', 'resurrecting', 'dormant'] as status
- ) as sec
- ORDER BY status, start_of_period
- UNION ALL
- SELECT
- start_of_period, count(DISTINCT person_id) AS counts, status
- FROM {events_query}
+ FROM (
+ SELECT
+ periods.start_of_period as start_of_period,
+ 0 AS counts,
+ status
+ FROM {periods} as periods
+ CROSS JOIN (
+ SELECT status
+ FROM (SELECT 1)
+ ARRAY JOIN ['new', 'returning', 'resurrecting', 'dormant'] as status
+ ) as sec
+ ORDER BY status, start_of_period
+ UNION ALL
+ SELECT
+ start_of_period, count(DISTINCT person_id) AS counts, status
+ FROM {events_query}
+ GROUP BY start_of_period, status
+ )
+ WHERE start_of_period <= dateTrunc({interval}, {date_to})
+ AND start_of_period >= dateTrunc({interval}, {date_from})
GROUP BY start_of_period, status
+ ORDER BY start_of_period ASC
)
- WHERE start_of_period <= dateTrunc({interval}, {date_to})
- AND start_of_period >= dateTrunc({interval}, {date_from})
- GROUP BY start_of_period, status
- ORDER BY start_of_period ASC
- )
- GROUP BY status
- """,
- {**placeholders, "periods": periods, "events_query": events_query},
- )
+ GROUP BY status
+ """,
+ {**placeholders, "periods": periods, "events_query": events_query},
+ timings=timings,
+ )
response = execute_hogql_query(
team=team,
query=lifecycle_sql,
query_type="LifecycleQuery",
+ timings=timings,
)
# ensure that the items are in a deterministic order
order = {"new": 1, "returning": 2, "resurrecting": 3, "dormant": 4}
- results = sorted(response.results, key=lambda result: order.get(result[2], result[2]))
+ results = sorted(response.results, key=lambda result: order.get(result[2], 5))
res = []
for val in results:
counts = val[1]
- labels = [item.strftime("%-d-%b-%Y{}".format(" %H:%M" if interval == "hour" else "")) for item in val[0]]
- days = [item.strftime("%Y-%m-%d{}".format(" %H:%M:%S" if interval == "hour" else "")) for item in val[0]]
+ labels = [
+ item.strftime("%-d-%b-%Y{}".format(" %H:%M" if query_date_range.interval_name == "hour" else ""))
+ for item in val[0]
+ ]
+ days = [
+ item.strftime("%Y-%m-%d{}".format(" %H:%M:%S" if query_date_range.interval_name == "hour" else ""))
+ for item in val[0]
+ ]
label = "{} - {}".format("", val[2]) # entity.name
additional_values = {"label": label, "status": val[2]}
@@ -173,4 +234,4 @@ def run_lifecycle_query(
}
)
- return {"result": res}
+ return LifecycleQueryResponse(result=res, timings=response.timings)
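Note: a usage sketch of the refactored entry point, mirroring the tests further down; the return value is now a LifecycleQueryResponse rather than a plain dict (team is assumed to be an existing Team instance):

from posthog.hogql_queries.lifecycle_hogql_query import run_lifecycle_query
from posthog.schema import DateRange, EventsNode, IntervalType, LifecycleQuery

query = LifecycleQuery(
    dateRange=DateRange(date_from="2020-01-12", date_to="2020-01-19"),
    interval=IntervalType.day,
    series=[EventsNode(event="$pageview")],
)
response = run_lifecycle_query(team=team, query=query)
statuses = [row["status"] for row in response.result]  # e.g. ["new", "returning", "resurrecting", "dormant"]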
diff --git a/posthog/hogql_queries/query_date_range.py b/posthog/hogql_queries/query_date_range.py
index 4d76b222deb2b..35695b37181c5 100644
--- a/posthog/hogql_queries/query_date_range.py
+++ b/posthog/hogql_queries/query_date_range.py
@@ -1,11 +1,12 @@
+import re
+from functools import cached_property
from datetime import datetime
-from functools import cached_property, lru_cache
from typing import Optional
+from zoneinfo import ZoneInfo
-import pytz
from dateutil.relativedelta import relativedelta
-from posthog.hogql.parser import parse_expr, ast
+from posthog.hogql.parser import ast
from posthog.models.team import Team
from posthog.queries.util import get_earliest_timestamp
from posthog.schema import DateRange, IntervalType
@@ -19,96 +20,96 @@ class QueryDateRange:
_team: Team
_date_range: Optional[DateRange]
_interval: Optional[IntervalType]
- _now_non_timezone: datetime
+ _now_without_timezone: datetime
def __init__(
self, date_range: Optional[DateRange], team: Team, interval: Optional[IntervalType], now: datetime
) -> None:
self._team = team
self._date_range = date_range
- self._interval = interval
- self._now_non_timezone = now
+ self._interval = interval or IntervalType.day
+ self._now_without_timezone = now
+
+ if not isinstance(self._interval, IntervalType) or re.match(r"[^a-z]", self._interval.name):
+ raise ValueError(f"Invalid interval: {interval}")
- @cached_property
def date_to(self) -> datetime:
- date_to = self._now
+ date_to = self.now_with_timezone
delta_mapping = None
if self._date_range and self._date_range.date_to:
date_to, delta_mapping = relative_date_parse_with_delta_mapping(
- self._date_range.date_to, self._team.timezone_info, always_truncate=True, now=self._now
+ self._date_range.date_to, self._team.timezone_info, always_truncate=True, now=self.now_with_timezone
)
is_relative = not self._date_range or not self._date_range.date_to or delta_mapping is not None
- if not self.is_hourly():
+ if not self.is_hourly:
date_to = date_to.replace(hour=23, minute=59, second=59, microsecond=999999)
elif is_relative:
date_to = date_to.replace(minute=59, second=59, microsecond=999999)
return date_to
- def get_earliest_timestamp(self):
+ def get_earliest_timestamp(self) -> datetime:
return get_earliest_timestamp(self._team.pk)
- @cached_property
def date_from(self) -> datetime:
date_from: datetime
if self._date_range and self._date_range.date_from == "all":
date_from = self.get_earliest_timestamp()
elif self._date_range and isinstance(self._date_range.date_from, str):
- date_from = relative_date_parse(self._date_range.date_from, self._team.timezone_info, now=self._now)
+ date_from = relative_date_parse(
+ self._date_range.date_from, self._team.timezone_info, now=self.now_with_timezone
+ )
else:
- date_from = self._now.replace(hour=0, minute=0, second=0, microsecond=0) - relativedelta(
+ date_from = self.now_with_timezone.replace(hour=0, minute=0, second=0, microsecond=0) - relativedelta(
days=DEFAULT_DATE_FROM_DAYS
)
- if not self.is_hourly():
+ if not self.is_hourly:
date_from = date_from.replace(hour=0, minute=0, second=0, microsecond=0)
return date_from
@cached_property
- def _now(self):
- return self._localize_to_team(self._now_non_timezone)
-
- def _localize_to_team(self, target: datetime):
- return target.astimezone(pytz.timezone(self._team.timezone))
+ def now_with_timezone(self) -> datetime:
+ return self._now_without_timezone.astimezone(ZoneInfo(self._team.timezone))
@cached_property
def date_to_str(self) -> str:
- return self.date_to.strftime("%Y-%m-%d %H:%M:%S")
+ return self.date_to().strftime("%Y-%m-%d %H:%M:%S")
@cached_property
def date_from_str(self) -> str:
- return self.date_from.strftime("%Y-%m-%d %H:%M:%S")
-
- def is_hourly(self):
- return self.interval.name == "hour"
+ return self.date_from().strftime("%Y-%m-%d %H:%M:%S")
@cached_property
- def date_to_as_hogql(self):
- return parse_expr(f"assumeNotNull(toDateTime('{self.date_to_str}'))")
+ def is_hourly(self) -> bool:
+ return self.interval_name == "hour"
@cached_property
- def date_from_as_hogql(self):
- return parse_expr(f"assumeNotNull(toDateTime('{self.date_from_str}'))")
-
- @cached_property
- def interval(self):
+ def interval_type(self) -> IntervalType:
return self._interval or IntervalType.day
@cached_property
- def one_interval_period_as_hogql(self):
- return parse_expr(f"toInterval{self.interval.capitalize()}(1)")
+ def interval_name(self) -> str:
+ return self.interval_type.name
- @lru_cache
- def interval_periods_as_hogql(self, s: str):
- return parse_expr(f"toInterval{self.interval.capitalize()}({s})")
+ def date_to_as_hogql(self) -> ast.Expr:
+ return ast.Call(
+ name="assumeNotNull", args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=self.date_to_str))])]
+ )
- @cached_property
- def interval_period_string(self):
- return self.interval.value
+ def date_from_as_hogql(self) -> ast.Expr:
+ return ast.Call(
+ name="assumeNotNull", args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=self.date_from_str))])]
+ )
- @cached_property
- def interval_period_string_as_hogql(self):
- return ast.Constant(value=self.interval.value)
+ def one_interval_period(self) -> ast.Expr:
+ return ast.Call(name=f"toInterval{self.interval_name.capitalize()}", args=[ast.Constant(value=1)])
+
+ def number_interval_periods(self) -> ast.Expr:
+ return ast.Call(name=f"toInterval{self.interval_name.capitalize()}", args=[ast.Field(chain=["number"])])
+
+ def interval_period_string_as_hogql_constant(self) -> ast.Expr:
+ return ast.Constant(value=self.interval_name)
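Note: the cached properties that returned parsed expressions are now plain methods returning ast nodes; a usage sketch (team is assumed to be an existing Team instance):

from datetime import datetime
from zoneinfo import ZoneInfo
from posthog.hogql_queries.query_date_range import QueryDateRange
from posthog.schema import DateRange, IntervalType

query_date_range = QueryDateRange(
    date_range=DateRange(date_from="-7d"),
    team=team,  # an existing Team instance (assumption)
    interval=IntervalType.day,
    now=datetime(2023, 9, 13, 12, 0, tzinfo=ZoneInfo("UTC")),
)
query_date_range.date_from()  # plain datetime at the start of the window
query_date_range.one_interval_period()  # ast.Call for toIntervalDay(1)
query_date_range.interval_period_string_as_hogql_constant()  # ast.Constant(value="day")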
diff --git a/posthog/hogql_queries/test/test_lifecycle_hogql_query.py b/posthog/hogql_queries/test/test_lifecycle_hogql_query.py
index 5cc56252b046f..fb35ace5f5baa 100644
--- a/posthog/hogql_queries/test/test_lifecycle_hogql_query.py
+++ b/posthog/hogql_queries/test/test_lifecycle_hogql_query.py
@@ -1,11 +1,7 @@
-from datetime import datetime
-
from freezegun import freeze_time
-from posthog.hogql.query import execute_hogql_query
from posthog.models.utils import UUIDT
-from posthog.hogql_queries.lifecycle_hogql_query import create_events_query, create_time_filter, run_lifecycle_query
-from posthog.hogql_queries.query_date_range import QueryDateRange
+from posthog.hogql_queries.lifecycle_hogql_query import run_lifecycle_query
from posthog.schema import DateRange, IntervalType, LifecycleQuery, EventsNode
from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, _create_person, flush_persons_and_events
@@ -67,99 +63,6 @@ def _create_test_events(self):
]
)
- def _run_events_query(self, date_from, date_to, interval):
- date_range = QueryDateRange(
- date_range=DateRange(date_from=date_from, date_to=date_to),
- team=self.team,
- interval=interval,
- now=datetime.strptime("2020-01-30T00:00:00Z", "%Y-%m-%dT%H:%M:%SZ"),
- )
- time_filter = create_time_filter(date_range)
-
- # TODO probably doesn't make sense to test like this
- # maybe this query should be what is returned by the function
- events_query = create_events_query(event_filter=time_filter, date_range=date_range)
- return execute_hogql_query(
- team=self.team,
- query="""
- SELECT
- start_of_period, count(DISTINCT person_id) AS counts, status
- FROM {events_query}
- GROUP BY start_of_period, status
- """,
- query_type="LifecycleQuery",
- placeholders={"events_query": events_query},
- )
-
- def test_events_query_whole_range(self):
- self._create_test_events()
-
- date_from = "2020-01-09"
- date_to = "2020-01-19"
-
- response = self._run_events_query(date_from, date_to, IntervalType.day)
-
- self.assertEqual(
- {
- (datetime(2020, 1, 9, 0, 0), 1, "new"), # p2
- (datetime(2020, 1, 10, 0, 0), 1, "dormant"), # p2
- (datetime(2020, 1, 11, 0, 0), 1, "new"), # p1
- (datetime(2020, 1, 12, 0, 0), 1, "new"), # p3
- (datetime(2020, 1, 12, 0, 0), 1, "resurrecting"), # p2
- (datetime(2020, 1, 12, 0, 0), 1, "returning"), # p1
- (datetime(2020, 1, 13, 0, 0), 1, "returning"), # p1
- (datetime(2020, 1, 13, 0, 0), 2, "dormant"), # p2, p3
- (datetime(2020, 1, 14, 0, 0), 1, "dormant"), # p1
- (datetime(2020, 1, 15, 0, 0), 1, "resurrecting"), # p1
- (datetime(2020, 1, 15, 0, 0), 1, "new"), # p4
- (datetime(2020, 1, 16, 0, 0), 2, "dormant"), # p1, p4
- (datetime(2020, 1, 17, 0, 0), 1, "resurrecting"), # p1
- (datetime(2020, 1, 18, 0, 0), 1, "dormant"), # p1
- (datetime(2020, 1, 19, 0, 0), 1, "resurrecting"), # p1
- (datetime(2020, 1, 20, 0, 0), 1, "dormant"), # p1
- },
- set(response.results),
- )
-
- def test_events_query_partial_range(self):
- self._create_test_events()
- date_from = "2020-01-12"
- date_to = "2020-01-14"
- response = self._run_events_query(date_from, date_to, IntervalType.day)
-
- self.assertEqual(
- {
- (datetime(2020, 1, 11, 0, 0), 1, "new"), # p1
- (datetime(2020, 1, 12, 0, 0), 1, "new"), # p3
- (datetime(2020, 1, 12, 0, 0), 1, "resurrecting"), # p2
- (datetime(2020, 1, 12, 0, 0), 1, "returning"), # p1
- (datetime(2020, 1, 13, 0, 0), 1, "returning"), # p1
- (datetime(2020, 1, 13, 0, 0), 2, "dormant"), # p2, p3
- (datetime(2020, 1, 14, 0, 0), 1, "dormant"), # p1
- },
- set(response.results),
- )
-
- # def test_start_on_dormant(self):
- # self.create_test_events()
- # date_from = "2020-01-13"
- # date_to = "2020-01-14"
- # response = self.run_events_query(date_from, date_to, IntervalType.day)
- #
- # self.assertEqual(
- # {
- # (datetime(2020, 1, 12, 0, 0), 1, "new"), # p3
- # # TODO this currently fails, as it treats p1 as resurrecting.
- # # This might just be fine, later in the query we would just throw away results before the 13th
- # (datetime(2020, 1, 12, 0, 0), 1, "resurrecting"), # p2
- # (datetime(2020, 1, 12, 0, 0), 1, "returning"), # p1
- # (datetime(2020, 1, 13, 0, 0), 1, "returning"), # p1
- # (datetime(2020, 1, 13, 0, 0), 2, "dormant"), # p2, p3
- # (datetime(2020, 1, 14, 0, 0), 1, "dormant"), # p1
- # },
- # set(response.results),
- # )
-
def _run_lifecycle_query(self, date_from, date_to, interval):
series = [EventsNode(event="$pageview")]
query = LifecycleQuery(
@@ -175,7 +78,7 @@ def test_lifecycle_query_whole_range(self):
response = self._run_lifecycle_query(date_from, date_to, IntervalType.day)
- statuses = [res["status"] for res in response["result"]]
+ statuses = [res["status"] for res in response.result]
self.assertEqual(["new", "returning", "resurrecting", "dormant"], statuses)
self.assertEqual(
@@ -357,5 +260,5 @@ def test_lifecycle_query_whole_range(self):
"status": "dormant",
},
],
- response["result"],
+ response.result,
)
diff --git a/posthog/hogql_queries/test/test_query_date_range.py b/posthog/hogql_queries/test/test_query_date_range.py
index 82966cc5f1bff..42787912887b2 100644
--- a/posthog/hogql_queries/test/test_query_date_range.py
+++ b/posthog/hogql_queries/test/test_query_date_range.py
@@ -10,32 +10,17 @@ def test_parsed_date(self):
now = parser.isoparse("2021-08-25T00:00:00.000Z")
date_range = DateRange(date_from="-48h")
query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.day, now=now)
- parsed_date_from = query_date_range.date_from
- parsed_date_to = query_date_range.date_to
- self.assertEqual(
- parsed_date_from,
- parser.isoparse("2021-08-23T00:00:00Z"),
- )
- self.assertEqual(
- parsed_date_to,
- parser.isoparse("2021-08-25T23:59:59.999999Z"),
- )
+ self.assertEqual(query_date_range.date_from(), parser.isoparse("2021-08-23T00:00:00Z"))
+ self.assertEqual(query_date_range.date_to(), parser.isoparse("2021-08-25T23:59:59.999999Z"))
def test_parsed_date_hour(self):
now = parser.isoparse("2021-08-25T00:00:00.000Z")
date_range = DateRange(date_from="-48h")
query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.hour, now=now)
- parsed_date_from = query_date_range.date_from
- parsed_date_to = query_date_range.date_to
-
+ self.assertEqual(query_date_range.date_from(), parser.isoparse("2021-08-23T00:00:00Z"))
self.assertEqual(
- parsed_date_from,
- parser.isoparse("2021-08-23T00:00:00Z"),
- )
- self.assertEqual(
- parsed_date_to,
- parser.isoparse("2021-08-25T00:59:59.999999Z"),
+ query_date_range.date_to(), parser.isoparse("2021-08-25T00:59:59.999999Z")
) # ensure last hour is included
def test_parsed_date_middle_of_hour(self):
@@ -43,34 +28,25 @@ def test_parsed_date_middle_of_hour(self):
date_range = DateRange(date_from="2021-08-23 05:00:00", date_to="2021-08-26 07:00:00")
query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.hour, now=now)
- parsed_date_from = query_date_range.date_from
- parsed_date_to = query_date_range.date_to
-
- self.assertEqual(parsed_date_from, parser.isoparse("2021-08-23 05:00:00Z"))
- self.assertEqual(parsed_date_to, parser.isoparse("2021-08-26 07:00:00Z")) # ensure last hour is included
+ self.assertEqual(query_date_range.date_from(), parser.isoparse("2021-08-23 05:00:00Z"))
+ self.assertEqual(
+ query_date_range.date_to(), parser.isoparse("2021-08-26 07:00:00Z")
+ ) # ensure last hour is included
def test_parsed_date_week(self):
now = parser.isoparse("2021-08-25T00:00:00.000Z")
date_range = DateRange(date_from="-7d")
query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.week, now=now)
- parsed_date_from = query_date_range.date_from
- parsed_date_to = query_date_range.date_to
- self.assertEqual(
- parsed_date_from,
- parser.isoparse("2021-08-18 00:00:00Z"),
- )
- self.assertEqual(
- parsed_date_to,
- parser.isoparse("2021-08-25 23:59:59.999999Z"),
- )
+ self.assertEqual(query_date_range.date_from(), parser.isoparse("2021-08-18 00:00:00Z"))
+ self.assertEqual(query_date_range.date_to(), parser.isoparse("2021-08-25 23:59:59.999999Z"))
def test_is_hourly(self):
now = parser.isoparse("2021-08-25T00:00:00.000Z")
date_range = DateRange(date_from="-48h")
query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.day, now=now)
- self.assertFalse(query_date_range.is_hourly())
+ self.assertFalse(query_date_range.is_hourly)
query_date_range = QueryDateRange(team=self.team, date_range=date_range, interval=IntervalType.hour, now=now)
- self.assertTrue(query_date_range.is_hourly())
+ self.assertTrue(query_date_range.is_hourly)
diff --git a/posthog/management/commands/create_batch_export_from_app.py b/posthog/management/commands/create_batch_export_from_app.py
index 20b0b4c89ca86..eadf71532db02 100644
--- a/posthog/management/commands/create_batch_export_from_app.py
+++ b/posthog/management/commands/create_batch_export_from_app.py
@@ -48,6 +48,12 @@ def add_arguments(self, parser):
default=False,
help="Backfill the newly created BatchExport with the last period of data.",
)
+ parser.add_argument(
+ "--migrate-disabled-plugin-config",
+ action="store_true",
+ default=False,
+ help="Migrate a PluginConfig even if its disabled.",
+ )
def handle(self, *args, **options):
"""Handle creation of a BatchExport from a given PluginConfig."""
@@ -82,8 +88,8 @@ def handle(self, *args, **options):
"destination_data": destination_data,
}
- if dry_run is True:
- self.stdout.write("No BatchExport will be created as this is a dry run or confirmation check rejected.")
+ if dry_run is True or (options["migrate_disabled_plugin_config"] is False and plugin_config.enabled is False):
+ self.stdout.write("No BatchExport will be created as this is a dry run or existing plugin is disabled.")
return json.dumps(batch_export_data, indent=4, default=str)
else:
destination = BatchExportDestination(**batch_export_data["destination_data"])
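Note: a usage sketch of the new flag via Django's call_command, as exercised in the tests below (ids are placeholders):

from django.core.management import call_command

output = call_command(
    "create_batch_export_from_app",
    "--plugin-config-id=123",
    "--team-id=1",
    "--interval=hour",
    "--migrate-disabled-plugin-config",  # create the BatchExport even though the PluginConfig is disabled
)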
diff --git a/posthog/management/commands/test/test_create_batch_export_from_app.py b/posthog/management/commands/test/test_create_batch_export_from_app.py
index 4a51975d86648..bbbb36079d013 100644
--- a/posthog/management/commands/test/test_create_batch_export_from_app.py
+++ b/posthog/management/commands/test/test_create_batch_export_from_app.py
@@ -1,4 +1,5 @@
import datetime as dt
+import itertools
import json
import typing
@@ -116,6 +117,20 @@ def plugin_config(request, s3_plugin_config, snowflake_plugin_config) -> PluginC
raise ValueError(f"Unsupported plugin: {request.param}")
+@pytest.fixture
+def disabled_plugin_config(request, s3_plugin_config, snowflake_plugin_config) -> PluginConfig:
+ if request.param == "S3":
+ s3_plugin_config.enabled = False
+ s3_plugin_config.save()
+ return s3_plugin_config
+ elif request.param == "Snowflake":
+ snowflake_plugin_config.enabled = False
+ snowflake_plugin_config.save()
+ return snowflake_plugin_config
+ else:
+ raise ValueError(f"Unsupported plugin: {request.param}")
+
+
@pytest.mark.django_db
@pytest.mark.parametrize(
"plugin_config,config,expected_type",
@@ -155,7 +170,6 @@ def test_create_batch_export_from_app_fails_with_mismatched_team_id(plugin_confi
@pytest.mark.parametrize("plugin_config", ["S3", "Snowflake"], indirect=True)
def test_create_batch_export_from_app_dry_run(plugin_config):
"""Test a dry_run of the create_batch_export_from_app command."""
-
output = call_command(
"create_batch_export_from_app",
f"--plugin-config-id={plugin_config.id}",
@@ -166,6 +180,7 @@ def test_create_batch_export_from_app_dry_run(plugin_config):
batch_export_data = json.loads(output)
+ assert "id" not in batch_export_data
assert batch_export_data["team_id"] == plugin_config.team.id
assert batch_export_data["interval"] == "hour"
assert batch_export_data["name"] == f"{export_type} Export"
@@ -178,19 +193,14 @@ def test_create_batch_export_from_app_dry_run(plugin_config):
@pytest.mark.django_db
@pytest.mark.parametrize(
"interval,plugin_config,disable_plugin_config",
- [
- ("hour", "S3", True),
- ("hour", "S3", False),
- ("day", "S3", True),
- ("day", "S3", False),
- ("hour", "Snowflake", True),
- ("hour", "Snowflake", False),
- ("day", "Snowflake", True),
- ("day", "Snowflake", False),
- ],
+ itertools.product(["hour", "day"], ["S3", "Snowflake"], [True, False]),
indirect=["plugin_config"],
)
-def test_create_batch_export_from_app(interval, plugin_config, disable_plugin_config):
+def test_create_batch_export_from_app(
+ interval,
+ plugin_config,
+ disable_plugin_config,
+):
"""Test a live run of the create_batch_export_from_app command."""
args = [
f"--plugin-config-id={plugin_config.id}",
@@ -237,6 +247,69 @@ def test_create_batch_export_from_app(interval, plugin_config, disable_plugin_co
assert args[key] == expected
+@pytest.mark.django_db
+@pytest.mark.parametrize(
+ "interval,disabled_plugin_config,migrate_disabled_plugin_config",
+ itertools.product(["hour", "day"], ["S3", "Snowflake"], [True, False]),
+ indirect=["disabled_plugin_config"],
+)
+def test_create_batch_export_from_app_with_disabled_plugin(
+ interval,
+ disabled_plugin_config,
+ migrate_disabled_plugin_config,
+):
+ """Test a live run of the create_batch_export_from_app command."""
+ args = [
+ f"--plugin-config-id={disabled_plugin_config.id}",
+ f"--team-id={disabled_plugin_config.team.id}",
+ f"--interval={interval}",
+ ]
+ if migrate_disabled_plugin_config:
+ args.append("--migrate-disabled-plugin-config")
+
+ output = call_command("create_batch_export_from_app", *args)
+
+ disabled_plugin_config.refresh_from_db()
+ assert disabled_plugin_config.enabled is False
+
+ export_type, config = map_plugin_config_to_destination(disabled_plugin_config)
+
+ batch_export_data = json.loads(output)
+
+ assert batch_export_data["team_id"] == disabled_plugin_config.team.id
+ assert batch_export_data["interval"] == interval
+ assert batch_export_data["name"] == f"{export_type} Export"
+ assert batch_export_data["destination_data"] == {
+ "type": export_type,
+ "config": config,
+ }
+
+ if not migrate_disabled_plugin_config:
+ assert "id" not in batch_export_data
+ return
+
+ assert "id" in batch_export_data
+
+ temporal = sync_connect()
+
+ schedule = describe_schedule(temporal, str(batch_export_data["id"]))
+ expected_interval = dt.timedelta(**{f"{interval}s": 1})
+ assert schedule.schedule.spec.intervals[0].every == expected_interval
+
+ codec = EncryptionCodec(settings=settings)
+ decoded_payload = async_to_sync(codec.decode)(schedule.schedule.action.args)
+ args = json.loads(decoded_payload[0].data)
+
+ # Common inputs
+ assert args["team_id"] == disabled_plugin_config.team.pk
+ assert args["batch_export_id"] == str(batch_export_data["id"])
+ assert args["interval"] == interval
+
+ # Type specific inputs
+ for key, expected in config.items():
+ assert args[key] == expected
+
+
@async_to_sync
async def list_workflows(temporal, schedule_id: str):
"""List Workflows scheduled by given Schedule."""
diff --git a/posthog/migrations/0348_alter_datawarehousetable_format.py b/posthog/migrations/0348_alter_datawarehousetable_format.py
new file mode 100644
index 0000000000000..72434bbc99fdb
--- /dev/null
+++ b/posthog/migrations/0348_alter_datawarehousetable_format.py
@@ -0,0 +1,20 @@
+# Generated by Django 3.2.19 on 2023-09-11 15:22
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("posthog", "0347_add_bigquery_export_type"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="datawarehousetable",
+ name="format",
+ field=models.CharField(
+ choices=[("CSV", "CSV"), ("Parquet", "Parquet"), ("JSONEachRow", "JSON")], max_length=128
+ ),
+ ),
+ ]
diff --git a/posthog/migrations/0349_update_survey_query_name.py b/posthog/migrations/0349_update_survey_query_name.py
new file mode 100644
index 0000000000000..cbcbbb3a0c954
--- /dev/null
+++ b/posthog/migrations/0349_update_survey_query_name.py
@@ -0,0 +1,38 @@
+# Generated by Django 3.2.19 on 2023-09-12 10:35
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("posthog", "0348_alter_datawarehousetable_format"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="survey",
+ name="linked_flag",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="surveys_linked_flag",
+ related_query_name="survey_linked_flag",
+ to="posthog.featureflag",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="survey",
+ name="targeting_flag",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.SET_NULL,
+ related_name="surveys_targeting_flag",
+ related_query_name="survey_targeting_flag",
+ to="posthog.featureflag",
+ ),
+ ),
+ ]
diff --git a/posthog/migrations/0350_add_notebook_text_content.py b/posthog/migrations/0350_add_notebook_text_content.py
new file mode 100644
index 0000000000000..bfe4b079b9945
--- /dev/null
+++ b/posthog/migrations/0350_add_notebook_text_content.py
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.19 on 2023-09-12 18:09
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("posthog", "0349_update_survey_query_name"),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name="notebook",
+ name="text_content",
+ field=models.TextField(blank=True, null=True),
+ ),
+ ]
diff --git a/posthog/models/activity_logging/activity_log.py b/posthog/models/activity_logging/activity_log.py
index ba47b2c326ff1..f3b36e2c3dbd0 100644
--- a/posthog/models/activity_logging/activity_log.py
+++ b/posthog/models/activity_logging/activity_log.py
@@ -99,7 +99,7 @@ class Meta:
field_exclusions: Dict[ActivityScope, List[str]] = {
- "Notebook": ["id", "last_modified_at", "last_modified_by", "created_at", "created_by"],
+ "Notebook": ["id", "last_modified_at", "last_modified_by", "created_at", "created_by", "text_content"],
"FeatureFlag": ["id", "created_at", "created_by", "is_simple_flag", "experiment", "team", "featureflagoverride"],
"Person": [
"id",
diff --git a/posthog/models/event/util.py b/posthog/models/event/util.py
index 8b6a2fbd33d27..8f76e9f79fb91 100644
--- a/posthog/models/event/util.py
+++ b/posthog/models/event/util.py
@@ -3,7 +3,7 @@
import uuid
from typing import Any, Dict, List, Optional, Set, Union
-import pytz
+from zoneinfo import ZoneInfo
from dateutil.parser import isoparse
from django.utils import timezone
from rest_framework import serializers
@@ -47,7 +47,7 @@ def create_event(
timestamp = timezone.now()
assert timestamp is not None
- timestamp = isoparse(timestamp) if isinstance(timestamp, str) else timestamp.astimezone(pytz.utc)
+ timestamp = isoparse(timestamp) if isinstance(timestamp, str) else timestamp.astimezone(ZoneInfo("UTC"))
elements_chain = ""
if elements and len(elements) > 0:
@@ -89,7 +89,9 @@ def format_clickhouse_timestamp(
if default is None:
default = timezone.now()
parsed_datetime = (
- isoparse(raw_timestamp) if isinstance(raw_timestamp, str) else (raw_timestamp or default).astimezone(pytz.utc)
+ isoparse(raw_timestamp)
+ if isinstance(raw_timestamp, str)
+ else (raw_timestamp or default).astimezone(ZoneInfo("UTC"))
)
return parsed_datetime.strftime("%Y-%m-%d %H:%M:%S.%f")
@@ -110,16 +112,16 @@ def bulk_create_events(events: List[Dict[str, Any]], person_mapping: Optional[Di
inserts = []
params: Dict[str, Any] = {}
for index, event in enumerate(events):
- datetime64_default_timestamp = timezone.now().astimezone(pytz.utc).strftime("%Y-%m-%d %H:%M:%S")
+ datetime64_default_timestamp = timezone.now().astimezone(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S")
timestamp = event.get("timestamp") or dt.datetime.now()
if isinstance(timestamp, str):
timestamp = isoparse(timestamp)
# Offset timezone-naive datetime by project timezone, to facilitate @also_test_with_different_timezones
if timestamp.tzinfo is None:
team_timezone = event["team"].timezone if event.get("team") else "UTC"
- timestamp = pytz.timezone(team_timezone).localize(timestamp)
+ timestamp = timestamp.replace(tzinfo=ZoneInfo(team_timezone))
# Format for ClickHouse
- timestamp = timestamp.astimezone(pytz.utc).strftime("%Y-%m-%d %H:%M:%S.%f")
+ timestamp = timestamp.astimezone(ZoneInfo("UTC")).strftime("%Y-%m-%d %H:%M:%S.%f")
elements_chain = ""
if event.get("elements") and len(event["elements"]) > 0:
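
Most of the remaining changes in this diff swap pytz for the standard-library zoneinfo. The behavioural point is that a naive datetime can simply be handed a ZoneInfo via replace(), whereas pytz required localize() to pick the right UTC offset. A minimal sketch of the equivalence, using US/Pacific purely as an example zone (pytz imported here only for contrast):

from datetime import datetime
from zoneinfo import ZoneInfo

import pytz  # for comparison only

naive = datetime(2023, 7, 1, 12, 0)

# old style: pytz needs localize() so the correct offset (and DST rule) is applied
old = pytz.timezone("US/Pacific").localize(naive)

# new style: replace() is enough, zoneinfo resolves the offset lazily
new = naive.replace(tzinfo=ZoneInfo("US/Pacific"))

assert old.utcoffset() == new.utcoffset()  # both -07:00 (PDT) in July
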
diff --git a/posthog/models/feedback/survey.py b/posthog/models/feedback/survey.py
index 9579ab7a5a782..5287747ea98f5 100644
--- a/posthog/models/feedback/survey.py
+++ b/posthog/models/feedback/survey.py
@@ -24,7 +24,7 @@ class Meta:
blank=True,
on_delete=models.SET_NULL,
related_name="surveys_linked_flag",
- related_query_name="survey",
+ related_query_name="survey_linked_flag",
)
targeting_flag: models.ForeignKey = models.ForeignKey(
"posthog.FeatureFlag",
@@ -32,7 +32,7 @@ class Meta:
blank=True,
on_delete=models.SET_NULL,
related_name="surveys_targeting_flag",
- related_query_name="survey",
+ related_query_name="survey_targeting_flag",
)
type: models.CharField = models.CharField(max_length=40, choices=SurveyType.choices)
diff --git a/posthog/models/filters/mixins/common.py b/posthog/models/filters/mixins/common.py
index 530b7b83b9d13..bbb727407c6be 100644
--- a/posthog/models/filters/mixins/common.py
+++ b/posthog/models/filters/mixins/common.py
@@ -4,7 +4,7 @@
from math import ceil
from typing import Any, Dict, List, Literal, Optional, Union, cast
-import pytz
+from zoneinfo import ZoneInfo
from dateutil.relativedelta import relativedelta
from django.utils import timezone
from rest_framework.exceptions import ValidationError
@@ -361,11 +361,13 @@ def date_to(self) -> datetime.datetime:
if isinstance(self._date_to, str):
try:
return datetime.datetime.strptime(self._date_to, "%Y-%m-%d").replace(
- hour=23, minute=59, second=59, microsecond=999999, tzinfo=pytz.UTC
+ hour=23, minute=59, second=59, microsecond=999999, tzinfo=ZoneInfo("UTC")
)
except ValueError:
try:
- return datetime.datetime.strptime(self._date_to, "%Y-%m-%d %H:%M:%S").replace(tzinfo=pytz.UTC)
+ return datetime.datetime.strptime(self._date_to, "%Y-%m-%d %H:%M:%S").replace(
+ tzinfo=ZoneInfo("UTC")
+ )
except ValueError:
date, delta_mapping = relative_date_parse_with_delta_mapping(self._date_to, self.team.timezone_info, always_truncate=True) # type: ignore
self.date_to_delta_mapping = delta_mapping
diff --git a/posthog/models/filters/mixins/retention.py b/posthog/models/filters/mixins/retention.py
index a6d38bf76e7ce..53146bf62a7b3 100644
--- a/posthog/models/filters/mixins/retention.py
+++ b/posthog/models/filters/mixins/retention.py
@@ -90,7 +90,7 @@ def date_to(self) -> datetime:
date_to = date_to + self.period_increment
if self.period == "Hour":
- return date_to
+ return date_to.replace(minute=0, second=0, microsecond=0)
else:
return date_to.replace(hour=0, minute=0, second=0, microsecond=0)
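
For context on the retention date_to change just above: an "Hour" period is now truncated to the top of the hour, mirroring how other periods truncate to midnight. A small illustration with a hypothetical filter value:

from datetime import datetime

date_to = datetime(2020, 6, 10, 16, 13, 42, 123456)  # hypothetical value

# period == "Hour": keep the hour, drop minutes/seconds/microseconds
assert date_to.replace(minute=0, second=0, microsecond=0) == datetime(2020, 6, 10, 16)

# any other period: truncate to midnight, as before
assert date_to.replace(hour=0, minute=0, second=0, microsecond=0) == datetime(2020, 6, 10)
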
diff --git a/posthog/models/filters/test/__snapshots__/test_filter.ambr b/posthog/models/filters/test/__snapshots__/test_filter.ambr
index 922fdf12a27f1..9be8465ff5f0f 100644
--- a/posthog/models/filters/test/__snapshots__/test_filter.ambr
+++ b/posthog/models/filters/test/__snapshots__/test_filter.ambr
@@ -11,6 +11,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -22,6 +23,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -60,6 +62,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -71,6 +74,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -109,6 +113,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -120,6 +125,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -158,6 +164,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -169,6 +176,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
@@ -207,6 +215,7 @@
"posthog_team"."updated_at",
"posthog_team"."anonymize_ips",
"posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
"posthog_team"."ingested_event",
"posthog_team"."autocapture_opt_out",
"posthog_team"."autocapture_exceptions_opt_in",
@@ -218,6 +227,7 @@
"posthog_team"."signup_token",
"posthog_team"."is_demo",
"posthog_team"."access_control",
+ "posthog_team"."week_start_day",
"posthog_team"."inject_web_apps",
"posthog_team"."test_account_filters",
"posthog_team"."test_account_filters_default_checked",
diff --git a/posthog/models/group/util.py b/posthog/models/group/util.py
index 1fcf975ca70b2..fa3520dc9912c 100644
--- a/posthog/models/group/util.py
+++ b/posthog/models/group/util.py
@@ -2,7 +2,7 @@
import json
from typing import Dict, Optional, Union
-import pytz
+from zoneinfo import ZoneInfo
from dateutil.parser import isoparse
from django.utils.timezone import now
@@ -27,7 +27,7 @@ def raw_create_group_ch(
DON'T USE DIRECTLY - `create_group` is the correct option,
unless you specifically want to sync Postgres state from ClickHouse yourself."""
if timestamp is None:
- timestamp = now().astimezone(pytz.utc)
+ timestamp = now().astimezone(ZoneInfo("UTC"))
data = {
"group_type_index": group_type_index,
"group_key": group_key,
@@ -58,7 +58,7 @@ def create_group(
if isinstance(timestamp, str):
timestamp = isoparse(timestamp)
else:
- timestamp = timestamp.astimezone(pytz.utc)
+ timestamp = timestamp.astimezone(ZoneInfo("UTC"))
raw_create_group_ch(team_id, group_type_index, group_key, properties, timestamp, timestamp=timestamp, sync=sync)
group = Group.objects.create(
diff --git a/posthog/models/notebook/notebook.py b/posthog/models/notebook/notebook.py
index dde92fddab944..490645909df26 100644
--- a/posthog/models/notebook/notebook.py
+++ b/posthog/models/notebook/notebook.py
@@ -12,6 +12,7 @@ class Notebook(UUIDModel):
team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE)
title: models.CharField = models.CharField(max_length=256, blank=True, null=True)
content: JSONField = JSONField(default=None, null=True, blank=True)
+ text_content: models.TextField = models.TextField(blank=True, null=True)
deleted: models.BooleanField = models.BooleanField(default=False)
version: models.IntegerField = models.IntegerField(default=0)
created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True)
diff --git a/posthog/models/person/util.py b/posthog/models/person/util.py
index 0be065a3258fb..9af13bc6e9d05 100644
--- a/posthog/models/person/util.py
+++ b/posthog/models/person/util.py
@@ -4,7 +4,7 @@
from typing import Dict, List, Optional, Union
from uuid import UUID
-import pytz
+from zoneinfo import ZoneInfo
from dateutil.parser import isoparse
from django.db.models.query import QuerySet
from django.db.models.signals import post_delete, post_save
@@ -124,12 +124,12 @@ def create_person(
if isinstance(timestamp, str):
timestamp = isoparse(timestamp)
else:
- timestamp = timestamp.astimezone(pytz.utc)
+ timestamp = timestamp.astimezone(ZoneInfo("UTC"))
if created_at is None:
created_at = timestamp
else:
- created_at = created_at.astimezone(pytz.utc)
+ created_at = created_at.astimezone(ZoneInfo("UTC"))
data = {
"id": str(uuid),
diff --git a/posthog/models/session_replay_event/migrations_sql.py b/posthog/models/session_replay_event/migrations_sql.py
index 09f4e300be624..b11f5581c930f 100644
--- a/posthog/models/session_replay_event/migrations_sql.py
+++ b/posthog/models/session_replay_event/migrations_sql.py
@@ -65,3 +65,29 @@
table_name=SESSION_REPLAY_EVENTS_DATA_TABLE(),
cluster=settings.CLICKHOUSE_CLUSTER,
)
+
+# migration to add message and event count columns (plus _timestamp) to the session replay table
+ALTER_SESSION_REPLAY_ADD_EVENT_COUNT_COLUMN = """
+ ALTER TABLE {table_name} on CLUSTER '{cluster}'
+ ADD COLUMN IF NOT EXISTS message_count SimpleAggregateFunction(sum, Int64),
+ ADD COLUMN IF NOT EXISTS event_count SimpleAggregateFunction(sum, Int64),
+ -- fly-by addition so that we can track lag in the data the same way as for other tables
+ ADD COLUMN IF NOT EXISTS _timestamp SimpleAggregateFunction(max, DateTime)
+"""
+
+ADD_EVENT_COUNT_DISTRIBUTED_SESSION_REPLAY_EVENTS_TABLE_SQL = (
+ lambda: ALTER_SESSION_REPLAY_ADD_EVENT_COUNT_COLUMN.format(
+ table_name="session_replay_events",
+ cluster=settings.CLICKHOUSE_CLUSTER,
+ )
+)
+
+ADD_EVENT_COUNT_WRITABLE_SESSION_REPLAY_EVENTS_TABLE_SQL = lambda: ALTER_SESSION_REPLAY_ADD_EVENT_COUNT_COLUMN.format(
+ table_name="writable_session_replay_events",
+ cluster=settings.CLICKHOUSE_CLUSTER,
+)
+
+ADD_EVENT_COUNT_SESSION_REPLAY_EVENTS_TABLE_SQL = lambda: ALTER_SESSION_REPLAY_ADD_EVENT_COUNT_COLUMN.format(
+ table_name=SESSION_REPLAY_EVENTS_DATA_TABLE(),
+ cluster=settings.CLICKHOUSE_CLUSTER,
+)
diff --git a/posthog/models/session_replay_event/sql.py b/posthog/models/session_replay_event/sql.py
index 1221fd80bb6de..dfe839843979f 100644
--- a/posthog/models/session_replay_event/sql.py
+++ b/posthog/models/session_replay_event/sql.py
@@ -27,7 +27,9 @@
console_log_count Int64,
console_warn_count Int64,
console_error_count Int64,
- size Int64
+ size Int64,
+ event_count Int64,
+ message_count Int64
) ENGINE = {engine}
"""
@@ -53,7 +55,15 @@
console_warn_count SimpleAggregateFunction(sum, Int64),
console_error_count SimpleAggregateFunction(sum, Int64),
-- this column allows us to estimate the amount of data that is being ingested
- size SimpleAggregateFunction(sum, Int64)
+ size SimpleAggregateFunction(sum, Int64),
+ -- this allows us to count the number of messages received in a session
+ -- often very useful in incidents or debugging
+ message_count SimpleAggregateFunction(sum, Int64),
+ -- this allows us to count the number of snapshot events received in a session
+ -- often very useful in incidents or debugging
+ -- because we batch events we expect message_count to be lower than event_count
+ event_count SimpleAggregateFunction(sum, Int64),
+ _timestamp SimpleAggregateFunction(max, DateTime)
) ENGINE = {engine}
"""
@@ -117,7 +127,11 @@
sum(console_log_count) as console_log_count,
sum(console_warn_count) as console_warn_count,
sum(console_error_count) as console_error_count,
-sum(size) as size
+sum(size) as size,
+-- we can count the number of Kafka messages instead of sending a count explicitly
+sum(message_count) as message_count,
+sum(event_count) as event_count,
+max(_timestamp) as _timestamp
FROM {database}.kafka_session_replay_events
group by session_id, team_id
""".format(
diff --git a/posthog/models/test/test_subscription_model.py b/posthog/models/test/test_subscription_model.py
index 232b6c99791cc..bc9bf583e6f15 100644
--- a/posthog/models/test/test_subscription_model.py
+++ b/posthog/models/test/test_subscription_model.py
@@ -3,7 +3,7 @@
import jwt
import pytest
-import pytz
+from zoneinfo import ZoneInfo
from django.conf import settings
from django.utils import timezone
from freezegun import freeze_time
@@ -33,7 +33,7 @@ def _create_insight_subscription(self, **kwargs):
target_value="tests@posthog.com",
frequency="weekly",
interval=2,
- start_date=datetime(2022, 1, 1, 0, 0, 0, 0).replace(tzinfo=pytz.UTC),
+ start_date=datetime(2022, 1, 1, 0, 0, 0, 0).replace(tzinfo=ZoneInfo("UTC")),
)
params.update(**kwargs)
@@ -44,8 +44,8 @@ def test_creation(self):
subscription.save()
assert subscription.title == "My Subscription"
- subscription.set_next_delivery_date(datetime(2022, 1, 2, 0, 0, 0).replace(tzinfo=pytz.UTC))
- assert subscription.next_delivery_date == datetime(2022, 1, 15, 0, 0).replace(tzinfo=pytz.UTC)
+ subscription.set_next_delivery_date(datetime(2022, 1, 2, 0, 0, 0).replace(tzinfo=ZoneInfo("UTC")))
+ assert subscription.next_delivery_date == datetime(2022, 1, 15, 0, 0).replace(tzinfo=ZoneInfo("UTC"))
def test_update_next_delivery_date_on_save(self):
subscription = self._create_insight_subscription()
@@ -60,7 +60,7 @@ def test_only_updates_next_delivery_date_if_rrule_changes(self):
old_date = subscription.next_delivery_date
# Change a property that does affect it
- subscription.start_date = datetime(2023, 1, 1, 0, 0, 0, 0).replace(tzinfo=pytz.UTC)
+ subscription.start_date = datetime(2023, 1, 1, 0, 0, 0, 0).replace(tzinfo=ZoneInfo("UTC"))
subscription.save()
assert old_date != subscription.next_delivery_date
old_date = subscription.next_delivery_date
@@ -72,7 +72,6 @@ def test_only_updates_next_delivery_date_if_rrule_changes(self):
assert old_date == subscription.next_delivery_date
def test_generating_token(self):
-
subscription = self._create_insight_subscription(
target_value="test1@posthog.com,test2@posthog.com,test3@posthog.com"
)
@@ -143,13 +142,13 @@ def test_complex_rrule_configuration(self):
# Last wed or fri of 01.22 is Fri 28th
subscription.save()
- assert subscription.next_delivery_date == datetime(2022, 1, 28, 0, 0).replace(tzinfo=pytz.UTC)
+ assert subscription.next_delivery_date == datetime(2022, 1, 28, 0, 0).replace(tzinfo=ZoneInfo("UTC"))
# Last wed or fri of 03.22 is Wed 30th
subscription.set_next_delivery_date(subscription.next_delivery_date)
- assert subscription.next_delivery_date == datetime(2022, 3, 30, 0, 0).replace(tzinfo=pytz.UTC)
+ assert subscription.next_delivery_date == datetime(2022, 3, 30, 0, 0).replace(tzinfo=ZoneInfo("UTC"))
# Last wed or fri of 05.22 is Fri 27th
subscription.set_next_delivery_date(subscription.next_delivery_date)
- assert subscription.next_delivery_date == datetime(2022, 5, 27, 0, 0).replace(tzinfo=pytz.UTC)
+ assert subscription.next_delivery_date == datetime(2022, 5, 27, 0, 0).replace(tzinfo=ZoneInfo("UTC"))
def test_should_work_for_nth_days(self):
# Equivalent to last monday and wednesday of every other month
@@ -160,15 +159,15 @@ def test_should_work_for_nth_days(self):
byweekday=["monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday"],
)
subscription.save()
- assert subscription.next_delivery_date == datetime(2022, 1, 3, 0, 0).replace(tzinfo=pytz.UTC)
+ assert subscription.next_delivery_date == datetime(2022, 1, 3, 0, 0).replace(tzinfo=ZoneInfo("UTC"))
subscription.set_next_delivery_date(subscription.next_delivery_date)
- assert subscription.next_delivery_date == datetime(2022, 2, 3, 0, 0).replace(tzinfo=pytz.UTC)
+ assert subscription.next_delivery_date == datetime(2022, 2, 3, 0, 0).replace(tzinfo=ZoneInfo("UTC"))
def test_should_ignore_bysetpos_if_missing_weeekday(self):
# Equivalent to last monday and wednesday of every other month
subscription = self._create_insight_subscription(interval=1, frequency="monthly", bysetpos=3)
subscription.save()
- assert subscription.next_delivery_date == datetime(2022, 2, 1, 0, 0).replace(tzinfo=pytz.UTC)
+ assert subscription.next_delivery_date == datetime(2022, 2, 1, 0, 0).replace(tzinfo=ZoneInfo("UTC"))
def test_subscription_summary(self):
subscription = self._create_insight_subscription(interval=1, frequency="monthly", bysetpos=None)
diff --git a/posthog/queries/app_metrics/historical_exports.py b/posthog/queries/app_metrics/historical_exports.py
index 4b7f2864546ad..484f01546001b 100644
--- a/posthog/queries/app_metrics/historical_exports.py
+++ b/posthog/queries/app_metrics/historical_exports.py
@@ -2,7 +2,7 @@
from datetime import timedelta
from typing import Dict, Optional
-import pytz
+from zoneinfo import ZoneInfo
from posthog.models.activity_logging.activity_log import ActivityLog
from posthog.models.plugin import PluginStorage
@@ -65,10 +65,12 @@ def historical_export_metrics(team: Team, plugin_config_id: int, job_id: str):
filter_data = {
"category": "exportEvents",
"job_id": job_id,
- "date_from": (export_summary["created_at"] - timedelta(hours=1)).astimezone(pytz.utc).isoformat(),
+ "date_from": (export_summary["created_at"] - timedelta(hours=1)).astimezone(ZoneInfo("UTC")).isoformat(),
}
if "finished_at" in export_summary:
- filter_data["date_to"] = (export_summary["finished_at"] + timedelta(hours=1)).astimezone(pytz.utc).isoformat()
+ filter_data["date_to"] = (
+ (export_summary["finished_at"] + timedelta(hours=1)).astimezone(ZoneInfo("UTC")).isoformat()
+ )
filter = AppMetricsRequestSerializer(data=filter_data)
filter.is_valid(raise_exception=True)
diff --git a/posthog/queries/funnels/test/test_funnel_trends.py b/posthog/queries/funnels/test/test_funnel_trends.py
index 1cb191c017ad2..12e8b81af02a5 100644
--- a/posthog/queries/funnels/test/test_funnel_trends.py
+++ b/posthog/queries/funnels/test/test_funnel_trends.py
@@ -1,6 +1,6 @@
from datetime import date, datetime, timedelta
-import pytz
+from zoneinfo import ZoneInfo
from freezegun.api import freeze_time
from posthog.constants import INSIGHT_FUNNELS, TRENDS_LINEAR, FunnelOrderType
@@ -113,43 +113,43 @@ def test_only_one_user_reached_one_step(self):
"reached_to_step_count": 0,
"conversion_rate": 0,
"reached_from_step_count": 1,
- "timestamp": datetime(2021, 6, 7, 0, 0).replace(tzinfo=pytz.UTC),
+ "timestamp": datetime(2021, 6, 7, 0, 0).replace(tzinfo=ZoneInfo("UTC")),
},
{
"reached_to_step_count": 0,
"conversion_rate": 0,
"reached_from_step_count": 0,
- "timestamp": datetime(2021, 6, 8, 0, 0).replace(tzinfo=pytz.UTC),
+ "timestamp": datetime(2021, 6, 8, 0, 0).replace(tzinfo=ZoneInfo("UTC")),
},
{
"reached_to_step_count": 0,
"conversion_rate": 0,
"reached_from_step_count": 0,
- "timestamp": datetime(2021, 6, 9, 0, 0).replace(tzinfo=pytz.UTC),
+ "timestamp": datetime(2021, 6, 9, 0, 0).replace(tzinfo=ZoneInfo("UTC")),
},
{
"reached_to_step_count": 0,
"conversion_rate": 0,
"reached_from_step_count": 0,
- "timestamp": datetime(2021, 6, 10, 0, 0).replace(tzinfo=pytz.UTC),
+ "timestamp": datetime(2021, 6, 10, 0, 0).replace(tzinfo=ZoneInfo("UTC")),
},
{
"reached_to_step_count": 0,
"conversion_rate": 0,
"reached_from_step_count": 0,
- "timestamp": datetime(2021, 6, 11, 0, 0).replace(tzinfo=pytz.UTC),
+ "timestamp": datetime(2021, 6, 11, 0, 0).replace(tzinfo=ZoneInfo("UTC")),
},
{
"reached_to_step_count": 0,
"conversion_rate": 0,
"reached_from_step_count": 0,
- "timestamp": datetime(2021, 6, 12, 0, 0).replace(tzinfo=pytz.UTC),
+ "timestamp": datetime(2021, 6, 12, 0, 0).replace(tzinfo=ZoneInfo("UTC")),
},
{
"reached_to_step_count": 0,
"conversion_rate": 0,
"reached_from_step_count": 0,
- "timestamp": datetime(2021, 6, 13, 0, 0).replace(tzinfo=pytz.UTC),
+ "timestamp": datetime(2021, 6, 13, 0, 0).replace(tzinfo=ZoneInfo("UTC")),
},
],
)
@@ -531,8 +531,8 @@ def test_period_not_final(self):
self.assertEqual(day["reached_to_step_count"], 0)
self.assertEqual(day["conversion_rate"], 0)
self.assertEqual(
- day["timestamp"].replace(tzinfo=pytz.UTC),
- (datetime(now.year, now.month, now.day) - timedelta(1)).replace(tzinfo=pytz.UTC),
+ day["timestamp"].replace(tzinfo=ZoneInfo("UTC")),
+ (datetime(now.year, now.month, now.day) - timedelta(1)).replace(tzinfo=ZoneInfo("UTC")),
)
day = results[1] # today
@@ -540,7 +540,8 @@ def test_period_not_final(self):
self.assertEqual(day["reached_to_step_count"], 1)
self.assertEqual(day["conversion_rate"], 100)
self.assertEqual(
- day["timestamp"].replace(tzinfo=pytz.UTC), datetime(now.year, now.month, now.day).replace(tzinfo=pytz.UTC)
+ day["timestamp"].replace(tzinfo=ZoneInfo("UTC")),
+ datetime(now.year, now.month, now.day).replace(tzinfo=ZoneInfo("UTC")),
)
def test_two_runs_by_single_user_in_one_period(self):
diff --git a/posthog/queries/properties_timeline/properties_timeline_event_query.py b/posthog/queries/properties_timeline/properties_timeline_event_query.py
index 5f35a5d91869a..d3ca17eb70091 100644
--- a/posthog/queries/properties_timeline/properties_timeline_event_query.py
+++ b/posthog/queries/properties_timeline/properties_timeline_event_query.py
@@ -1,7 +1,6 @@
import datetime as dt
from typing import Any, Dict, Optional, Tuple
-
-import pytz
+from zoneinfo import ZoneInfo
from posthog.models.entity.util import get_entity_filtering_params
from posthog.models.filters.properties_timeline_filter import PropertiesTimelineFilter
@@ -76,7 +75,7 @@ def _determine_should_join_sessions(self) -> None:
def _get_date_filter(self) -> Tuple[str, Dict]:
query_params: Dict[str, Any] = {}
query_date_range = QueryDateRange(self._filter, self._team)
- effective_timezone = pytz.timezone(self._team.timezone)
+ effective_timezone = ZoneInfo(self._team.timezone)
# Get effective date range from QueryDateRange
# We need to explicitly replace tzinfo in those datetimes with the team's timezone, because QueryDateRange
# does not reliably make those datetimes timezone-aware. That's annoying, but it'd be a significant effort
diff --git a/posthog/queries/query_date_range.py b/posthog/queries/query_date_range.py
index 927d2766a2358..208bf0207843d 100644
--- a/posthog/queries/query_date_range.py
+++ b/posthog/queries/query_date_range.py
@@ -1,8 +1,8 @@
from datetime import datetime, timedelta
from functools import cached_property
from typing import Dict, Literal, Optional, Tuple
+from zoneinfo import ZoneInfo
-import pytz
from dateutil.relativedelta import relativedelta
from django.utils import timezone
from posthog.models.filters.base_filter import BaseFilter
@@ -82,7 +82,7 @@ def _now(self):
return self._localize_to_team(timezone.now())
def _localize_to_team(self, target: datetime):
- return target.astimezone(pytz.timezone(self._team.timezone))
+ return target.astimezone(ZoneInfo(self._team.timezone))
@cached_property
def date_to_clause(self):
diff --git a/posthog/queries/retention/retention.py b/posthog/queries/retention/retention.py
index dc8f41175521d..145ee1404c37b 100644
--- a/posthog/queries/retention/retention.py
+++ b/posthog/queries/retention/retention.py
@@ -1,7 +1,6 @@
from typing import Any, Dict, List, Optional, Tuple
from urllib.parse import urlencode
-
-import pytz
+from zoneinfo import ZoneInfo
from posthog.constants import RETENTION_FIRST_TIME, RetentionQueryType
from posthog.models.filters.retention_filter import RetentionFilter
@@ -33,7 +32,6 @@ def run(self, filter: RetentionFilter, team: Team, *args, **kwargs) -> List[Dict
def _get_retention_by_breakdown_values(
self, filter: RetentionFilter, team: Team
) -> Dict[CohortKey, Dict[str, Any]]:
-
actor_query, actor_query_params = build_actor_activity_query(
filter=filter, team=team, retention_events_query=self.event_query
)
@@ -109,11 +107,8 @@ def construct_url(first_day):
for day in range(filter.total_intervals - first_day)
],
"label": "{} {}".format(filter.period, first_day),
- "date": pytz.timezone(team.timezone).localize(
- (filter.date_from + RetentionFilter.determine_time_delta(first_day, filter.period)[0]).replace(
- tzinfo=None
- )
- ),
+ "date": filter.date_from.replace(tzinfo=ZoneInfo(team.timezone))
+ + RetentionFilter.determine_time_delta(first_day, filter.period)[0],
"people_url": construct_url(first_day),
}
for first_day in range(filter.total_intervals)
diff --git a/posthog/queries/session_recordings/test/test_session_replay_summaries.py b/posthog/queries/session_recordings/test/test_session_replay_summaries.py
index 0a87ac7473e5a..0b3e361fa9511 100644
--- a/posthog/queries/session_recordings/test/test_session_replay_summaries.py
+++ b/posthog/queries/session_recordings/test/test_session_replay_summaries.py
@@ -1,7 +1,7 @@
from datetime import datetime, timedelta
from uuid import uuid4
-import pytz
+from zoneinfo import ZoneInfo
from dateutil.parser import isoparse
from freezegun import freeze_time
@@ -147,8 +147,8 @@ def test_session_replay_summaries_can_be_queried(self):
session_id,
self.team.pk,
str(self.user.distinct_id),
- datetime(2023, 4, 27, 10, 0, 0, 309000, tzinfo=pytz.UTC),
- datetime(2023, 4, 27, 19, 20, 24, 597000, tzinfo=pytz.UTC),
+ datetime(2023, 4, 27, 10, 0, 0, 309000, tzinfo=ZoneInfo("UTC")),
+ datetime(2023, 4, 27, 19, 20, 24, 597000, tzinfo=ZoneInfo("UTC")),
33624,
"https://first-url-ingested.com",
6,
diff --git a/posthog/queries/test/test_retention.py b/posthog/queries/test/test_retention.py
index 7f49141447b9a..42b7c596b14a9 100644
--- a/posthog/queries/test/test_retention.py
+++ b/posthog/queries/test/test_retention.py
@@ -2,7 +2,7 @@
import uuid
from datetime import datetime
-import pytz
+from zoneinfo import ZoneInfo
from django.test import override_settings
from rest_framework import status
@@ -36,15 +36,14 @@ def _create_action(**kwargs):
def _create_signup_actions(team, user_and_timestamps):
-
for distinct_id, timestamp in user_and_timestamps:
_create_event(team=team, event="sign up", distinct_id=distinct_id, timestamp=timestamp)
sign_up_action = _create_action(team=team, name="sign up")
return sign_up_action
-def _date(day, hour=5, month=0):
- return datetime(2020, 6 + month, 10 + day, hour).isoformat()
+def _date(day, hour=5, month=0, minute=0):
+ return datetime(2020, 6 + month, 10 + day, hour, minute).isoformat()
def pluck(list_of_dicts, key, child_key=None):
@@ -53,7 +52,7 @@ def pluck(list_of_dicts, key, child_key=None):
def _create_events(team, user_and_timestamps, event="$pageview"):
i = 0
- for (distinct_id, timestamp, *properties_args) in user_and_timestamps:
+ for distinct_id, timestamp, *properties_args in user_and_timestamps:
properties = {"$some_property": "value"} if i % 2 == 0 else {}
if len(properties_args) == 1:
properties.update(properties_args[0])
@@ -129,7 +128,7 @@ def test_day_interval(self):
pluck(result, "label"),
["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6", "Day 7", "Day 8", "Day 9", "Day 10"],
)
- self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=pytz.UTC))
+ self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")))
self.assertEqual(
pluck(result, "values", "count"),
@@ -211,17 +210,17 @@ def test_month_interval(self):
self.assertEqual(
pluck(result, "date"),
[
- datetime(2020, 1, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 2, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 3, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 4, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 5, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 6, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 7, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 8, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 9, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 10, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 11, 10, 0, tzinfo=pytz.UTC),
+ datetime(2020, 1, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 2, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 3, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 4, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 5, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 7, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 8, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 9, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 10, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 11, 10, 0, tzinfo=ZoneInfo("UTC")),
],
)
@@ -372,17 +371,17 @@ def test_month_interval_with_person_on_events_v2(self):
self.assertEqual(
pluck(result, "date"),
[
- datetime(2020, 1, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 2, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 3, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 4, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 5, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 6, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 7, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 8, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 9, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 10, 10, 0, tzinfo=pytz.UTC),
- datetime(2020, 11, 10, 0, tzinfo=pytz.UTC),
+ datetime(2020, 1, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 2, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 3, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 4, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 5, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 7, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 8, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 9, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 10, 10, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 11, 10, 0, tzinfo=ZoneInfo("UTC")),
],
)
@@ -425,13 +424,13 @@ def test_week_interval(self):
self.assertEqual(
pluck(result, "date"),
[
- datetime(2020, 6, 7, 0, tzinfo=pytz.UTC),
- datetime(2020, 6, 14, 0, tzinfo=pytz.UTC),
- datetime(2020, 6, 21, 0, tzinfo=pytz.UTC),
- datetime(2020, 6, 28, 0, tzinfo=pytz.UTC),
- datetime(2020, 7, 5, 0, tzinfo=pytz.UTC),
- datetime(2020, 7, 12, 0, tzinfo=pytz.UTC),
- datetime(2020, 7, 19, 0, tzinfo=pytz.UTC),
+ datetime(2020, 6, 7, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 14, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 21, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 28, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 7, 5, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 7, 12, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 7, 19, 0, tzinfo=ZoneInfo("UTC")),
],
)
@@ -457,7 +456,7 @@ def test_hour_interval(self):
],
)
- filter = RetentionFilter(data={"date_to": _date(0, hour=16), "period": "Hour"})
+ filter = RetentionFilter(data={"date_to": _date(0, hour=16, minute=13), "period": "Hour"})
result = retention().run(filter, self.team, total_intervals=11)
@@ -498,17 +497,17 @@ def test_hour_interval(self):
self.assertEqual(
pluck(result, "date"),
[
- datetime(2020, 6, 10, 6, tzinfo=pytz.UTC),
- datetime(2020, 6, 10, 7, tzinfo=pytz.UTC),
- datetime(2020, 6, 10, 8, tzinfo=pytz.UTC),
- datetime(2020, 6, 10, 9, tzinfo=pytz.UTC),
- datetime(2020, 6, 10, 10, tzinfo=pytz.UTC),
- datetime(2020, 6, 10, 11, tzinfo=pytz.UTC),
- datetime(2020, 6, 10, 12, tzinfo=pytz.UTC),
- datetime(2020, 6, 10, 13, tzinfo=pytz.UTC),
- datetime(2020, 6, 10, 14, tzinfo=pytz.UTC),
- datetime(2020, 6, 10, 15, tzinfo=pytz.UTC),
- datetime(2020, 6, 10, 16, tzinfo=pytz.UTC),
+ datetime(2020, 6, 10, 6, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 10, 7, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 10, 8, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 10, 9, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 10, 10, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 10, 11, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 10, 12, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 10, 13, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 10, 14, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 10, 15, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 10, 16, tzinfo=ZoneInfo("UTC")),
],
)
@@ -552,13 +551,13 @@ def test_interval_rounding(self):
self.assertEqual(
pluck(result, "date"),
[
- datetime(2020, 6, 7, 0, tzinfo=pytz.UTC),
- datetime(2020, 6, 14, 0, tzinfo=pytz.UTC),
- datetime(2020, 6, 21, 0, tzinfo=pytz.UTC),
- datetime(2020, 6, 28, 0, tzinfo=pytz.UTC),
- datetime(2020, 7, 5, 0, tzinfo=pytz.UTC),
- datetime(2020, 7, 12, 0, tzinfo=pytz.UTC),
- datetime(2020, 7, 19, 0, tzinfo=pytz.UTC),
+ datetime(2020, 6, 7, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 14, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 21, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 6, 28, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 7, 5, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 7, 12, 0, tzinfo=ZoneInfo("UTC")),
+ datetime(2020, 7, 19, 0, tzinfo=ZoneInfo("UTC")),
],
)
@@ -838,7 +837,7 @@ def test_retention_event_action(self):
self.assertEqual(len(result), 7)
self.assertEqual(pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"])
- self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=pytz.UTC))
+ self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")))
self.assertEqual(
pluck(result, "values", "count"),
@@ -871,7 +870,6 @@ def test_first_time_retention(self):
)
def test_retention_with_properties(self):
-
_create_person(team_id=self.team.pk, distinct_ids=["person1", "alias1"])
_create_person(team_id=self.team.pk, distinct_ids=["person2"])
@@ -902,7 +900,7 @@ def test_retention_with_properties(self):
pluck(result, "label"),
["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6", "Day 7", "Day 8", "Day 9", "Day 10"],
)
- self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=pytz.UTC))
+ self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")))
self.assertEqual(
pluck(result, "values", "count"),
@@ -956,7 +954,7 @@ def test_retention_with_user_properties(self):
self.assertEqual(len(result), 7)
self.assertEqual(pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"])
- self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=pytz.UTC))
+ self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")))
self.assertEqual(
pluck(result, "values", "count"),
[[1, 1, 1, 0, 0, 1, 1], [1, 1, 0, 0, 1, 1], [1, 0, 0, 1, 1], [0, 0, 0, 0], [0, 0, 0], [1, 1], [1]],
@@ -1006,7 +1004,7 @@ def test_retention_with_user_properties_via_action(self):
self.assertEqual(len(result), 7)
self.assertEqual(pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"])
- self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=pytz.UTC))
+ self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")))
self.assertEqual(
pluck(result, "values", "count"),
[[1, 1, 1, 0, 0, 1, 1], [1, 1, 0, 0, 1, 1], [1, 0, 0, 1, 1], [0, 0, 0, 0], [0, 0, 0], [1, 1], [1]],
@@ -1047,7 +1045,7 @@ def test_retention_action_start_point(self):
self.assertEqual(len(result), 7)
self.assertEqual(pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"])
- self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=pytz.UTC))
+ self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")))
self.assertEqual(
pluck(result, "values", "count"),
@@ -1086,7 +1084,7 @@ def test_filter_test_accounts(self):
pluck(result, "label"),
["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6", "Day 7", "Day 8", "Day 9", "Day 10"],
)
- self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=pytz.UTC))
+ self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")))
self.assertEqual(
pluck(result, "values", "count"),
@@ -1156,7 +1154,6 @@ def _create_first_time_retention_events(self):
return p1, p2, p3, p4
def test_retention_aggregate_by_distinct_id(self):
-
_create_person(team_id=self.team.pk, distinct_ids=["person1", "alias1"], properties={"test": "ok"})
_create_person(team_id=self.team.pk, distinct_ids=["person2"])
@@ -1196,7 +1193,7 @@ def test_retention_aggregate_by_distinct_id(self):
"Day 10",
],
)
- self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=pytz.UTC))
+ self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")))
self.assertEqual(
pluck(result, "values", "count"),
@@ -1270,7 +1267,7 @@ def test_timezones(self):
["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6", "Day 7", "Day 8", "Day 9", "Day 10"],
)
- self.assertEqual(result_pacific[0]["date"], pytz.timezone("US/Pacific").localize(datetime(2020, 6, 10)))
+ self.assertEqual(result_pacific[0]["date"], datetime(2020, 6, 10, tzinfo=ZoneInfo("US/Pacific")))
self.assertEqual(result_pacific[0]["date"].isoformat(), "2020-06-10T00:00:00-07:00")
self.assertEqual(
@@ -1337,7 +1334,7 @@ def test_day_interval_sampled(self):
pluck(result, "label"),
["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6", "Day 7", "Day 8", "Day 9", "Day 10"],
)
- self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=pytz.UTC))
+ self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC")))
self.assertEqual(
pluck(result, "values", "count"),
diff --git a/posthog/queries/test/test_trends.py b/posthog/queries/test/test_trends.py
index 155afbe22c854..3cce0cfd1907a 100644
--- a/posthog/queries/test/test_trends.py
+++ b/posthog/queries/test/test_trends.py
@@ -5,7 +5,7 @@
from unittest.mock import patch, ANY
from urllib.parse import parse_qsl, urlparse
-import pytz
+from zoneinfo import ZoneInfo
from django.conf import settings
from django.core.cache import cache
from django.test import override_settings
@@ -1631,8 +1631,8 @@ def test_hour_interval(self):
)
self.assertEqual(
{
- "date_from": datetime(2020, 11, 1, 12, tzinfo=pytz.UTC),
- "date_to": datetime(2020, 11, 1, 13, tzinfo=pytz.UTC),
+ "date_from": datetime(2020, 11, 1, 12, tzinfo=ZoneInfo("UTC")),
+ "date_to": datetime(2020, 11, 1, 13, tzinfo=ZoneInfo("UTC")),
"entity_id": "event_name",
"entity_math": None,
"entity_order": None,
@@ -1687,8 +1687,8 @@ def test_day_interval(self):
)
self.assertEqual(
{
- "date_from": datetime(2020, 11, 1, tzinfo=pytz.UTC),
- "date_to": datetime(2020, 11, 1, 23, 59, 59, 999999, tzinfo=pytz.UTC),
+ "date_from": datetime(2020, 11, 1, tzinfo=ZoneInfo("UTC")),
+ "date_to": datetime(2020, 11, 1, 23, 59, 59, 999999, tzinfo=ZoneInfo("UTC")),
"entity_id": "event_name",
"entity_math": None,
"entity_order": None,
@@ -3837,8 +3837,8 @@ def test_breakdown_hour_interval(self):
{
"breakdown_type": "event",
"breakdown_value": "Safari",
- "date_from": datetime(2020, 11, 1, 12, tzinfo=pytz.UTC),
- "date_to": datetime(2020, 11, 1, 13, tzinfo=pytz.UTC),
+ "date_from": datetime(2020, 11, 1, 12, tzinfo=ZoneInfo("UTC")),
+ "date_to": datetime(2020, 11, 1, 13, tzinfo=ZoneInfo("UTC")),
"entity_id": "event_name",
"entity_math": None,
"entity_type": "events",
@@ -5603,7 +5603,7 @@ def test_timezones_hourly_relative_from(self):
timestamp="2020-01-05T08:01:01",
)
- query_time = pytz.timezone(self.team.timezone).localize(datetime(2020, 1, 5, 10, 1, 1))
+ query_time = datetime(2020, 1, 5, 10, 1, 1, tzinfo=ZoneInfo(self.team.timezone))
utc_offset_hours = query_time.tzinfo.utcoffset(query_time).total_seconds() // 3600 # type: ignore
utc_offset_sign = "-" if utc_offset_hours < 0 else "+"
with freeze_time(query_time):
@@ -5797,7 +5797,7 @@ def test_timezones_daily(self):
timestamp="2020-01-06T00:30:01", # Shouldn't be included anywhere
)
- with freeze_time(pytz.timezone(self.team.timezone).localize(datetime(2020, 1, 5, 5, 0))):
+ with freeze_time(datetime(2020, 1, 5, 5, 0, tzinfo=ZoneInfo(self.team.timezone))):
response = Trends().run(
Filter(data={"date_from": "-7d", "events": [{"id": "sign up", "name": "sign up"}]}, team=self.team),
self.team,
@@ -6013,7 +6013,7 @@ def test_timezones_weekly(self):
self.team.save()
# TRICKY: This is the previous UTC day in Asia/Tokyo
- with freeze_time(pytz.timezone(self.team.timezone).localize(datetime(2020, 1, 26, 3, 0))):
+ with freeze_time(datetime(2020, 1, 26, 3, 0, tzinfo=ZoneInfo(self.team.timezone))):
# Total volume query
response_sunday = Trends().run(
Filter(
@@ -6034,7 +6034,7 @@ def test_timezones_weekly(self):
self.team.save()
# TRICKY: This is the previous UTC day in Asia/Tokyo
- with freeze_time(pytz.timezone(self.team.timezone).localize(datetime(2020, 1, 26, 3, 0))):
+ with freeze_time(datetime(2020, 1, 26, 3, 0, tzinfo=ZoneInfo(self.team.timezone))):
# Total volume query
response_monday = Trends().run(
Filter(
diff --git a/posthog/queries/trends/breakdown.py b/posthog/queries/trends/breakdown.py
index b5ffeb0b3c33a..7fe281a0c158c 100644
--- a/posthog/queries/trends/breakdown.py
+++ b/posthog/queries/trends/breakdown.py
@@ -4,7 +4,7 @@
from datetime import datetime
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
-import pytz
+from zoneinfo import ZoneInfo
from django.forms import ValidationError
from posthog.constants import (
@@ -294,7 +294,6 @@ def get_query(self) -> Tuple[str, Dict, Callable]:
)
else:
-
breakdown_filter = breakdown_filter.format(**breakdown_filter_params)
if self.entity.math in [WEEKLY_ACTIVE, MONTHLY_ACTIVE]:
@@ -476,7 +475,6 @@ def _get_breakdown_value(self, breakdown: str) -> str:
return breakdown_value
def _get_histogram_breakdown_values(self, raw_breakdown_value: str, buckets: List[int]):
-
multi_if_conditionals = []
values_arr = []
@@ -599,8 +597,8 @@ def _get_persons_url(
getattr(point_date, "hour", 0),
getattr(point_date, "minute", 0),
getattr(point_date, "second", 0),
- tzinfo=getattr(point_date, "tzinfo", pytz.UTC),
- ).astimezone(pytz.UTC)
+ tzinfo=getattr(point_date, "tzinfo", ZoneInfo("UTC")),
+ ).astimezone(ZoneInfo("UTC"))
filter_params = filter.to_params()
extra_params = {
diff --git a/posthog/queries/trends/test/test_person.py b/posthog/queries/trends/test/test_person.py
index 1d98dfd83b7c9..f68a4ed13b9bd 100644
--- a/posthog/queries/trends/test/test_person.py
+++ b/posthog/queries/trends/test/test_person.py
@@ -1,8 +1,11 @@
+import json
+from datetime import datetime
from uuid import UUID
from dateutil.relativedelta import relativedelta
from django.utils import timezone
from freezegun.api import freeze_time
+from unittest.case import skip
from posthog.models.entity import Entity
from posthog.models.filters import Filter
@@ -15,12 +18,12 @@
ClickhouseTestMixin,
_create_event,
_create_person,
+ flush_persons_and_events,
snapshot_clickhouse_queries,
)
class TestPerson(ClickhouseTestMixin, APIBaseTest):
-
# Note: not using `@snapshot_clickhouse_queries` here because the ordering of the session_ids in the recording
# query is not guaranteed, so adding it would lead to a flaky test.
@freeze_time("2021-01-21T20:00:00.000Z")
@@ -155,3 +158,145 @@ def test_group_query_includes_recording_events(self):
}
],
)
+
+
+class TestPersonIntegration(ClickhouseTestMixin, APIBaseTest):
+ def test_weekly_active_users(self):
+ for d in range(10, 18): # create a person and event for each day 10. Sep - 17. Sep
+ _create_person(team_id=self.team.pk, distinct_ids=[f"u_{d}"])
+ _create_event(
+ event="pageview",
+ distinct_id=f"u_{d}",
+ team=self.team,
+ timestamp=datetime(2023, 9, d, 00, 42),
+ )
+ flush_persons_and_events()
+
+ # request weekly active users in the following week
+ filter = {
+ "insight": "TRENDS",
+ "date_from": "2023-09-17T13:37:00",
+ "date_to": "2023-09-24T13:37:00",
+ "events": json.dumps([{"id": "pageview", "math": "weekly_active"}]),
+ }
+ insight_response = self.client.get(f"/api/projects/{self.team.pk}/insights/trend", data=filter)
+ insight_response = (insight_response.json()).get("result")
+
+ self.assertEqual(insight_response[0].get("labels")[5], "22-Sep-2023")
+ self.assertEqual(insight_response[0].get("data")[5], 2)
+
+ persons_url = insight_response[0].get("persons_urls")[5].get("url")
+ response = self.client.get("/" + persons_url)
+
+ data = response.json()
+ self.assertEqual(data.get("results")[0].get("count"), 2)
+ self.assertEqual([item["name"] for item in data.get("results")[0].get("people")], ["u_17", "u_16"])
+
+ def test_weekly_active_users_grouped_by_week(self):
+ for d in range(10, 18): # create a person and event for each day 10. Sep - 17. Sep
+ _create_person(team_id=self.team.pk, distinct_ids=[f"u_{d}"])
+ _create_event(
+ event="pageview",
+ distinct_id=f"u_{d}",
+ team=self.team,
+ timestamp=datetime(2023, 9, d, 00, 42),
+ )
+ flush_persons_and_events()
+
+ # request weekly active users in the following week
+ filter = {
+ "insight": "TRENDS",
+ "date_from": "2023-09-17T13:37:00",
+ "date_to": "2023-09-24T13:37:00",
+ "interval": "week",
+ "events": json.dumps([{"id": "pageview", "math": "weekly_active"}]),
+ }
+ insight_response = self.client.get(f"/api/projects/{self.team.pk}/insights/trend", data=filter)
+ insight_response = (insight_response.json()).get("result")
+
+ self.assertEqual(insight_response[0].get("labels")[0], "17-Sep-2023")
+ self.assertEqual(insight_response[0].get("data")[0], 7)
+
+ persons_url = insight_response[0].get("persons_urls")[0].get("url")
+ response = self.client.get("/" + persons_url)
+
+ data = response.json()
+ self.assertEqual(data.get("results")[0].get("count"), 7)
+ self.assertEqual(
+ [item["name"] for item in data.get("results")[0].get("people")],
+ ["u_17", "u_16", "u_15", "u_14", "u_13", "u_12", "u_11"],
+ )
+
+ def test_weekly_active_users_cumulative(self):
+ for d in range(10, 18): # create a person and event for each day 10. Sep - 17. Sep
+ _create_person(team_id=self.team.pk, distinct_ids=[f"u_{d}"])
+ _create_event(
+ event="pageview",
+ distinct_id=f"u_{d}",
+ team=self.team,
+ timestamp=datetime(2023, 9, d, 00, 42),
+ )
+ flush_persons_and_events()
+
+ # request weekly active users in the following week
+ filter = {
+ "insight": "TRENDS",
+ "date_from": "2023-09-10T13:37:00",
+ "date_to": "2023-09-24T13:37:00",
+ "events": json.dumps([{"id": "pageview", "math": "weekly_active"}]),
+ "display": "ActionsLineGraphCumulative",
+ }
+ insight_response = self.client.get(f"/api/projects/{self.team.pk}/insights/trend", data=filter)
+ insight_response = (insight_response.json()).get("result")
+
+ self.assertEqual(insight_response[0].get("labels")[1], "11-Sep-2023")
+ self.assertEqual(insight_response[0].get("data")[1], 3)
+
+ persons_url = insight_response[0].get("persons_urls")[1].get("url")
+ response = self.client.get("/" + persons_url)
+
+ data = response.json()
+ self.assertEqual(data.get("results")[0].get("count"), 2)
+ self.assertEqual([item["name"] for item in data.get("results")[0].get("people")], ["u_11", "u_10"])
+
+ @skip("see PR 17356")
+ def test_weekly_active_users_breakdown(self):
+ for d in range(10, 18): # create a person and event for each day 10. Sep - 17. Sep
+ _create_person(team_id=self.team.pk, distinct_ids=[f"a_{d}"])
+ _create_person(team_id=self.team.pk, distinct_ids=[f"b_{d}"])
+ _create_event(
+ event="pageview",
+ distinct_id=f"a_{d}",
+ properties={"some_prop": "a"},
+ team=self.team,
+ timestamp=datetime(2023, 9, d, 00, 42),
+ )
+ _create_event(
+ event="pageview",
+ distinct_id=f"b_{d}",
+ properties={"some_prop": "b"},
+ team=self.team,
+ timestamp=datetime(2023, 9, d, 00, 42),
+ )
+ flush_persons_and_events()
+
+ # request weekly active users in the following week
+ filter = {
+ "insight": "TRENDS",
+ "date_from": "2023-09-17T13:37:00",
+ "date_to": "2023-09-24T13:37:00",
+ "events": json.dumps([{"id": "pageview", "math": "weekly_active"}]),
+ "breakdown": "some_prop",
+ }
+ insight_response = self.client.get(f"/api/projects/{self.team.pk}/insights/trend", data=filter)
+ insight_response = (insight_response.json()).get("result")
+
+ self.assertEqual(insight_response[0].get("labels")[5], "22-Sep-2023")
+ # self.assertEqual(insight_response[0].get("data")[5], 2)
+
+ persons_url = insight_response[0].get("persons_urls")[5].get("url")
+ response = self.client.get("/" + persons_url)
+
+ data = response.json()
+ # self.assertEqual(data.get("results")[0].get("count"), 2)
+ self.assertEqual([item["name"] for item in data.get("results")[0].get("people")], ["a_17", "a_16"])
diff --git a/posthog/queries/trends/total_volume.py b/posthog/queries/trends/total_volume.py
index 3d57726d7886b..154e105e77f92 100644
--- a/posthog/queries/trends/total_volume.py
+++ b/posthog/queries/trends/total_volume.py
@@ -1,5 +1,5 @@
import urllib.parse
-from datetime import date, datetime
+from datetime import date, datetime, timedelta
from typing import Any, Callable, Dict, List, Tuple, Union
from posthog.clickhouse.query_tagging import tag_queries
@@ -256,6 +256,21 @@ def _parse(result: List) -> List:
return _parse
+ def _offset_date_from(self, point_datetime: datetime, filter: Filter, entity: Entity) -> datetime | None:
+ if filter.display == TRENDS_CUMULATIVE:
+ return filter.date_from
+ elif entity.math in [WEEKLY_ACTIVE, MONTHLY_ACTIVE]:
+ # :TRICKY: We have to offset the date by one, as the final query already subtracts 7 days
+ return point_datetime + timedelta(days=1)
+ else:
+ return point_datetime
+
+ def _offset_date_to(self, point_datetime: datetime, filter: Filter, entity: Entity, team: Team) -> datetime:
+ if entity.math in [WEEKLY_ACTIVE, MONTHLY_ACTIVE]:
+ return point_datetime
+ else:
+ return offset_time_series_date_by_interval(point_datetime, filter=filter, team=team)
+
def _get_persons_url(
self, filter: Filter, entity: Entity, team: Team, point_datetimes: List[datetime]
) -> List[Dict[str, Any]]:
@@ -267,8 +282,8 @@ def _get_persons_url(
"entity_id": entity.id,
"entity_type": entity.type,
"entity_math": entity.math,
- "date_from": filter.date_from if filter.display == TRENDS_CUMULATIVE else point_datetime,
- "date_to": offset_time_series_date_by_interval(point_datetime, filter=filter, team=team),
+ "date_from": self._offset_date_from(point_datetime, filter=filter, entity=entity),
+ "date_to": self._offset_date_to(point_datetime, filter=filter, entity=entity, team=team),
"entity_order": entity.order,
}
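
To illustrate the WEEKLY_ACTIVE/MONTHLY_ACTIVE branch added above: the persons URL for an active-users point keeps date_to at the point itself and shifts date_from forward by one day, because the persons query later subtracts seven days again. A sketch of the resulting window, with hypothetical dates:

from datetime import datetime, timedelta

point = datetime(2023, 9, 22)  # hypothetical trend point being drilled into

# weekly active: the downstream query subtracts 7 days from date_from,
# so offsetting by one day yields the intended 7-day window ending at the point
date_from = point + timedelta(days=1)
date_to = point

assert date_from - timedelta(days=7) == datetime(2023, 9, 16)
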
diff --git a/posthog/queries/trends/trends.py b/posthog/queries/trends/trends.py
index e7a96b4eeca5e..940abba59fab5 100644
--- a/posthog/queries/trends/trends.py
+++ b/posthog/queries/trends/trends.py
@@ -3,8 +3,8 @@
from datetime import datetime, timedelta
from itertools import accumulate
from typing import Any, Callable, Dict, List, Optional, Tuple, cast
+from zoneinfo import ZoneInfo
-import pytz
from dateutil import parser
from django.db.models.query import Prefetch
from sentry_sdk import push_scope
@@ -49,7 +49,6 @@ def _get_sql_for_entity(self, filter: Filter, team: Team, entity: Entity) -> Tup
# Use cached result even on refresh if team has strict caching enabled
def get_cached_result(self, filter: Filter, team: Team) -> Optional[List[Dict[str, Any]]]:
-
if not team.strict_caching_enabled or filter.breakdown or filter.display != TRENDS_LINEAR:
return None
@@ -80,7 +79,7 @@ def is_present_timerange(self, cached_result: List[Dict[str, Any]], filter: Filt
latest_date = cached_result[0]["days"][len(cached_result[0]["days"]) - 1]
parsed_latest_date = parser.parse(latest_date)
- parsed_latest_date = parsed_latest_date.replace(tzinfo=pytz.timezone(team.timezone))
+ parsed_latest_date = parsed_latest_date.replace(tzinfo=ZoneInfo(team.timezone))
_is_present = is_filter_date_present(filter, parsed_latest_date)
else:
_is_present = False
diff --git a/posthog/queries/trends/trends_event_query_base.py b/posthog/queries/trends/trends_event_query_base.py
index 00ab25e98460c..93dd843349046 100644
--- a/posthog/queries/trends/trends_event_query_base.py
+++ b/posthog/queries/trends/trends_event_query_base.py
@@ -104,14 +104,14 @@ def _get_not_null_actor_condition(self) -> str:
return f"""AND "$group_{self._entity.math_group_type_index}" != ''"""
def _get_date_filter(self) -> Tuple[str, Dict]:
- date_filter = ""
- query_params: Dict[str, Any] = {}
+ date_query = ""
+ date_params: Dict[str, Any] = {}
query_date_range = QueryDateRange(self._filter, self._team)
parsed_date_from, date_from_params = query_date_range.date_from
parsed_date_to, date_to_params = query_date_range.date_to
- query_params.update(date_from_params)
- query_params.update(date_to_params)
+ date_params.update(date_from_params)
+ date_params.update(date_to_params)
self.parsed_date_from = parsed_date_from
self.parsed_date_to = parsed_date_to
@@ -121,17 +121,17 @@ def _get_date_filter(self) -> Tuple[str, Dict]:
self._filter, self._entity, self._team_id
)
self.active_user_params = active_user_format_params
- query_params.update(active_user_query_params)
+ date_params.update(active_user_query_params)
- date_filter = "{parsed_date_from_prev_range} {parsed_date_to}".format(
+ date_query = "{parsed_date_from_prev_range} {parsed_date_to}".format(
**active_user_format_params, parsed_date_to=parsed_date_to
)
else:
- date_filter = "{parsed_date_from} {parsed_date_to}".format(
+ date_query = "{parsed_date_from} {parsed_date_to}".format(
parsed_date_from=parsed_date_from, parsed_date_to=parsed_date_to
)
- return date_filter, query_params
+ return date_query, date_params
def _get_entity_query(self) -> Tuple[str, Dict]:
entity_params, entity_format_params = get_entity_filtering_params(
diff --git a/posthog/queries/trends/util.py b/posthog/queries/trends/util.py
index a153e7f0eae56..46cd2a8041f32 100644
--- a/posthog/queries/trends/util.py
+++ b/posthog/queries/trends/util.py
@@ -1,8 +1,8 @@
import datetime
from datetime import timedelta
from typing import Any, Dict, List, Optional, Tuple, TypeVar
+from zoneinfo import ZoneInfo
-import pytz
import structlog
from dateutil.relativedelta import relativedelta
from rest_framework.exceptions import ValidationError
@@ -191,5 +191,5 @@ def offset_time_series_date_by_interval(date: datetime.datetime, *, filter: F, t
else: # "day" is the default interval
date = date.replace(hour=23, minute=59, second=59, microsecond=999999)
if date.tzinfo is None:
- date = pytz.timezone(team.timezone).localize(date)
+ date = date.replace(tzinfo=ZoneInfo(team.timezone))
return date
diff --git a/posthog/queries/util.py b/posthog/queries/util.py
index 936921732285b..ec218785b1dc9 100644
--- a/posthog/queries/util.py
+++ b/posthog/queries/util.py
@@ -3,7 +3,7 @@
from enum import Enum, auto
from typing import Any, Dict, Optional, Union
-import pytz
+from zoneinfo import ZoneInfo
from django.utils import timezone
from rest_framework.exceptions import ValidationError
@@ -67,16 +67,16 @@ class PersonPropertiesMode(Enum):
"month": "toIntervalMonth",
}
+
# TODO: refactor since this is only used in one spot now
def format_ch_timestamp(timestamp: datetime, convert_to_timezone: Optional[str] = None):
if convert_to_timezone:
# Here we probably get a timestamp set to the beginning of the day (00:00), in UTC
# We need to convert that UTC timestamp to the local timestamp (00:00 in US/Pacific for example)
# Then we convert it back to UTC (08:00 in UTC)
- if timestamp.tzinfo and timestamp.tzinfo != pytz.UTC:
+ if timestamp.tzinfo and timestamp.tzinfo != ZoneInfo("UTC"):
raise ValidationError(detail="You must pass a timestamp with no timezone or UTC")
- timestamp = pytz.timezone(convert_to_timezone).localize(timestamp.replace(tzinfo=None)).astimezone(pytz.UTC)
-
+ timestamp = timestamp.replace(tzinfo=ZoneInfo(convert_to_timezone)).astimezone(ZoneInfo("UTC"))
return timestamp.strftime("%Y-%m-%d %H:%M:%S")
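
A minimal sketch of the UTC → local → UTC round trip described in the format_ch_timestamp comments above, using zoneinfo in place of pytz.localize (the timezone and date are illustrative):

    # Illustrative only: reinterpret a midnight-UTC timestamp as local midnight, then convert back to UTC.
    from datetime import datetime
    from zoneinfo import ZoneInfo

    midnight_utc = datetime(2023, 4, 25, tzinfo=ZoneInfo("UTC"))
    local_midnight = midnight_utc.replace(tzinfo=ZoneInfo("US/Pacific"))  # same wall-clock time, new zone
    back_in_utc = local_midnight.astimezone(ZoneInfo("UTC"))
    print(back_in_utc.strftime("%Y-%m-%d %H:%M:%S"))  # 2023-04-25 07:00:00 (US/Pacific is UTC-7 in April)
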
diff --git a/posthog/schema.py b/posthog/schema.py
index 72b581e8c863c..e5a4f6a6ab9b2 100644
--- a/posthog/schema.py
+++ b/posthog/schema.py
@@ -594,6 +594,14 @@ class Config:
toggledLifecycles: Optional[List[LifecycleToggle]] = None
+class LifecycleQueryResponse(BaseModel):
+ class Config:
+ extra = Extra.forbid
+
+ result: List[Dict[str, Any]]
+ timings: Optional[List[QueryTiming]] = None
+
+
class PersonPropertyFilter(BaseModel):
class Config:
extra = Extra.forbid
@@ -1143,6 +1151,7 @@ class Config:
PropertyGroupFilter,
]
] = Field(None, description="Property filters for all series")
+ response: Optional[LifecycleQueryResponse] = None
samplingFactor: Optional[float] = Field(None, description="Sampling rate")
series: List[Union[EventsNode, ActionsNode]] = Field(..., description="Events and actions to include")
diff --git a/posthog/session_recordings/realtime_snapshots.py b/posthog/session_recordings/realtime_snapshots.py
index 429566418aa1f..ea19b3b405a2b 100644
--- a/posthog/session_recordings/realtime_snapshots.py
+++ b/posthog/session_recordings/realtime_snapshots.py
@@ -38,6 +38,10 @@ def get_realtime_snapshots(team_id: str, session_id: str, attempt_count=0) -> Op
key = get_key(team_id, session_id)
encoded_snapshots = redis.zrange(key, 0, -1, withscores=True)
+ # We always publish as it could be that a rebalance has occurred and the consumer doesn't know it should be
+ # sending data to Redis
+ redis.publish(SUBSCRIPTION_CHANNEL, json.dumps({"team_id": team_id, "session_id": session_id}))
+
if not encoded_snapshots and attempt_count < ATTEMPT_MAX:
logger.info(
"No realtime snapshots found, publishing subscription and retrying",
diff --git a/posthog/settings/web.py b/posthog/settings/web.py
index 9f61e9ee11e82..ca0c035765a7e 100644
--- a/posthog/settings/web.py
+++ b/posthog/settings/web.py
@@ -219,7 +219,7 @@
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATIC_URL = "/static/"
STATICFILES_DIRS = [os.path.join(BASE_DIR, "frontend/dist"), os.path.join(BASE_DIR, "posthog/year_in_posthog/images")]
-STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
+STATICFILES_STORAGE = "whitenoise.storage.ManifestStaticFilesStorage"
AUTH_USER_MODEL = "posthog.User"
diff --git a/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py
index 392534fc8999c..cb38d818ba9d7 100644
--- a/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py
+++ b/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py
@@ -3,11 +3,13 @@
import gzip
import itertools
import json
+import os
from random import randint
from unittest import mock
from uuid import uuid4
import boto3
+import botocore.exceptions
import brotli
import pytest
from django.conf import settings
@@ -40,6 +42,18 @@
TEST_ROOT_BUCKET = "test-batch-exports"
+
+def check_valid_credentials() -> bool:
+ """Check if there are valid AWS credentials in the environment."""
+ sts = boto3.client("sts")
+ try:
+ sts.get_caller_identity()
+ except botocore.exceptions.ClientError:
+ return False
+ else:
+ return True
+
+
create_test_client = functools.partial(boto3.client, endpoint_url=settings.OBJECT_STORAGE_ENDPOINT)
@@ -422,6 +436,163 @@ async def test_s3_export_workflow_with_minio_bucket(
assert_events_in_s3(s3_client, bucket_name, prefix, events, compression, exclude_events)
+@pytest.mark.skipif(
+ "S3_TEST_BUCKET" not in os.environ or not check_valid_credentials(),
+ reason="AWS credentials not set in environment or missing S3_TEST_BUCKET variable",
+)
+@pytest.mark.django_db
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+ "interval,compression,encryption,exclude_events",
+ itertools.product(["hour", "day"], [None, "gzip", "brotli"], [None, "AES256"], [None, ["test-exclude"]]),
+)
+async def test_s3_export_workflow_with_s3_bucket(interval, compression, encryption, exclude_events):
+ """Test S3 Export Workflow end-to-end by using an S3 bucket.
+
+ The S3_TEST_BUCKET environment variable is used to set the name of the bucket for this test.
+ This test will be skipped if no valid AWS credentials exist, or if the S3_TEST_BUCKET environment
+ variable is not set.
+
+ The workflow should update the batch export run status to completed and produce the expected
+ records to the S3 bucket.
+ """
+ bucket_name = os.getenv("S3_TEST_BUCKET")
+ prefix = f"posthog-events-{str(uuid4())}"
+ destination_data = {
+ "type": "S3",
+ "config": {
+ "bucket_name": bucket_name,
+ "region": "us-east-1",
+ "prefix": prefix,
+ "aws_access_key_id": "object_storage_root_user",
+ "aws_secret_access_key": "object_storage_root_password",
+ "compression": compression,
+ "exclude_events": exclude_events,
+ "encryption": encryption,
+ },
+ }
+
+ batch_export_data = {
+ "name": "my-production-s3-bucket-destination",
+ "destination": destination_data,
+ "interval": interval,
+ }
+
+ organization = await acreate_organization("test")
+ team = await acreate_team(organization=organization)
+ batch_export = await acreate_batch_export(
+ team_id=team.pk,
+ name=batch_export_data["name"],
+ destination_data=batch_export_data["destination"],
+ interval=batch_export_data["interval"],
+ )
+
+ events: list[EventValues] = [
+ {
+ "uuid": str(uuid4()),
+ "event": "test",
+ "timestamp": "2023-04-25 13:30:00.000000",
+ "created_at": "2023-04-25 13:30:00.000000",
+ "inserted_at": "2023-04-25 13:30:00.000000",
+ "_timestamp": "2023-04-25 13:30:00",
+ "person_id": str(uuid4()),
+ "person_properties": {"$browser": "Chrome", "$os": "Mac OS X"},
+ "team_id": team.pk,
+ "properties": {"$browser": "Chrome", "$os": "Mac OS X"},
+ "distinct_id": str(uuid4()),
+ "elements_chain": "this is a comman, separated, list, of css selectors(?)",
+ },
+ {
+ "uuid": str(uuid4()),
+ "event": "test-exclude",
+ "timestamp": "2023-04-25 14:29:00.000000",
+ "created_at": "2023-04-25 14:29:00.000000",
+ "inserted_at": "2023-04-25 14:29:00.000000",
+ "_timestamp": "2023-04-25 14:29:00",
+ "person_id": str(uuid4()),
+ "person_properties": {"$browser": "Chrome", "$os": "Mac OS X"},
+ "team_id": team.pk,
+ "properties": {"$browser": "Chrome", "$os": "Mac OS X"},
+ "distinct_id": str(uuid4()),
+ "elements_chain": "this is a comman, separated, list, of css selectors(?)",
+ },
+ ]
+
+ if interval == "day":
+ # Add an event outside the hour range but within the day range to ensure it's exported too.
+ events_outside_hour: list[EventValues] = [
+ {
+ "uuid": str(uuid4()),
+ "event": "test",
+ "timestamp": "2023-04-25 00:30:00.000000",
+ "created_at": "2023-04-25 00:30:00.000000",
+ "inserted_at": "2023-04-25 00:30:00.000000",
+ "_timestamp": "2023-04-25 00:30:00",
+ "person_id": str(uuid4()),
+ "person_properties": {"$browser": "Chrome", "$os": "Mac OS X"},
+ "team_id": team.pk,
+ "properties": {"$browser": "Chrome", "$os": "Mac OS X"},
+ "distinct_id": str(uuid4()),
+ "elements_chain": "this is a comman, separated, list, of css selectors(?)",
+ }
+ ]
+ events += events_outside_hour
+
+ ch_client = ClickHouseClient(
+ url=settings.CLICKHOUSE_HTTP_URL,
+ user=settings.CLICKHOUSE_USER,
+ password=settings.CLICKHOUSE_PASSWORD,
+ database=settings.CLICKHOUSE_DATABASE,
+ )
+
+ # Insert some data into the `sharded_events` table.
+ await insert_events(
+ client=ch_client,
+ events=events,
+ )
+
+ workflow_id = str(uuid4())
+ inputs = S3BatchExportInputs(
+ team_id=team.pk,
+ batch_export_id=str(batch_export.id),
+ data_interval_end="2023-04-25 14:30:00.000000",
+ interval=interval,
+ **batch_export.destination.config,
+ )
+
+ s3_client = boto3.client("s3")
+
+ def create_s3_client(*args, **kwargs):
+ """Mock function to return an already initialized S3 client."""
+ return s3_client
+
+ async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
+ async with Worker(
+ activity_environment.client,
+ task_queue=settings.TEMPORAL_TASK_QUEUE,
+ workflows=[S3BatchExportWorkflow],
+ activities=[create_export_run, insert_into_s3_activity, update_export_run_status],
+ workflow_runner=UnsandboxedWorkflowRunner(),
+ ):
+ with mock.patch("posthog.temporal.workflows.s3_batch_export.boto3.client", side_effect=create_s3_client):
+ await activity_environment.client.execute_workflow(
+ S3BatchExportWorkflow.run,
+ inputs,
+ id=workflow_id,
+ task_queue=settings.TEMPORAL_TASK_QUEUE,
+ retry_policy=RetryPolicy(maximum_attempts=1),
+ execution_timeout=dt.timedelta(seconds=10),
+ )
+
+ runs = await afetch_batch_export_runs(batch_export_id=batch_export.id)
+ assert len(runs) == 1
+
+ run = runs[0]
+ assert run.status == "Completed"
+
+ assert_events_in_s3(s3_client, bucket_name, prefix, events, compression, exclude_events)
+
+
@pytest.mark.django_db
@pytest.mark.asyncio
@pytest.mark.parametrize("compression", [None, "gzip"])
diff --git a/posthog/temporal/workflows/postgres_batch_export.py b/posthog/temporal/workflows/postgres_batch_export.py
index a396f361b77c5..fa83d31404e8e 100644
--- a/posthog/temporal/workflows/postgres_batch_export.py
+++ b/posthog/temporal/workflows/postgres_batch_export.py
@@ -245,7 +245,11 @@ async def run(self, inputs: PostgresBatchExportInputs):
initial_interval=dt.timedelta(seconds=10),
maximum_interval=dt.timedelta(seconds=120),
maximum_attempts=10,
- non_retryable_error_types=[],
+ non_retryable_error_types=[
+ # Raised on errors that are related to database operations.
+ # For example: unexpected disconnect, database or other object not found.
+ "OperationalError"
+ ],
),
)
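
For reference, a minimal sketch of constructing the same shape of retry policy with temporalio; an activity failure whose error type appears in non_retryable_error_types is not retried and surfaces immediately:

    # Sketch of the policy configured above (values copied from the diff for illustration).
    import datetime as dt
    from temporalio.common import RetryPolicy

    retry_policy = RetryPolicy(
        initial_interval=dt.timedelta(seconds=10),
        maximum_interval=dt.timedelta(seconds=120),
        maximum_attempts=10,
        non_retryable_error_types=["OperationalError"],  # e.g. dropped connection, missing database/object
    )
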
diff --git a/posthog/temporal/workflows/s3_batch_export.py b/posthog/temporal/workflows/s3_batch_export.py
index 028b6f422e26f..13bbf183e5d06 100644
--- a/posthog/temporal/workflows/s3_batch_export.py
+++ b/posthog/temporal/workflows/s3_batch_export.py
@@ -85,15 +85,20 @@ class S3MultiPartUploadState(typing.NamedTuple):
parts: list[dict[str, str | int]]
+Part = dict[str, str | int]
+
+
class S3MultiPartUpload:
"""An S3 multi-part upload."""
- def __init__(self, s3_client, bucket_name, key):
+ def __init__(self, s3_client, bucket_name: str, key: str, encryption: str | None, kms_key_id: str | None):
self.s3_client = s3_client
self.bucket_name = bucket_name
self.key = key
- self.upload_id = None
- self.parts = []
+ self.encryption = encryption
+ self.kms_key_id = kms_key_id
+ self.upload_id: str | None = None
+ self.parts: list[Part] = []
def to_state(self) -> S3MultiPartUploadState:
"""Produce state tuple that can be used to resume this S3MultiPartUpload."""
@@ -119,10 +124,21 @@ def start(self) -> str:
if self.is_upload_in_progress() is True:
raise UploadAlreadyInProgressError(self.upload_id)
- multipart_response = self.s3_client.create_multipart_upload(Bucket=self.bucket_name, Key=self.key)
- self.upload_id = multipart_response["UploadId"]
+ optional_kwargs = {}
+ if self.encryption:
+ optional_kwargs["ServerSideEncryption"] = self.encryption
+ if self.kms_key_id:
+ optional_kwargs["SSEKMSKeyId"] = self.kms_key_id
- return self.upload_id
+ multipart_response = self.s3_client.create_multipart_upload(
+ Bucket=self.bucket_name,
+ Key=self.key,
+ **optional_kwargs,
+ )
+ upload_id: str = multipart_response["UploadId"]
+ self.upload_id = upload_id
+
+ return upload_id
def continue_from_state(self, state: S3MultiPartUploadState):
"""Continue this S3MultiPartUpload from a previous state."""
@@ -230,6 +246,8 @@ class S3InsertInputs:
aws_secret_access_key: str | None = None
compression: str | None = None
exclude_events: list[str] | None = None
+ encryption: str | None = None
+ kms_key_id: str | None = None
def initialize_and_resume_multipart_upload(inputs: S3InsertInputs) -> tuple[S3MultiPartUpload, str]:
@@ -241,7 +259,7 @@ def initialize_and_resume_multipart_upload(inputs: S3InsertInputs) -> tuple[S3Mu
aws_access_key_id=inputs.aws_access_key_id,
aws_secret_access_key=inputs.aws_secret_access_key,
)
- s3_upload = S3MultiPartUpload(s3_client, inputs.bucket_name, key)
+ s3_upload = S3MultiPartUpload(s3_client, inputs.bucket_name, key, inputs.encryption, inputs.kms_key_id)
details = activity.info().heartbeat_details
@@ -442,6 +460,8 @@ async def run(self, inputs: S3BatchExportInputs):
data_interval_end=data_interval_end.isoformat(),
compression=inputs.compression,
exclude_events=inputs.exclude_events,
+ encryption=inputs.encryption,
+ kms_key_id=inputs.kms_key_id,
)
try:
await workflow.execute_activity(
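
A condensed sketch of the optional-kwargs pattern S3MultiPartUpload.start now uses: server-side encryption settings are only forwarded to boto3 when they are configured, so unencrypted uploads keep the previous call shape. The standalone function here is illustrative, not the class method itself.

    # Sketch: forward ServerSideEncryption / SSEKMSKeyId to create_multipart_upload only when set.
    import boto3

    def start_multipart_upload(bucket: str, key: str, encryption: str | None, kms_key_id: str | None) -> str:
        s3 = boto3.client("s3")
        optional_kwargs = {}
        if encryption:
            optional_kwargs["ServerSideEncryption"] = encryption  # e.g. "AES256" or "aws:kms"
        if kms_key_id:
            optional_kwargs["SSEKMSKeyId"] = kms_key_id
        response = s3.create_multipart_upload(Bucket=bucket, Key=key, **optional_kwargs)
        return response["UploadId"]
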
diff --git a/posthog/test/test_datetime.py b/posthog/test/test_datetime.py
new file mode 100644
index 0000000000000..b25fa7098f9b5
--- /dev/null
+++ b/posthog/test/test_datetime.py
@@ -0,0 +1,33 @@
+from datetime import datetime, timezone
+
+from posthog.datetime import start_of_hour, start_of_day, end_of_day, start_of_week, start_of_month
+
+
+def test_start_of_hour():
+ assert start_of_hour(datetime.fromisoformat("2023-02-08T12:05:23+00:00")) == datetime.fromisoformat(
+ "2023-02-08T12:00:00+00:00"
+ )
+
+
+def test_start_of_day():
+ assert start_of_day(datetime.fromisoformat("2023-02-08T12:05:23+00:00")) == datetime.fromisoformat(
+ "2023-02-08T00:00:00+00:00"
+ )
+
+
+def test_end_of_day():
+ assert end_of_day(datetime.fromisoformat("2023-02-08T12:05:23+00:00")) == datetime(
+ 2023, 2, 8, 23, 59, 59, 999999, tzinfo=timezone.utc
+ )
+
+
+def test_start_of_week():
+ assert start_of_week(datetime.fromisoformat("2023-02-08T12:05:23+00:00")) == datetime.fromisoformat(
+ "2023-02-05T00:00:00+00:00"
+ )
+
+
+def test_start_of_month():
+ assert start_of_month(datetime.fromisoformat("2023-02-08T12:05:23+00:00")) == datetime.fromisoformat(
+ "2023-02-01T00:00:00+00:00"
+ )
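
The helpers exercised here are not part of this diff; the following are assumed implementations that would satisfy the assertions above, not the actual posthog.datetime module:

    # Assumed implementations, consistent with the tests above.
    from datetime import datetime, timedelta

    def start_of_hour(d: datetime) -> datetime:
        return d.replace(minute=0, second=0, microsecond=0)

    def start_of_day(d: datetime) -> datetime:
        return d.replace(hour=0, minute=0, second=0, microsecond=0)

    def end_of_day(d: datetime) -> datetime:
        return d.replace(hour=23, minute=59, second=59, microsecond=999999)

    def start_of_week(d: datetime) -> datetime:
        # Weeks start on Sunday here: 2023-02-08 (a Wednesday) maps back to 2023-02-05.
        return start_of_day(d) - timedelta(days=(d.weekday() + 1) % 7)

    def start_of_month(d: datetime) -> datetime:
        return d.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
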
diff --git a/posthog/test/test_decorators.py b/posthog/test/test_decorators.py
index 9cbb181c3f261..a6bc176072377 100644
--- a/posthog/test/test_decorators.py
+++ b/posthog/test/test_decorators.py
@@ -1,12 +1,18 @@
-from posthog.decorators import cached_by_filters
+from datetime import datetime
+from freezegun import freeze_time
+from posthog.decorators import cached_by_filters, is_stale
from django.core.cache import cache
from rest_framework.test import APIRequestFactory
from rest_framework.viewsets import GenericViewSet
from rest_framework.response import Response
+from posthog.models.filters.filter import Filter
+from posthog.models.filters.path_filter import PathFilter
+from posthog.models.filters.retention_filter import RetentionFilter
+from posthog.models.filters.stickiness_filter import StickinessFilter
-from posthog.test.base import APIBaseTest
+from posthog.test.base import APIBaseTest, BaseTest
from posthog.api import router
factory = APIRequestFactory()
@@ -22,7 +28,7 @@ def calculate_with_filters(self, request):
return {"result": "bla"}
-class TestDecorators(APIBaseTest):
+class TestCachedByFiltersDecorator(APIBaseTest):
def setUp(self) -> None:
cache.clear()
@@ -61,3 +67,161 @@ def test_cache_bypass_with_invalidation_key_param(self) -> None:
response = self.client.get(f"/api/dummy", data={"cache_invalidation_key": "abc"}).json()
assert response["is_cached"] is False
+
+ def test_discards_stale_response(self) -> None:
+ with freeze_time("2023-02-08T12:05:23Z"):
+ # cache the result
+ self.client.get(f"/api/dummy").json()
+
+ with freeze_time("2023-02-10T12:00:00Z"):
+ # we don't need to add filters, since -7d with a
+ # daily interval is the default
+ response = self.client.get(f"/api/dummy").json()
+ assert response["is_cached"] is False
+
+
+class TestIsStaleHelper(BaseTest):
+ cached_response = {"last_refresh": datetime.fromisoformat("2023-02-08T12:05:23+00:00"), "result": "bla"}
+
+ def test_keeps_fresh_hourly_result(self) -> None:
+ with freeze_time("2023-02-08T12:59:59Z"):
+ filter = Filter(data={"interval": "hour"})
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is False
+
+ def test_discards_stale_hourly_result(self) -> None:
+ with freeze_time("2023-02-08T13:00:00Z"):
+ filter = Filter(data={"interval": "hour"})
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is True
+
+ def test_keeps_fresh_daily_result(self) -> None:
+ with freeze_time("2023-02-08T23:59:59Z"):
+ filter = Filter(data={"interval": "day"})
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is False
+
+ def test_discards_stale_daily_result(self) -> None:
+ with freeze_time("2023-02-09T00:00:00Z"):
+ filter = Filter(data={"interval": "day"})
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is True
+
+ def test_keeps_fresh_weekly_result(self) -> None:
+ with freeze_time("2023-02-11T23:59:59Z"):
+ filter = Filter(data={"interval": "week"})
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is False
+
+ def test_discards_stale_weekly_result(self) -> None:
+ with freeze_time("2023-02-12T00:00:00Z"):
+ filter = Filter(data={"interval": "week"})
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is True
+
+ def test_keeps_fresh_monthly_result(self) -> None:
+ with freeze_time("2023-02-28T23:59:59Z"):
+ filter = Filter(data={"interval": "month"})
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is False
+
+ def test_discards_stale_monthly_result(self) -> None:
+ with freeze_time("2023-03-01T00:00:00Z"):
+ filter = Filter(data={"interval": "month"})
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is True
+
+ def test_keeps_fresh_result_from_fixed_range(self) -> None:
+ filter = Filter(data={"interval": "day", "date_from": "2000-01-01", "date_to": "2000-01-10"})
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is False
+
+ def test_keeps_fresh_result_with_date_to_in_future(self) -> None:
+ with freeze_time("2023-02-08T23:59:59Z"):
+ filter = Filter(data={"interval": "day", "date_to": "2999-01-01"})
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is False
+
+ def test_keeps_fresh_stickiness_result(self) -> None:
+ with freeze_time("2023-02-08T23:59:59Z"):
+ filter = StickinessFilter(data={}, team=self.team)
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is False
+
+ def test_discards_stale_stickiness_result(self) -> None:
+ with freeze_time("2023-02-09T00:00:00Z"):
+ filter = StickinessFilter(data={}, team=self.team)
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is True
+
+ def test_keeps_fresh_path_result(self) -> None:
+ with freeze_time("2023-02-08T23:59:59Z"):
+ filter = PathFilter()
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is False
+
+ def test_discards_stale_path_result(self) -> None:
+ with freeze_time("2023-02-09T00:00:00Z"):
+ filter = PathFilter()
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is True
+
+ def test_keeps_fresh_retention_hourly_result(self) -> None:
+ with freeze_time("2023-02-08T12:59:59Z"):
+ filter = RetentionFilter(data={"period": "Hour"})
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is False
+
+ def test_discards_stale_retention_hourly_result(self) -> None:
+ with freeze_time("2023-02-08T13:00:00Z"):
+ filter = RetentionFilter(data={"period": "Hour"})
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is True
+
+ def test_keeps_fresh_retention_result(self) -> None:
+ with freeze_time("2023-02-08T23:59:59Z"):
+ filter = RetentionFilter()
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is False
+
+ def test_discards_stale_retention_result(self) -> None:
+ with freeze_time("2023-02-09T00:00:00Z"):
+ filter = RetentionFilter()
+
+ stale = is_stale(self.team, filter, self.cached_response)
+
+ assert stale is True
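
Taken together, these tests pin down the staleness rule. A rough sketch consistent with them (an approximation, not the actual posthog.decorators.is_stale): clamp date_to to now, then a cached result is stale once the interval bucket containing that date moves past last_refresh, which is why fixed historical ranges never go stale.

    # Approximation of the behaviour the tests above encode; not the real implementation.
    from datetime import datetime, timezone

    from posthog.datetime import start_of_day, start_of_hour, start_of_month, start_of_week

    BUCKET_START = {"hour": start_of_hour, "day": start_of_day, "week": start_of_week, "month": start_of_month}

    def is_stale_sketch(filter, cached_result) -> bool:
        last_refresh = cached_result["last_refresh"]
        # A future or unset date_to is clamped to "now"; a fixed historical range keeps its own date_to.
        date_to = min(filter.date_to, datetime.now(timezone.utc))
        interval = (getattr(filter, "period", None) or getattr(filter, "interval", None) or "day").lower()
        return BUCKET_START[interval](date_to) > last_refresh
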
diff --git a/posthog/utils.py b/posthog/utils.py
index ddbb32bfb2a5a..5e605415ebc3e 100644
--- a/posthog/utils.py
+++ b/posthog/utils.py
@@ -28,11 +28,11 @@
cast,
)
from urllib.parse import urljoin, urlparse
-from zoneinfo import ZoneInfo
import lzstring
import posthoganalytics
import pytz
+from zoneinfo import ZoneInfo
import structlog
from celery.schedules import crontab
from dateutil import parser
@@ -128,13 +128,13 @@ def get_previous_day(at: Optional[datetime.datetime] = None) -> Tuple[datetime.d
period_end: datetime.datetime = datetime.datetime.combine(
at - datetime.timedelta(days=1),
datetime.time.max,
- tzinfo=pytz.UTC,
+ tzinfo=ZoneInfo("UTC"),
) # very end of the previous day
period_start: datetime.datetime = datetime.datetime.combine(
period_end,
datetime.time.min,
- tzinfo=pytz.UTC,
+ tzinfo=ZoneInfo("UTC"),
) # very start of the previous day
return (period_start, period_end)
@@ -152,13 +152,13 @@ def get_current_day(at: Optional[datetime.datetime] = None) -> Tuple[datetime.da
period_end: datetime.datetime = datetime.datetime.combine(
at,
datetime.time.max,
- tzinfo=pytz.UTC,
+ tzinfo=ZoneInfo("UTC"),
) # very end of the reference day
period_start: datetime.datetime = datetime.datetime.combine(
period_end,
datetime.time.min,
- tzinfo=pytz.UTC,
+ tzinfo=ZoneInfo("UTC"),
) # very start of the reference day
return (period_start, period_end)
@@ -1087,7 +1087,7 @@ def cast_timestamp_or_now(timestamp: Optional[Union[timezone.datetime, str]]) ->
if isinstance(timestamp, str):
timestamp = parser.isoparse(timestamp)
else:
- timestamp = timestamp.astimezone(pytz.utc)
+ timestamp = timestamp.astimezone(ZoneInfo("UTC"))
return timestamp.strftime("%Y-%m-%d %H:%M:%S.%f")
diff --git a/posthog/warehouse/models/table.py b/posthog/warehouse/models/table.py
index 10e61444e8250..dcb1b2297216f 100644
--- a/posthog/warehouse/models/table.py
+++ b/posthog/warehouse/models/table.py
@@ -8,6 +8,7 @@
StringDatabaseField,
IntegerDatabaseField,
DateTimeDatabaseField,
+ DateDatabaseField,
StringJSONDatabaseField,
BooleanDatabaseField,
StringArrayDatabaseField,
@@ -20,6 +21,7 @@
"String": StringDatabaseField,
"DateTime64": DateTimeDatabaseField,
"DateTime32": DateTimeDatabaseField,
+ "Date": DateDatabaseField,
"UInt8": IntegerDatabaseField,
"UInt16": IntegerDatabaseField,
"UInt32": IntegerDatabaseField,
@@ -47,6 +49,7 @@ class DataWarehouseTable(CreatedMetaFields, UUIDModel, DeletedMetaFields):
class TableFormat(models.TextChoices):
CSV = "CSV", "CSV"
Parquet = "Parquet", "Parquet"
+ JSON = "JSONEachRow", "JSON"
name: models.CharField = models.CharField(max_length=128)
format: models.CharField = models.CharField(max_length=128, choices=TableFormat.choices)
diff --git a/requirements.in b/requirements.in
index 0fecfd58aad1f..0657d57392b54 100644
--- a/requirements.in
+++ b/requirements.in
@@ -9,7 +9,7 @@ antlr4-python3-runtime==4.13.0
amqp==2.6.0
boto3==1.26.66
boto3-stubs[s3]
-brotli==1.0.9
+brotli==1.1.0
celery==4.4.7
celery-redbeat==2.0.0
clickhouse-driver==0.2.4
@@ -78,7 +78,7 @@ temporalio==1.1.0
token-bucket==0.3.0
toronado==0.1.0
webdriver_manager==3.8.5
-whitenoise==5.2.0
+whitenoise==6.5.0
mimesis==5.2.1
more-itertools==9.0.0
django-two-factor-auth==1.14.0
diff --git a/requirements.txt b/requirements.txt
index 972c09cea5b08..2190093813d3c 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -51,8 +51,10 @@ botocore==1.29.66
# s3transfer
botocore-stubs==1.29.130
# via boto3-stubs
-brotli==1.0.9
+brotli==1.1.0
# via -r requirements.in
+cachetools==5.3.1
+ # via google-auth
celery==4.4.7
# via
# -r requirements.in
@@ -507,7 +509,7 @@ vine==1.3.0
# celery
webdriver-manager==3.8.5
# via -r requirements.in
-whitenoise==5.2.0
+whitenoise==6.5.0
# via -r requirements.in
wsproto==1.1.0
# via trio-websocket
diff --git a/runtime.txt b/runtime.txt
deleted file mode 100644
index 97691386f3a6e..0000000000000
--- a/runtime.txt
+++ /dev/null
@@ -1 +0,0 @@
-python-3.10.10