@@ -294,6 +300,7 @@ const Content = ({
saveAsInsight,
queryId,
pollResponse,
+ editorKey,
}: any): JSX.Element | null => {
if (activeTab === OutputTab.Results) {
if (responseError) {
@@ -310,7 +317,9 @@ const Content = ({
return responseLoading ? (
) : !response ? (
-
Query results will appear here
+
+ Query results will appear here
+
) : (
Query be results will be visualized here
+
+ Query results will be visualized here
+
) : (
+
+
+ )
+ }
+
return null
}
diff --git a/frontend/src/scenes/data-warehouse/editor/OutputPaneTabs/InfoTab.tsx b/frontend/src/scenes/data-warehouse/editor/OutputPaneTabs/InfoTab.tsx
new file mode 100644
index 0000000000000..817f440cc1014
--- /dev/null
+++ b/frontend/src/scenes/data-warehouse/editor/OutputPaneTabs/InfoTab.tsx
@@ -0,0 +1,102 @@
+import { LemonButton, Tooltip } from '@posthog/lemon-ui'
+import { useActions, useValues } from 'kea'
+import { LemonTable } from 'lib/lemon-ui/LemonTable'
+import { humanFriendlyDetailedTime } from 'lib/utils'
+
+import { multitabEditorLogic } from '../multitabEditorLogic'
+import { infoTabLogic } from './infoTabLogic'
+
+interface InfoTabProps {
+ codeEditorKey: string
+}
+
+export function InfoTab({ codeEditorKey }: InfoTabProps): JSX.Element {
+ const { sourceTableItems } = useValues(infoTabLogic({ codeEditorKey: codeEditorKey }))
+ const { editingView, isEditingMaterializedView } = useValues(multitabEditorLogic)
+ const { runDataWarehouseSavedQuery } = useActions(multitabEditorLogic)
+
+ return (
+
+
+
Materialization
+
+ {isEditingMaterializedView ? (
+
+ {editingView?.last_run_at ? (
+ `Last run at ${humanFriendlyDetailedTime(editingView.last_run_at)}`
+ ) : (
+
+ Materialization scheduled
+
+ )}
+
editingView && runDataWarehouseSavedQuery(editingView.id)}
+ className="mt-2"
+ type="secondary"
+ >
+ Run now
+
+
+ ) : (
+
editingView && runDataWarehouseSavedQuery(editingView.id)}
+ type="primary"
+ disabledReason={editingView ? undefined : 'You must save the view first'}
+ >
+ Materialize
+
+ )}
+
+
+
+
Dependencies
+
+ Dependencies are tables that this query uses. See when a source or materialized table was last run.
+
+
+
name,
+ },
+ {
+ key: 'Type',
+ title: 'Type',
+ render: (_, { type }) => type,
+ },
+ {
+ key: 'Status',
+ title: 'Status',
+ render: (_, { type, status }) => {
+ if (type === 'source') {
+ return (
+
+ N/A
+
+ )
+ }
+ return status
+ },
+ },
+ {
+ key: 'Last run at',
+ title: 'Last run at',
+ render: (_, { type, last_run_at }) => {
+ if (type === 'source') {
+ return (
+
+ N/A
+
+ )
+ }
+ return humanFriendlyDetailedTime(last_run_at)
+ },
+ },
+ ]}
+ dataSource={sourceTableItems}
+ />
+
+ )
+}
diff --git a/frontend/src/scenes/data-warehouse/editor/OutputPaneTabs/infoTabLogic.ts b/frontend/src/scenes/data-warehouse/editor/OutputPaneTabs/infoTabLogic.ts
new file mode 100644
index 0000000000000..4510e80db6693
--- /dev/null
+++ b/frontend/src/scenes/data-warehouse/editor/OutputPaneTabs/infoTabLogic.ts
@@ -0,0 +1,63 @@
+import { connect, kea, key, path, props, selectors } from 'kea'
+import { databaseTableListLogic } from 'scenes/data-management/database/databaseTableListLogic'
+import { dataWarehouseViewsLogic } from 'scenes/data-warehouse/saved_queries/dataWarehouseViewsLogic'
+
+import { multitabEditorLogic } from '../multitabEditorLogic'
+import type { infoTabLogicType } from './infoTabLogicType'
+
+export interface InfoTableRow {
+ name: string
+ type: 'source' | 'table'
+ status?: string
+ last_run_at?: string
+}
+
+export interface InfoTabLogicProps {
+ codeEditorKey: string
+}
+
+export const infoTabLogic = kea([
+ path(['data-warehouse', 'editor', 'outputPaneTabs', 'infoTabLogic']),
+ props({} as InfoTabLogicProps),
+ key((props) => props.codeEditorKey),
+ connect((props: InfoTabLogicProps) => ({
+ values: [
+ multitabEditorLogic({ key: props.codeEditorKey }),
+ ['metadata'],
+ databaseTableListLogic,
+ ['posthogTablesMap', 'dataWarehouseTablesMap'],
+ dataWarehouseViewsLogic,
+ ['dataWarehouseSavedQueryMap'],
+ ],
+ })),
+ selectors({
+ sourceTableItems: [
+ (s) => [s.metadata, s.dataWarehouseSavedQueryMap],
+ (metadata, dataWarehouseSavedQueryMap) => {
+ if (!metadata) {
+ return []
+ }
+ return (
+ metadata.table_names?.map((table_name) => {
+ const view = dataWarehouseSavedQueryMap[table_name]
+ if (view) {
+ return {
+ name: table_name,
+ type: 'table',
+ status: view.status,
+ last_run_at: view.last_run_at || 'never',
+ }
+ }
+
+ return {
+ name: table_name,
+ type: 'source',
+ status: undefined,
+ last_run_at: undefined,
+ }
+ }) || []
+ )
+ },
+ ],
+ }),
+])
diff --git a/frontend/src/scenes/data-warehouse/editor/QueryWindow.tsx b/frontend/src/scenes/data-warehouse/editor/QueryWindow.tsx
index 02c2457a0381e..7bfbe9310d7e8 100644
--- a/frontend/src/scenes/data-warehouse/editor/QueryWindow.tsx
+++ b/frontend/src/scenes/data-warehouse/editor/QueryWindow.tsx
@@ -36,7 +36,8 @@ export function QueryWindow(): JSX.Element {
})
const { allTabs, activeModelUri, queryInput, editingView, sourceQuery } = useValues(logic)
- const { selectTab, deleteTab, createTab, setQueryInput, runQuery, setError, setIsValidView } = useActions(logic)
+ const { selectTab, deleteTab, createTab, setQueryInput, runQuery, setError, setIsValidView, setMetadata } =
+ useActions(logic)
return (
@@ -51,7 +52,9 @@ export function QueryWindow(): JSX.Element {
{editingView && (
- Editing view "{editingView.name}"
+
+ Editing {editingView.status ? 'materialized view' : 'view'} "{editingView.name}"
+
)}
{
+ setMetadata(metadata)
+ },
}}
/>
diff --git a/frontend/src/scenes/data-warehouse/editor/editorSidebarLogic.ts b/frontend/src/scenes/data-warehouse/editor/editorSidebarLogic.tsx
similarity index 63%
rename from frontend/src/scenes/data-warehouse/editor/editorSidebarLogic.ts
rename to frontend/src/scenes/data-warehouse/editor/editorSidebarLogic.tsx
index cfd559e59506a..c45ea5559fb5a 100644
--- a/frontend/src/scenes/data-warehouse/editor/editorSidebarLogic.ts
+++ b/frontend/src/scenes/data-warehouse/editor/editorSidebarLogic.tsx
@@ -1,9 +1,9 @@
+import { Tooltip } from '@posthog/lemon-ui'
import Fuse from 'fuse.js'
import { connect, kea, path, selectors } from 'kea'
import { router } from 'kea-router'
import { subscriptions } from 'kea-subscriptions'
-import { FEATURE_FLAGS } from 'lib/constants'
-import { featureFlagLogic } from 'lib/logic/featureFlagLogic'
+import { IconCalculate, IconClipboardEdit } from 'lib/lemon-ui/icons'
import { databaseTableListLogic } from 'scenes/data-management/database/databaseTableListLogic'
import { sceneLogic } from 'scenes/sceneLogic'
import { Scene } from 'scenes/sceneTypes'
@@ -42,20 +42,6 @@ const savedQueriesfuse = new Fuse([], {
includeMatches: true,
})
-const nonMaterializedViewsfuse = new Fuse([], {
- keys: [{ name: 'name', weight: 2 }],
- threshold: 0.3,
- ignoreLocation: true,
- includeMatches: true,
-})
-
-const materializedViewsfuse = new Fuse([], {
- keys: [{ name: 'name', weight: 2 }],
- threshold: 0.3,
- ignoreLocation: true,
- includeMatches: true,
-})
-
export const editorSidebarLogic = kea([
path(['data-warehouse', 'editor', 'editorSidebarLogic']),
connect({
@@ -66,8 +52,6 @@ export const editorSidebarLogic = kea([
['dataWarehouseSavedQueries', 'dataWarehouseSavedQueryMapById', 'dataWarehouseSavedQueriesLoading'],
databaseTableListLogic,
['posthogTables', 'dataWarehouseTables', 'databaseLoading', 'views', 'viewsMapById'],
- featureFlagLogic,
- ['featureFlags'],
],
actions: [
editorSceneLogic,
@@ -86,19 +70,13 @@ export const editorSidebarLogic = kea([
s.relevantPosthogTables,
s.relevantDataWarehouseTables,
s.databaseLoading,
- s.relevantNonMaterializedViews,
- s.relevantMaterializedViews,
- s.featureFlags,
],
(
relevantSavedQueries,
dataWarehouseSavedQueriesLoading,
relevantPosthogTables,
relevantDataWarehouseTables,
- databaseLoading,
- relevantNonMaterializedViews,
- relevantMaterializedViews,
- featureFlags
+ databaseLoading
) => [
{
key: 'data-warehouse-sources',
@@ -163,13 +141,19 @@ export const editorSidebarLogic = kea([
key: 'data-warehouse-views',
noun: ['view', 'views'],
loading: dataWarehouseSavedQueriesLoading,
- items: (featureFlags[FEATURE_FLAGS.DATA_MODELING]
- ? relevantNonMaterializedViews
- : relevantSavedQueries
- ).map(([savedQuery, matches]) => ({
+ items: relevantSavedQueries.map(([savedQuery, matches]) => ({
key: savedQuery.id,
name: savedQuery.name,
url: '',
+ icon: savedQuery.status ? (
+
+
+
+ ) : (
+
+
+
+ ),
searchMatch: matches
? {
matchingFields: matches.map((match) => match.key),
@@ -195,16 +179,6 @@ export const editorSidebarLogic = kea([
actions.toggleJoinTableModal()
},
},
- ...(featureFlags[FEATURE_FLAGS.DATA_MODELING] && !savedQuery.status
- ? [
- {
- label: 'Materialize',
- onClick: () => {
- actions.runDataWarehouseSavedQuery(savedQuery.id)
- },
- },
- ]
- : []),
{
label: 'Delete',
status: 'danger',
@@ -215,63 +189,6 @@ export const editorSidebarLogic = kea([
],
})),
} as SidebarCategory,
- ...(featureFlags[FEATURE_FLAGS.DATA_MODELING]
- ? [
- {
- key: 'data-warehouse-materialized-views',
- noun: ['materialized view', 'materialized views'],
- loading: dataWarehouseSavedQueriesLoading,
- items: relevantMaterializedViews.map(([materializedView, matches]) => ({
- key: materializedView.id,
- name: materializedView.name,
- url: '',
- searchMatch: matches
- ? {
- matchingFields: matches.map((match) => match.key),
- nameHighlightRanges: matches.find((match) => match.key === 'name')?.indices,
- }
- : null,
- onClick: () => {
- actions.selectSchema(materializedView)
- },
- menuItems: [
- {
- label: 'Edit view definition',
- onClick: () => {
- multitabEditorLogic({
- key: `hogQLQueryEditor/${router.values.location.pathname}`,
- }).actions.createTab(materializedView.query.query, materializedView)
- },
- },
- {
- label: 'Add join',
- onClick: () => {
- actions.selectSourceTable(materializedView.name)
- actions.toggleJoinTableModal()
- },
- },
- ...(featureFlags[FEATURE_FLAGS.DATA_MODELING] && materializedView.status
- ? [
- {
- label: 'Run',
- onClick: () => {
- actions.runDataWarehouseSavedQuery(materializedView.id)
- },
- },
- ]
- : []),
- {
- label: 'Delete',
- status: 'danger',
- onClick: () => {
- actions.deleteDataWarehouseSavedQuery(materializedView.id)
- },
- },
- ],
- })),
- },
- ]
- : []),
],
],
nonMaterializedViews: [
@@ -327,28 +244,6 @@ export const editorSidebarLogic = kea([
return dataWarehouseSavedQueries.map((savedQuery) => [savedQuery, null])
},
],
- relevantNonMaterializedViews: [
- (s) => [s.nonMaterializedViews, navigation3000Logic.selectors.searchTerm],
- (nonMaterializedViews, searchTerm): [DataWarehouseSavedQuery, FuseSearchMatch[] | null][] => {
- if (searchTerm) {
- return nonMaterializedViewsfuse
- .search(searchTerm)
- .map((result) => [result.item, result.matches as FuseSearchMatch[]])
- }
- return nonMaterializedViews.map((view) => [view, null])
- },
- ],
- relevantMaterializedViews: [
- (s) => [s.materializedViews, navigation3000Logic.selectors.searchTerm],
- (materializedViews, searchTerm): [DataWarehouseSavedQuery, FuseSearchMatch[] | null][] => {
- if (searchTerm) {
- return materializedViewsfuse
- .search(searchTerm)
- .map((result) => [result.item, result.matches as FuseSearchMatch[]])
- }
- return materializedViews.map((view) => [view, null])
- },
- ],
})),
subscriptions({
dataWarehouseTables: (dataWarehouseTables) => {
diff --git a/frontend/src/scenes/data-warehouse/editor/multitabEditorLogic.tsx b/frontend/src/scenes/data-warehouse/editor/multitabEditorLogic.tsx
index 740ea33aced83..94995a446ae2d 100644
--- a/frontend/src/scenes/data-warehouse/editor/multitabEditorLogic.tsx
+++ b/frontend/src/scenes/data-warehouse/editor/multitabEditorLogic.tsx
@@ -48,7 +48,12 @@ export const multitabEditorLogic = kea([
connect({
actions: [
dataWarehouseViewsLogic,
- ['deleteDataWarehouseSavedQuerySuccess', 'createDataWarehouseSavedQuerySuccess'],
+ [
+ 'loadDataWarehouseSavedQueriesSuccess',
+ 'deleteDataWarehouseSavedQuerySuccess',
+ 'createDataWarehouseSavedQuerySuccess',
+ 'runDataWarehouseSavedQuery',
+ ],
],
}),
actions({
@@ -66,13 +71,13 @@ export const multitabEditorLogic = kea([
initialize: true,
saveAsView: true,
saveAsViewSubmit: (name: string) => ({ name }),
- setMetadata: (query: string, metadata: HogQLMetadataResponse) => ({ query, metadata }),
saveAsInsight: true,
saveAsInsightSubmit: (name: string) => ({ name }),
setCacheLoading: (loading: boolean) => ({ loading }),
setError: (error: string | null) => ({ error }),
setIsValidView: (isValidView: boolean) => ({ isValidView }),
setSourceQuery: (sourceQuery: DataVisualizationNode) => ({ sourceQuery }),
+ setMetadata: (metadata: HogQLMetadataResponse) => ({ metadata }),
editView: (query: string, view: DataWarehouseSavedQuery) => ({ query, view }),
}),
propsChanged(({ actions, props }, oldProps) => {
@@ -80,7 +85,7 @@ export const multitabEditorLogic = kea([
actions.initialize()
}
}),
- reducers({
+ reducers(({ props }) => ({
cacheLoading: [
true,
{
@@ -149,7 +154,14 @@ export const multitabEditorLogic = kea([
setIsValidView: (_, { isValidView }) => isValidView,
},
],
- }),
+ metadata: [
+ null as HogQLMetadataResponse | null,
+ {
+ setMetadata: (_, { metadata }) => metadata,
+ },
+ ],
+ editorKey: [props.key],
+ })),
listeners(({ values, props, actions, asyncActions }) => ({
editView: ({ query, view }) => {
const maybeExistingTab = values.allTabs.find((tab) => tab.view?.id === view.id)
@@ -388,6 +400,15 @@ export const multitabEditorLogic = kea([
router.actions.push(urls.insightView(insight.short_id))
},
+ loadDataWarehouseSavedQueriesSuccess: ({ dataWarehouseSavedQueries }) => {
+ // keep tab views up to date
+ const newTabs = values.allTabs.map((tab) => ({
+ ...tab,
+ view: dataWarehouseSavedQueries.find((v) => v.id === tab.view?.id),
+ }))
+ actions.setTabs(newTabs)
+ actions.updateState()
+ },
deleteDataWarehouseSavedQuerySuccess: ({ payload: viewId }) => {
const tabToRemove = values.allTabs.find((tab) => tab.view?.id === viewId)
if (tabToRemove) {
@@ -412,7 +433,7 @@ export const multitabEditorLogic = kea([
lemonToast.success('View updated')
},
})),
- subscriptions(({ props, actions }) => ({
+ subscriptions(({ props, actions, values }) => ({
activeModelUri: (activeModelUri) => {
if (props.monaco) {
const _model = props.monaco.editor.getModel(activeModelUri.uri)
@@ -421,6 +442,11 @@ export const multitabEditorLogic = kea([
actions.runQuery(undefined, true)
}
},
+ allTabs: () => {
+ // keep selected tab up to date
+ const activeTab = values.allTabs.find((tab) => tab.uri.path === values.activeModelUri?.uri.path)
+ activeTab && actions.selectTab(activeTab)
+ },
})),
selectors({
exportContext: [
@@ -435,5 +461,11 @@ export const multitabEditorLogic = kea([
} as ExportContext
},
],
+ isEditingMaterializedView: [
+ (s) => [s.editingView],
+ (editingView) => {
+ return !!editingView?.status
+ },
+ ],
}),
])
diff --git a/frontend/src/scenes/data-warehouse/editor/outputPaneLogic.ts b/frontend/src/scenes/data-warehouse/editor/outputPaneLogic.ts
index 659c79b440635..4e06f611dc49d 100644
--- a/frontend/src/scenes/data-warehouse/editor/outputPaneLogic.ts
+++ b/frontend/src/scenes/data-warehouse/editor/outputPaneLogic.ts
@@ -5,6 +5,7 @@ import type { outputPaneLogicType } from './outputPaneLogicType'
export enum OutputTab {
Results = 'results',
Visualization = 'visualization',
+ Info = 'info',
}
export const outputPaneLogic = kea([
diff --git a/frontend/src/scenes/data-warehouse/saved_queries/dataWarehouseViewsLogic.tsx b/frontend/src/scenes/data-warehouse/saved_queries/dataWarehouseViewsLogic.tsx
index d66a0285526ba..ae61570189150 100644
--- a/frontend/src/scenes/data-warehouse/saved_queries/dataWarehouseViewsLogic.tsx
+++ b/frontend/src/scenes/data-warehouse/saved_queries/dataWarehouseViewsLogic.tsx
@@ -70,8 +70,13 @@ export const dataWarehouseViewsLogic = kea([
actions.loadDatabase()
},
runDataWarehouseSavedQuery: async ({ viewId }) => {
- await api.dataWarehouseSavedQueries.run(viewId)
- actions.loadDataWarehouseSavedQueries()
+ try {
+ await api.dataWarehouseSavedQueries.run(viewId)
+ lemonToast.success('Materialization started')
+ actions.loadDataWarehouseSavedQueries()
+ } catch (error) {
+ lemonToast.error(`Failed to run materialization`)
+ }
},
})),
selectors({
@@ -92,6 +97,17 @@ export const dataWarehouseViewsLogic = kea([
)
},
],
+ dataWarehouseSavedQueryMap: [
+ (s) => [s.dataWarehouseSavedQueries],
+ (dataWarehouseSavedQueries) => {
+ return (
+ dataWarehouseSavedQueries?.reduce((acc, cur) => {
+ acc[cur.name] = cur
+ return acc
+ }, {} as Record) ?? {}
+ )
+ },
+ ],
}),
events(({ actions, cache }) => ({
afterMount: () => {
diff --git a/frontend/src/scenes/experiments/ExperimentView/DataCollectionCalculator.tsx b/frontend/src/scenes/experiments/ExperimentView/DataCollectionCalculator.tsx
index 95938242c143d..e7797f03de7ba 100644
--- a/frontend/src/scenes/experiments/ExperimentView/DataCollectionCalculator.tsx
+++ b/frontend/src/scenes/experiments/ExperimentView/DataCollectionCalculator.tsx
@@ -3,10 +3,10 @@ import { LemonBanner, LemonInput, Link, Tooltip } from '@posthog/lemon-ui'
import { BindLogic, useActions, useValues } from 'kea'
import { LemonSlider } from 'lib/lemon-ui/LemonSlider'
import { humanFriendlyNumber } from 'lib/utils'
-import { insightDataLogic } from 'scenes/insights/insightDataLogic'
import { insightLogic } from 'scenes/insights/insightLogic'
import { Query } from '~/queries/Query/Query'
+import { ExperimentFunnelsQuery, ExperimentTrendsQuery, NodeKind } from '~/queries/schema'
import { ExperimentIdType, InsightType } from '~/types'
import { MetricInsightId } from '../constants'
@@ -120,7 +120,16 @@ export function DataCollectionCalculator({ experimentId }: ExperimentCalculatorP
syncWithUrl: false,
})
const { insightProps } = useValues(insightLogicInstance)
- const { query } = useValues(insightDataLogic(insightProps))
+ let query = null
+ if (experiment.metrics.length > 0) {
+ query = {
+ kind: NodeKind.InsightVizNode,
+ source:
+ metricType === InsightType.FUNNELS
+ ? (experiment.metrics[0] as ExperimentFunnelsQuery).funnels_query
+ : (experiment.metrics[0] as ExperimentTrendsQuery).count_query,
+ }
+ }
const funnelConversionRate = conversionMetrics?.totalRate * 100 || 0
diff --git a/frontend/src/scenes/funnels/funnelDataLogic.ts b/frontend/src/scenes/funnels/funnelDataLogic.ts
index 0a48c272e929b..54ed637e4d2bd 100644
--- a/frontend/src/scenes/funnels/funnelDataLogic.ts
+++ b/frontend/src/scenes/funnels/funnelDataLogic.ts
@@ -169,6 +169,7 @@ export const funnelDataLogic = kea([
if (
// TODO: Ideally we don't check filters anymore, but tests are still using this
insightData?.filters?.insight !== InsightType.FUNNELS &&
+ querySource &&
querySource?.kind !== NodeKind.FunnelsQuery
) {
return []
@@ -275,6 +276,7 @@ export const funnelDataLogic = kea([
if (
// TODO: Ideally we don't check filters anymore, but tests are still using this
insightData?.filters?.insight !== InsightType.FUNNELS &&
+ querySource &&
querySource?.kind !== NodeKind.FunnelsQuery
) {
return false
diff --git a/frontend/src/scenes/settings/organization/Members.tsx b/frontend/src/scenes/settings/organization/Members.tsx
index 3659b22c952ef..997582fa81982 100644
--- a/frontend/src/scenes/settings/organization/Members.tsx
+++ b/frontend/src/scenes/settings/organization/Members.tsx
@@ -19,7 +19,6 @@ import {
} from 'lib/utils/permissioning'
import { useEffect } from 'react'
import { twoFactorLogic } from 'scenes/authentication/twoFactorLogic'
-import { TwoFactorSetupModal } from 'scenes/authentication/TwoFactorSetupModal'
import { membersLogic } from 'scenes/organization/membersLogic'
import { organizationLogic } from 'scenes/organizationLogic'
import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic'
@@ -143,9 +142,9 @@ export function Members(): JSX.Element | null {
const { preflight } = useValues(preflightLogic)
const { user } = useValues(userLogic)
- const { setSearch, ensureAllMembersLoaded, loadAllMembers } = useActions(membersLogic)
+ const { setSearch, ensureAllMembersLoaded } = useActions(membersLogic)
const { updateOrganization } = useActions(organizationLogic)
- const { toggleTwoFactorSetupModal } = useActions(twoFactorLogic)
+ const { openTwoFactorSetupModal } = useActions(twoFactorLogic)
useEffect(() => {
ensureAllMembersLoaded()
@@ -212,14 +211,6 @@ export function Members(): JSX.Element | null {
render: function LevelRender(_, member) {
return (
<>
- {member.user.uuid == user.uuid && (
- {
- userLogic.actions.updateUser({})
- loadAllMembers()
- }}
- />
- )}
toggleTwoFactorSetupModal(true)
+ ? () => openTwoFactorSetupModal()
: undefined
}
data-attr="2fa-enabled"
diff --git a/frontend/src/scenes/settings/user/TwoFactorSettings.tsx b/frontend/src/scenes/settings/user/TwoFactorSettings.tsx
index b9e71ce8575ad..dad73b097f5a9 100644
--- a/frontend/src/scenes/settings/user/TwoFactorSettings.tsx
+++ b/frontend/src/scenes/settings/user/TwoFactorSettings.tsx
@@ -3,7 +3,6 @@ import { LemonButton, LemonModal } from '@posthog/lemon-ui'
import { useActions, useValues } from 'kea'
import { copyToClipboard } from 'lib/utils/copyToClipboard'
import { twoFactorLogic } from 'scenes/authentication/twoFactorLogic'
-import { TwoFactorSetupModal } from 'scenes/authentication/TwoFactorSetupModal'
import { membersLogic } from 'scenes/organization/membersLogic'
import { userLogic } from 'scenes/userLogic'
@@ -13,13 +12,8 @@ export function TwoFactorSettings(): JSX.Element {
const { updateUser } = useActions(userLogic)
const { loadMemberUpdates } = useActions(membersLogic)
- const {
- generateBackupCodes,
- disable2FA,
- toggleTwoFactorSetupModal,
- toggleDisable2FAModal,
- toggleBackupCodesModal,
- } = useActions(twoFactorLogic)
+ const { generateBackupCodes, disable2FA, openTwoFactorSetupModal, toggleDisable2FAModal, toggleBackupCodesModal } =
+ useActions(twoFactorLogic)
const handleSuccess = (): void => {
updateUser({})
@@ -28,8 +22,6 @@ export function TwoFactorSettings(): JSX.Element {
return (
-
-
{isDisable2FAModalOpen && (
2FA is not enabled
- toggleTwoFactorSetupModal(true)}>
+ openTwoFactorSetupModal()}>
Set up 2FA
diff --git a/package.json b/package.json
index 14bf97b7876c0..d24367e948792 100644
--- a/package.json
+++ b/package.json
@@ -161,7 +161,7 @@
"pmtiles": "^2.11.0",
"postcss": "^8.4.31",
"postcss-preset-env": "^9.3.0",
- "posthog-js": "1.200.2",
+ "posthog-js": "1.201.1",
"posthog-js-lite": "3.0.0",
"prettier": "^2.8.8",
"prop-types": "^15.7.2",
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index a1bda4e655675..ff1979dfa7ebc 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -305,8 +305,8 @@ dependencies:
specifier: ^9.3.0
version: 9.3.0(postcss@8.4.31)
posthog-js:
- specifier: 1.200.2
- version: 1.200.2
+ specifier: 1.201.1
+ version: 1.201.1
posthog-js-lite:
specifier: 3.0.0
version: 3.0.0
@@ -11799,8 +11799,8 @@ packages:
engines: {node: '>=12'}
dev: true
- /dunder-proto@1.0.0:
- resolution: {integrity: sha512-9+Sj30DIu+4KvHqMfLUGLFYL2PkURSYMVXJyXe92nFRvlYq5hBjLEhblKB+vkd/WVlUYMWigiY07T91Fkk0+4A==}
+ /dunder-proto@1.0.1:
+ resolution: {integrity: sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==}
engines: {node: '>= 0.4'}
dependencies:
call-bind-apply-helpers: 1.0.1
@@ -13308,7 +13308,7 @@ packages:
engines: {node: '>= 0.4'}
dependencies:
call-bind-apply-helpers: 1.0.1
- dunder-proto: 1.0.0
+ dunder-proto: 1.0.1
es-define-property: 1.0.1
es-errors: 1.3.0
es-object-atoms: 1.0.0
@@ -17902,8 +17902,8 @@ packages:
resolution: {integrity: sha512-dyajjnfzZD1tht4N7p7iwf7nBnR1MjVaVu+MKr+7gBgA39bn28wizCIJZztZPtHy4PY0YwtSGgwfBCuG/hnHgA==}
dev: false
- /posthog-js@1.200.2:
- resolution: {integrity: sha512-hDdnzn/FWz+lR0qoYn8TJ7UAVzJSH48ceM2rYXrrZZa8EqBKaUKLf1LWK505/s3QVjK972mbF8wjF+pRDSlwOg==}
+ /posthog-js@1.201.1:
+ resolution: {integrity: sha512-srzbJLIzGp0DirGFhadXE1BDB9JGsfaTKLNX3PWehtVf3TSd4i1nX75hQHJmqrzRkGyMNMArQAuVCQN3aWMn3A==}
dependencies:
core-js: 3.39.0
fflate: 0.4.8
diff --git a/posthog/api/test/test_feature_flag.py b/posthog/api/test/test_feature_flag.py
index efc67f79e3f40..4ae2d364d1f03 100644
--- a/posthog/api/test/test_feature_flag.py
+++ b/posthog/api/test/test_feature_flag.py
@@ -2,7 +2,6 @@
import json
from typing import Optional
from unittest.mock import call, patch
-from dateutil.relativedelta import relativedelta
from django.core.cache import cache
from django.db import connection
@@ -41,7 +40,6 @@
ClickhouseTestMixin,
FuzzyInt,
QueryMatchingTest,
- _create_event,
_create_person,
flush_persons_and_events,
snapshot_clickhouse_queries,
@@ -6130,18 +6128,6 @@ def assert_expected_response(
if expected_reason is not None:
self.assertEqual(response_data.get("reason"), expected_reason)
- def create_feature_flag_called_event(
- self, feature_flag_key: str, response: Optional[bool] = True, datetime: Optional[datetime.datetime] = None
- ):
- timestamp = datetime or now() - relativedelta(hours=12)
- _create_event(
- event="$feature_flag_called",
- distinct_id="person1",
- properties={"$feature_flag": feature_flag_key, "$feature_flag_response": response},
- team=self.team,
- timestamp=timestamp,
- )
-
def test_flag_status_reasons(self):
FeatureFlag.objects.all().delete()
@@ -6165,7 +6151,7 @@ def test_flag_status_reasons(self):
team=self.team,
active=False,
)
- self.create_feature_flag_called_event(disabled_flag.key)
+
self.assert_expected_response(disabled_flag.id, FeatureFlagStatus.ACTIVE)
# Request status for flag that has super group rolled out to <100%
@@ -6176,7 +6162,7 @@ def test_flag_status_reasons(self):
active=True,
filters={"super_groups": [{"rollout_percentage": 50, "properties": []}]},
)
- self.create_feature_flag_called_event(fifty_percent_super_group_flag.key)
+
self.assert_expected_response(fifty_percent_super_group_flag.id, FeatureFlagStatus.ACTIVE)
# Request status for flag that has super group rolled out to 100% and specific properties
@@ -6201,7 +6187,7 @@ def test_flag_status_reasons(self):
]
},
)
- self.create_feature_flag_called_event(fully_rolled_out_super_group_flag_with_properties.key)
+
self.assert_expected_response(fully_rolled_out_super_group_flag_with_properties.id, FeatureFlagStatus.ACTIVE)
# Request status for flag that has super group rolled out to 100% and has no specific properties
@@ -6231,7 +6217,7 @@ def test_flag_status_reasons(self):
active=True,
filters={"holdout_groups": [{"rollout_percentage": 50, "properties": []}]},
)
- self.create_feature_flag_called_event(fifty_percent_holdout_group_flag.key)
+
self.assert_expected_response(fifty_percent_holdout_group_flag.id, FeatureFlagStatus.ACTIVE)
# Request status for flag that has holdout group rolled out to 100% and specific properties
@@ -6256,7 +6242,7 @@ def test_flag_status_reasons(self):
]
},
)
- self.create_feature_flag_called_event(fully_rolled_out_holdout_group_flag_with_properties.key)
+
self.assert_expected_response(fully_rolled_out_holdout_group_flag_with_properties.id, FeatureFlagStatus.ACTIVE)
# Request status for flag that has holdout group rolled out to 100% and has no specific properties
@@ -6293,7 +6279,7 @@ def test_flag_status_reasons(self):
}
},
)
- self.create_feature_flag_called_event(multivariate_flag_no_rolled_out_variants.key)
+
self.assert_expected_response(multivariate_flag_no_rolled_out_variants.id, FeatureFlagStatus.ACTIVE)
# Request status for multivariate flag with no variants set to 100%
@@ -6337,7 +6323,7 @@ def test_flag_status_reasons(self):
],
},
)
- self.create_feature_flag_called_event(multivariate_flag_rolled_out_variant_no_rolled_out_release.key)
+
self.assert_expected_response(
multivariate_flag_rolled_out_variant_no_rolled_out_release.id,
FeatureFlagStatus.ACTIVE,
@@ -6361,7 +6347,7 @@ def test_flag_status_reasons(self):
],
},
)
- self.create_feature_flag_called_event(multivariate_flag_rolled_out_release_condition_half_variant.key)
+
self.assert_expected_response(
multivariate_flag_rolled_out_release_condition_half_variant.id,
FeatureFlagStatus.ACTIVE,
@@ -6396,7 +6382,7 @@ def test_flag_status_reasons(self):
],
},
)
- self.create_feature_flag_called_event(multivariate_flag_rolled_out_variant_rolled_out_filtered_release.key)
+
self.assert_expected_response(
multivariate_flag_rolled_out_variant_rolled_out_filtered_release.id,
FeatureFlagStatus.ACTIVE,
@@ -6431,7 +6417,7 @@ def test_flag_status_reasons(self):
],
},
)
- self.create_feature_flag_called_event(multivariate_flag_filtered_rolled_out_release_with_override.key)
+
self.assert_expected_response(
multivariate_flag_filtered_rolled_out_release_with_override.id,
FeatureFlagStatus.ACTIVE,
@@ -6509,7 +6495,7 @@ def test_flag_status_reasons(self):
],
},
)
- self.create_feature_flag_called_event(boolean_flag_no_rolled_out_release_conditions.key)
+
self.assert_expected_response(
boolean_flag_no_rolled_out_release_conditions.id,
FeatureFlagStatus.ACTIVE,
@@ -6570,39 +6556,7 @@ def test_flag_status_reasons(self):
],
},
)
- self.create_feature_flag_called_event(boolean_flag_no_rolled_out_release_condition_recently_evaluated.key)
- self.assert_expected_response(
- boolean_flag_no_rolled_out_release_condition_recently_evaluated.id, FeatureFlagStatus.ACTIVE
- )
- # Request status for a boolean flag with no rolled out release conditions, and has
- # been called, but not recently
- boolean_flag_rolled_out_release_condition_not_recently_evaluated = FeatureFlag.objects.create(
- name="Boolean flag with a release condition set to 100%",
- key="boolean-not-recently-evaluated-flag",
- team=self.team,
- active=True,
- filters={
- "groups": [
- {
- "properties": [
- {
- "key": "name",
- "type": "person",
- "value": ["Smith"],
- "operator": "contains",
- }
- ],
- "rollout_percentage": 50,
- },
- ],
- },
- )
- self.create_feature_flag_called_event(
- boolean_flag_rolled_out_release_condition_not_recently_evaluated.key, True, now() - relativedelta(days=31)
- )
self.assert_expected_response(
- boolean_flag_rolled_out_release_condition_not_recently_evaluated.id,
- FeatureFlagStatus.INACTIVE,
- "Flag has not been evaluated recently",
+ boolean_flag_no_rolled_out_release_condition_recently_evaluated.id, FeatureFlagStatus.ACTIVE
)
diff --git a/posthog/cdp/site_functions.py b/posthog/cdp/site_functions.py
index 690dc136ea577..f6ece18e28792 100644
--- a/posthog/cdp/site_functions.py
+++ b/posthog/cdp/site_functions.py
@@ -92,7 +92,7 @@ def get_transpiled_function(hog_function: HogFunction) -> str:
"""
let processEvent = undefined;
if ('onEvent' in source) {
- processEvent = function processEvent(globals) {
+ processEvent = function processEvent(globals, posthog) {
if (!('onEvent' in source)) { return; };
const inputs = buildInputs(globals);
const filterGlobals = { ...globals.groups, ...globals.event, person: globals.person, inputs, pdi: { distinct_id: globals.event.distinct_id, person: globals.person } };
@@ -123,7 +123,7 @@ def get_transpiled_function(hog_function: HogFunction) -> str:
}
return {
- processEvent: processEvent
+ processEvent: (globals) => processEvent(globals, posthog)
}
}
diff --git a/posthog/cdp/test/test_site_functions.py b/posthog/cdp/test/test_site_functions.py
index 9370cb7266740..0b6c0bc1bb8a6 100644
--- a/posthog/cdp/test/test_site_functions.py
+++ b/posthog/cdp/test/test_site_functions.py
@@ -1,3 +1,4 @@
+import json
import subprocess
import tempfile
from inline_snapshot import snapshot
@@ -71,7 +72,7 @@ def test_get_transpiled_function_basic(self):
};return exports;})();
let processEvent = undefined;
if ('onEvent' in source) {
- processEvent = function processEvent(globals) {
+ processEvent = function processEvent(globals, posthog) {
if (!('onEvent' in source)) { return; };
const inputs = buildInputs(globals);
const filterGlobals = { ...globals.groups, ...globals.event, person: globals.person, inputs, pdi: { distinct_id: globals.event.distinct_id, person: globals.person } };
@@ -98,7 +99,7 @@ def test_get_transpiled_function_basic(self):
}
return {
- processEvent: processEvent
+ processEvent: (globals) => processEvent(globals, posthog)
}
}
@@ -129,12 +130,12 @@ def test_get_transpiled_function_with_template_input(self):
assert '__getGlobal("person")' in result
def test_get_transpiled_function_with_filters(self):
- self.hog_function.hog = "export function onEvent(event) { console.log(event.event); }"
+ self.hog_function.hog = "export function onEvent(globals) { console.log(globals); }"
self.hog_function.filters = {"events": [{"id": "$pageview", "name": "$pageview", "type": "events", "order": 0}]}
result = self.compile_and_run()
- assert "console.log(event.event);" in result
+ assert "console.log(globals);" in result
assert "const filterMatches = " in result
assert '__getGlobal("event") == "$pageview"' in result
assert "const filterMatches = !!(!!((__getGlobal" in result
@@ -249,7 +250,7 @@ def test_get_transpiled_function_with_complex_filters(self):
action.steps = [{"event": "$pageview", "url": "https://example.com"}] # type: ignore
action.save()
- self.hog_function.hog = "export function onEvent(event) { console.log(event.event); }"
+ self.hog_function.hog = "export function onEvent(globals) { console.log(globals); }"
self.hog_function.filters = {
"events": [{"id": "$pageview", "name": "$pageview", "type": "events"}],
"actions": [{"id": str(action.pk), "name": "Test Action", "type": "actions"}],
@@ -258,7 +259,7 @@ def test_get_transpiled_function_with_complex_filters(self):
result = self.compile_and_run()
- assert "console.log(event.event);" in result
+ assert "console.log(globals);" in result
assert "const filterMatches = " in result
assert '__getGlobal("event") == "$pageview"' in result
assert "https://example.com" in result
@@ -283,3 +284,63 @@ def test_get_transpiled_function_with_mappings(self):
assert 'if (!!(!!((__getGlobal("event") == "$autocapture")))) {' in result
assert "const newInputs = structuredClone(inputs);" in result
assert 'newInputs["greeting"] = concat("Hallo, ", __getProperty' in result
+
+ def test_run_function_onload(self):
+ self.hog_function.hog = "export function onLoad({ inputs, posthog }) { console.log(inputs.message); }"
+ self.hog_function.filters = {"events": [{"id": "$pageview", "name": "$pageview", "type": "events", "order": 0}]}
+ self.hog_function.inputs = {"message": {"value": "Hello World {person.properties.name}"}}
+
+ result = self.compile_and_run()
+ assert "Hello World" in result
+
+ response = self._execute_javascript(
+ result
+ + "().init({ posthog: { get_property: () => ({name: 'Bob'}) }, callback: () => { console.log('Loaded') } })"
+ )
+ assert "Hello World Bob\nLoaded" == response.strip()
+
+ def test_run_function_onevent(self):
+ self.hog_function.hog = "export function onEvent({ inputs }) { console.log(inputs.message); }"
+ # self.hog_function.filters = {"events": [{"id": "$pageview", "name": "$pageview", "type": "events", "order": 0}]}
+ self.hog_function.inputs = {"message": {"value": "Hello World {event.properties.id}"}}
+ self.hog_function.mappings = [
+ {
+ "inputs": {"greeting": {"value": "Hallo, {person.properties.nonexistent_property}!"}},
+ "filters": {"events": [{"id": "$pageview", "name": "$pageview", "type": "events"}]},
+ }
+ ]
+
+ result = self.compile_and_run()
+ assert "Hello World" in result
+
+ globals = {
+ "event": {"event": "$pageview", "properties": {"id": "banana"}},
+ "groups": {},
+ "person": {"properties": {"name": "Bob"}},
+ }
+ response = self._execute_javascript(
+ result
+ + "().init({ posthog: { get_property: () => ({name: 'Bob'}) }, callback: () => { console.log('Loaded') } }).processEvent("
+ + json.dumps(globals)
+ + ")"
+ )
+ assert "Loaded\nHello World banana" == response.strip()
+
+ globals = {
+ "event": {"event": "$autocapture", "properties": {"id": "banana"}},
+ "groups": {},
+ "person": {"properties": {"name": "Bob"}},
+ }
+ response = self._execute_javascript(
+ result
+ + "().init({ posthog: { get_property: () => ({name: 'Bob'}) }, callback: () => { console.log('Loaded') } }).processEvent("
+ + json.dumps(globals)
+ + ")"
+ )
+ assert "Loaded" == response.strip()
+
+ def _execute_javascript(self, js) -> str:
+ with tempfile.NamedTemporaryFile(delete=False) as f:
+ f.write(js.encode("utf-8"))
+ f.flush()
+ return subprocess.check_output(["node", f.name]).decode("utf-8")
diff --git a/posthog/clickhouse/migrations/0095_add_snapshot_library_tracking.py b/posthog/clickhouse/migrations/0095_add_snapshot_library_tracking.py
new file mode 100644
index 0000000000000..42391c66fa059
--- /dev/null
+++ b/posthog/clickhouse/migrations/0095_add_snapshot_library_tracking.py
@@ -0,0 +1,26 @@
+from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions
+from posthog.session_recordings.sql.session_replay_event_migrations_sql import (
+ DROP_SESSION_REPLAY_EVENTS_TABLE_MV_SQL,
+ DROP_KAFKA_SESSION_REPLAY_EVENTS_TABLE_SQL,
+ ADD_LIBRARY_SESSION_REPLAY_EVENTS_TABLE_SQL,
+ ADD_LIBRARY_WRITABLE_SESSION_REPLAY_EVENTS_TABLE_SQL,
+ ADD_LIBRARY_DISTRIBUTED_SESSION_REPLAY_EVENTS_TABLE_SQL,
+)
+from posthog.session_recordings.sql.session_replay_event_sql import (
+ SESSION_REPLAY_EVENTS_TABLE_MV_SQL,
+ KAFKA_SESSION_REPLAY_EVENTS_TABLE_SQL,
+)
+
+operations = [
+ # we have to drop materialized view first so that we're no longer pulling from kafka
+ # then we drop the kafka table
+ run_sql_with_exceptions(DROP_SESSION_REPLAY_EVENTS_TABLE_MV_SQL()),
+ run_sql_with_exceptions(DROP_KAFKA_SESSION_REPLAY_EVENTS_TABLE_SQL()),
+ # now we can alter the target tables
+ run_sql_with_exceptions(ADD_LIBRARY_SESSION_REPLAY_EVENTS_TABLE_SQL()),
+ run_sql_with_exceptions(ADD_LIBRARY_DISTRIBUTED_SESSION_REPLAY_EVENTS_TABLE_SQL()),
+ run_sql_with_exceptions(ADD_LIBRARY_WRITABLE_SESSION_REPLAY_EVENTS_TABLE_SQL()),
+ # and then recreate the materialized views and kafka tables
+ run_sql_with_exceptions(KAFKA_SESSION_REPLAY_EVENTS_TABLE_SQL()),
+ run_sql_with_exceptions(SESSION_REPLAY_EVENTS_TABLE_MV_SQL()),
+]
diff --git a/posthog/clickhouse/test/__snapshots__/test_schema.ambr b/posthog/clickhouse/test/__snapshots__/test_schema.ambr
index b559f673293ec..406f73008fd28 100644
--- a/posthog/clickhouse/test/__snapshots__/test_schema.ambr
+++ b/posthog/clickhouse/test/__snapshots__/test_schema.ambr
@@ -481,7 +481,8 @@
size Int64,
event_count Int64,
message_count Int64,
- snapshot_source LowCardinality(Nullable(String))
+ snapshot_source LowCardinality(Nullable(String)),
+ snapshot_library Nullable(String)
) ENGINE = Kafka('test.kafka.broker:9092', 'clickhouse_session_replay_events_test', 'group1', 'JSONEachRow')
'''
@@ -1496,7 +1497,8 @@
size Int64,
event_count Int64,
message_count Int64,
- snapshot_source LowCardinality(Nullable(String))
+ snapshot_source LowCardinality(Nullable(String)),
+ snapshot_library Nullable(String)
) ENGINE = Kafka('kafka:9092', 'clickhouse_session_replay_events_test', 'group1', 'JSONEachRow')
'''
@@ -2198,8 +2200,10 @@
-- often very useful in incidents or debugging
-- because we batch events we expect message_count to be lower than event_count
event_count SimpleAggregateFunction(sum, Int64),
- -- which source the snapshots came from Android, iOS, Mobile, Web. Web if absent
+ -- which source the snapshots came from Mobile or Web. Web if absent
snapshot_source AggregateFunction(argMin, LowCardinality(Nullable(String)), DateTime64(6, 'UTC')),
+ -- knowing something is mobile isn't enough, we need to know if e.g. RN or flutter
+ snapshot_library AggregateFunction(argMin, Nullable(String), DateTime64(6, 'UTC')),
_timestamp SimpleAggregateFunction(max, DateTime)
) ENGINE = Distributed('posthog', 'posthog_test', 'sharded_session_replay_events', sipHash64(distinct_id))
@@ -2221,6 +2225,7 @@
`console_error_count` Int64, `size` Int64, `message_count` Int64,
`event_count` Int64,
`snapshot_source` AggregateFunction(argMin, LowCardinality(Nullable(String)), DateTime64(6, 'UTC')),
+ `snapshot_library` AggregateFunction(argMin, Nullable(String), DateTime64(6, 'UTC')),
`_timestamp` Nullable(DateTime)
)
AS SELECT
@@ -2252,6 +2257,7 @@
sum(message_count) as message_count,
sum(event_count) as event_count,
argMinState(snapshot_source, first_timestamp) as snapshot_source,
+ argMinState(snapshot_library, first_timestamp) as snapshot_library,
max(_timestamp) as _timestamp
FROM posthog_test.kafka_session_replay_events
group by session_id, team_id
@@ -2787,8 +2793,10 @@
-- often very useful in incidents or debugging
-- because we batch events we expect message_count to be lower than event_count
event_count SimpleAggregateFunction(sum, Int64),
- -- which source the snapshots came from Android, iOS, Mobile, Web. Web if absent
+ -- which source the snapshots came from Mobile or Web. Web if absent
snapshot_source AggregateFunction(argMin, LowCardinality(Nullable(String)), DateTime64(6, 'UTC')),
+ -- knowing something is mobile isn't enough, we need to know if e.g. RN or flutter
+ snapshot_library AggregateFunction(argMin, Nullable(String), DateTime64(6, 'UTC')),
_timestamp SimpleAggregateFunction(max, DateTime)
) ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/77f1df52-4b43-11e9-910f-b8ca3a9b9f3e_{shard}/posthog.session_replay_events', '{replica}')
@@ -3977,8 +3985,10 @@
-- often very useful in incidents or debugging
-- because we batch events we expect message_count to be lower than event_count
event_count SimpleAggregateFunction(sum, Int64),
- -- which source the snapshots came from Android, iOS, Mobile, Web. Web if absent
+ -- which source the snapshots came from Mobile or Web. Web if absent
snapshot_source AggregateFunction(argMin, LowCardinality(Nullable(String)), DateTime64(6, 'UTC')),
+ -- knowing something is mobile isn't enough, we need to know if e.g. RN or flutter
+ snapshot_library AggregateFunction(argMin, Nullable(String), DateTime64(6, 'UTC')),
_timestamp SimpleAggregateFunction(max, DateTime)
) ENGINE = ReplicatedAggregatingMergeTree('/clickhouse/tables/77f1df52-4b43-11e9-910f-b8ca3a9b9f3e_{shard}/posthog.session_replay_events', '{replica}')
diff --git a/posthog/hogql/metadata.py b/posthog/hogql/metadata.py
index de309adf441b7..ea82d78a0f0ba 100644
--- a/posthog/hogql/metadata.py
+++ b/posthog/hogql/metadata.py
@@ -26,6 +26,7 @@
HogQLMetadataResponse,
HogQLNotice,
)
+from posthog.hogql.visitor import TraversingVisitor
def get_hogql_metadata(
@@ -39,6 +40,7 @@ def get_hogql_metadata(
errors=[],
warnings=[],
notices=[],
+ table_names=[],
)
query_modifiers = create_default_modifiers_for_team(team)
@@ -71,6 +73,8 @@ def get_hogql_metadata(
if query.variables:
select_ast = replace_variables(select_ast, list(query.variables.values()), team)
_is_valid_view = is_valid_view(select_ast)
+ table_names = get_table_names(select_ast)
+ response.table_names = table_names
response.isValidView = _is_valid_view
print_ast(
select_ast,
@@ -138,3 +142,28 @@ def is_valid_view(select_query: ast.SelectQuery | ast.SelectSetQuery) -> bool:
if field.chain and field.chain[-1] == "*":
return False
return True
+
+
+def get_table_names(select_query: ast.SelectQuery | ast.SelectSetQuery) -> list[str]:
+ # Don't need types, we're only interested in the table names as passed in
+ collector = TableCollector()
+ collector.visit(select_query)
+ return list(collector.table_names - collector.ctes)
+
+
+class TableCollector(TraversingVisitor):
+ def __init__(self):
+ self.table_names = set()
+ self.ctes = set()
+
+ def visit_cte(self, node: ast.CTE):
+ self.ctes.add(node.name)
+ super().visit(node.expr)
+
+ def visit_join_expr(self, node: ast.JoinExpr):
+ if isinstance(node.table, ast.Field):
+ self.table_names.add(node.table.chain[0])
+ else:
+ self.visit(node.table)
+
+ self.visit(node.next_join)
diff --git a/posthog/hogql/test/test_metadata.py b/posthog/hogql/test/test_metadata.py
index ca617283b51be..720d71bebb333 100644
--- a/posthog/hogql/test/test_metadata.py
+++ b/posthog/hogql/test/test_metadata.py
@@ -464,3 +464,52 @@ def test_is_valid_view_is_false_when_using_scoped_asterisk(self):
"errors": [],
},
)
+
+ def test_table_collector_basic_select(self):
+ metadata = self._select("SELECT event FROM events")
+ self.assertEqual(metadata.table_names, ["events"])
+
+ def test_table_collector_multiple_tables(self):
+ metadata = self._select(
+ "SELECT events.event, persons.name FROM events JOIN persons ON events.person_id = persons.id"
+ )
+ self.assertEqual(sorted(metadata.table_names or []), sorted(["events", "persons"]))
+
+ def test_table_collector_with_cte(self):
+ metadata = self._select("""
+ WITH events_count AS (
+ SELECT count(*) as count FROM events
+ )
+ SELECT * FROM events_count
+ """)
+ self.assertEqual(sorted(metadata.table_names or []), sorted(["events"]))
+
+ def test_table_collector_subquery(self):
+ metadata = self._select("""
+ SELECT * FROM (
+ SELECT event FROM events
+ UNION ALL
+ SELECT event FROM events_summary
+ )
+ """)
+ self.assertEqual(sorted(metadata.table_names or []), sorted(["events", "events_summary"]))
+
+ def test_table_in_filter(self):
+ metadata = self._select("SELECT * FROM events WHERE event IN (SELECT event FROM events_summary)")
+ self.assertEqual(sorted(metadata.table_names or []), sorted(["events", "events_summary"]))
+
+ def test_table_collector_complex_query(self):
+ metadata = self._select("""
+ WITH user_counts AS (
+ SELECT person_id, count(*) as count
+ FROM events
+ GROUP BY person_id
+ )
+ SELECT
+ p.name,
+ uc.count
+ FROM persons p
+ LEFT JOIN user_counts uc ON p.id = uc.person_id
+ LEFT JOIN cohorts c ON p.cohort_id = c.id
+ """)
+ self.assertEqual(sorted(metadata.table_names or []), sorted(["events", "persons", "cohorts"]))
diff --git a/posthog/models/feature_flag/flag_status.py b/posthog/models/feature_flag/flag_status.py
index fa7ad52929304..18caaa5814a5a 100644
--- a/posthog/models/feature_flag/flag_status.py
+++ b/posthog/models/feature_flag/flag_status.py
@@ -23,6 +23,7 @@ class FeatureFlagStatus(StrEnum):
# - ACTIVE: The feature flag is actively evaluated and the evaluations continue to vary.
# - STALE: The feature flag has been fully rolled out to users. Its evaluations can not vary.
# - INACTIVE: The feature flag is not being actively evaluated. STALE takes precedence over INACTIVE.
+# NOTE: The "inactive" status is not currently used, but may be used in the future to automatically archive flags.
# - DELETED: The feature flag has been soft deleted.
# - UNKNOWN: The feature flag is not found in the database.
class FeatureFlagStatusChecker:
@@ -49,10 +50,6 @@ def get_status(self) -> tuple[FeatureFlagStatus, FeatureFlagStatusReason]:
if is_flag_fully_rolled_out:
return FeatureFlagStatus.STALE, fully_rolled_out_explanation
- # Final, and most expensive check: see if the flag has been evaluated recently.
- if self.is_flag_unevaluated_recently(flag):
- return FeatureFlagStatus.INACTIVE, "Flag has not been evaluated recently"
-
return FeatureFlagStatus.ACTIVE, "Flag is not fully rolled out and may still be active"
def is_flag_fully_rolled_out(self, flag: FeatureFlag) -> tuple[bool, FeatureFlagStatusReason]:
diff --git a/posthog/models/remote_config.py b/posthog/models/remote_config.py
index 08eaa81fc8a41..5ffc726d0d1c2 100644
--- a/posthog/models/remote_config.py
+++ b/posthog/models/remote_config.py
@@ -7,6 +7,7 @@
from django.http import HttpRequest
from django.utils import timezone
from prometheus_client import Counter
+import requests
from sentry_sdk import capture_exception
import structlog
@@ -38,6 +39,12 @@
labelnames=["result"],
)
+REMOTE_CONFIG_CDN_PURGE_COUNTER = Counter(
+ "posthog_remote_config_cdn_purge",
+ "Number of times the remote config CDN purge task has been run",
+ labelnames=["result"],
+)
+
logger = structlog.get_logger(__name__)
@@ -355,6 +362,8 @@ def sync(self):
cache.set(cache_key_for_team_token(self.team.api_token, "config"), config, timeout=CACHE_TIMEOUT)
+ self._purge_cdn()
+
# TODO: Invalidate caches - in particular this will be the Cloudflare CDN cache
self.synced_at = timezone.now()
self.save()
@@ -366,6 +375,37 @@ def sync(self):
CELERY_TASK_REMOTE_CONFIG_SYNC.labels(result="failure").inc()
raise
+ def _purge_cdn(self):
+ if (
+ not settings.REMOTE_CONFIG_CDN_PURGE_ENDPOINT
+ or not settings.REMOTE_CONFIG_CDN_PURGE_TOKEN
+ or not settings.REMOTE_CONFIG_CDN_PURGE_DOMAINS
+ ):
+ return
+
+ logger.info(f"Purging CDN for team {self.team_id}")
+
+ data: dict[str, Any] = {"files": []}
+
+ for domain in settings.REMOTE_CONFIG_CDN_PURGE_DOMAINS:
+ # Check if the domain starts with https:// and if not add it
+ full_domain = domain if domain.startswith("https://") else f"https://{domain}"
+ data["files"].append({"url": f"{full_domain}/array/{self.team.api_token}/config"})
+ data["files"].append({"url": f"{full_domain}/array/{self.team.api_token}/config.js"})
+ data["files"].append({"url": f"{full_domain}/array/{self.team.api_token}/array.js"})
+
+ try:
+ requests.post(
+ settings.REMOTE_CONFIG_CDN_PURGE_ENDPOINT,
+ headers={"Authorization": f"Bearer {settings.REMOTE_CONFIG_CDN_PURGE_TOKEN}"},
+ data=data,
+ )
+ except Exception:
+ logger.exception(f"Failed to purge CDN for team {self.team_id}")
+ REMOTE_CONFIG_CDN_PURGE_COUNTER.labels(result="failure").inc()
+ else:
+ REMOTE_CONFIG_CDN_PURGE_COUNTER.labels(result="success").inc()
+
def __str__(self):
return f"RemoteConfig {self.team_id}"
diff --git a/posthog/models/test/test_remote_config.py b/posthog/models/test/test_remote_config.py
index fa03badeca141..d9565e2422ddb 100644
--- a/posthog/models/test/test_remote_config.py
+++ b/posthog/models/test/test_remote_config.py
@@ -440,6 +440,29 @@ def test_only_includes_recording_for_approved_domains(self):
config = self.remote_config.get_config_via_token(self.team.api_token, request=mock_request)
assert not config["sessionRecording"]
+ @patch("posthog.models.remote_config.requests.post")
+ def test_purges_cdn_cache_on_sync(self, mock_post):
+ with self.settings(
+ REMOTE_CONFIG_CDN_PURGE_ENDPOINT="https://api.cloudflare.com/client/v4/zones/MY_ZONE_ID/purge_cache",
+ REMOTE_CONFIG_CDN_PURGE_TOKEN="MY_TOKEN",
+ REMOTE_CONFIG_CDN_PURGE_DOMAINS=["cdn.posthog.com", "https://cdn2.posthog.com"],
+ ):
+ self.remote_config.sync()
+ mock_post.assert_called_once_with(
+ "https://api.cloudflare.com/client/v4/zones/MY_ZONE_ID/purge_cache",
+ headers={"Authorization": "Bearer MY_TOKEN"},
+ data={
+ "files": [
+ {"url": "https://cdn.posthog.com/array/phc_12345/config"},
+ {"url": "https://cdn.posthog.com/array/phc_12345/config.js"},
+ {"url": "https://cdn.posthog.com/array/phc_12345/array.js"},
+ {"url": "https://cdn2.posthog.com/array/phc_12345/config"},
+ {"url": "https://cdn2.posthog.com/array/phc_12345/config.js"},
+ {"url": "https://cdn2.posthog.com/array/phc_12345/array.js"},
+ ]
+ },
+ )
+
class TestRemoteConfigJS(_RemoteConfigBase):
def test_renders_js_including_config(self):
@@ -677,7 +700,7 @@ def test_renders_js_including_site_functions(self):
const source = (function () {let exports={};"use strict";;return exports;})();
let processEvent = undefined;
if ('onEvent' in source) {
- processEvent = function processEvent(globals) {
+ processEvent = function processEvent(globals, posthog) {
if (!('onEvent' in source)) { return; };
const inputs = buildInputs(globals);
const filterGlobals = { ...globals.groups, ...globals.event, person: globals.person, inputs, pdi: { distinct_id: globals.event.distinct_id, person: globals.person } };
@@ -704,7 +727,7 @@ def test_renders_js_including_site_functions(self):
}
return {
- processEvent: processEvent
+ processEvent: (globals) => processEvent(globals, posthog)
}
}
@@ -723,7 +746,7 @@ def test_renders_js_including_site_functions(self):
const source = (function () {let exports={};"use strict";;return exports;})();
let processEvent = undefined;
if ('onEvent' in source) {
- processEvent = function processEvent(globals) {
+ processEvent = function processEvent(globals, posthog) {
if (!('onEvent' in source)) { return; };
const inputs = buildInputs(globals);
const filterGlobals = { ...globals.groups, ...globals.event, person: globals.person, inputs, pdi: { distinct_id: globals.event.distinct_id, person: globals.person } };
@@ -750,7 +773,7 @@ def test_renders_js_including_site_functions(self):
}
return {
- processEvent: processEvent
+ processEvent: (globals) => processEvent(globals, posthog)
}
}
diff --git a/posthog/schema.py b/posthog/schema.py
index 4bc3d81952e91..c46efd4a36499 100644
--- a/posthog/schema.py
+++ b/posthog/schema.py
@@ -1354,6 +1354,7 @@ class QueryResponseAlternative7(BaseModel):
isValidView: Optional[bool] = None
notices: list[HogQLNotice]
query: Optional[str] = None
+ table_names: Optional[list[str]] = None
warnings: list[HogQLNotice]
@@ -2286,6 +2287,7 @@ class HogQLMetadataResponse(BaseModel):
isValidView: Optional[bool] = None
notices: list[HogQLNotice]
query: Optional[str] = None
+ table_names: Optional[list[str]] = None
warnings: list[HogQLNotice]
diff --git a/posthog/session_recordings/sql/session_replay_event_migrations_sql.py b/posthog/session_recordings/sql/session_replay_event_migrations_sql.py
index 0d875e0e4d9ef..684206e6aef5b 100644
--- a/posthog/session_recordings/sql/session_replay_event_migrations_sql.py
+++ b/posthog/session_recordings/sql/session_replay_event_migrations_sql.py
@@ -135,3 +135,24 @@
table_name=SESSION_REPLAY_EVENTS_DATA_TABLE(),
cluster=settings.CLICKHOUSE_CLUSTER,
)
+
+# migration to add library column to the session replay table
+ALTER_SESSION_REPLAY_ADD_LIBRARY_COLUMN = """
+ ALTER TABLE {table_name} on CLUSTER '{cluster}'
+ ADD COLUMN IF NOT EXISTS snapshot_library AggregateFunction(argMin, Nullable(String), DateTime64(6, 'UTC'))
+"""
+
+ADD_LIBRARY_DISTRIBUTED_SESSION_REPLAY_EVENTS_TABLE_SQL = lambda: ALTER_SESSION_REPLAY_ADD_LIBRARY_COLUMN.format(
+ table_name="session_replay_events",
+ cluster=settings.CLICKHOUSE_CLUSTER,
+)
+
+ADD_LIBRARY_WRITABLE_SESSION_REPLAY_EVENTS_TABLE_SQL = lambda: ALTER_SESSION_REPLAY_ADD_LIBRARY_COLUMN.format(
+ table_name="writable_session_replay_events",
+ cluster=settings.CLICKHOUSE_CLUSTER,
+)
+
+ADD_LIBRARY_SESSION_REPLAY_EVENTS_TABLE_SQL = lambda: ALTER_SESSION_REPLAY_ADD_LIBRARY_COLUMN.format(
+ table_name=SESSION_REPLAY_EVENTS_DATA_TABLE(),
+ cluster=settings.CLICKHOUSE_CLUSTER,
+)
diff --git a/posthog/session_recordings/sql/session_replay_event_sql.py b/posthog/session_recordings/sql/session_replay_event_sql.py
index 91dd2ff191fe9..09c8f343540e8 100644
--- a/posthog/session_recordings/sql/session_replay_event_sql.py
+++ b/posthog/session_recordings/sql/session_replay_event_sql.py
@@ -35,7 +35,8 @@
size Int64,
event_count Int64,
message_count Int64,
- snapshot_source LowCardinality(Nullable(String))
+ snapshot_source LowCardinality(Nullable(String)),
+ snapshot_library Nullable(String)
) ENGINE = {engine}
"""
@@ -75,8 +76,10 @@
-- often very useful in incidents or debugging
-- because we batch events we expect message_count to be lower than event_count
event_count SimpleAggregateFunction(sum, Int64),
- -- which source the snapshots came from Android, iOS, Mobile, Web. Web if absent
+ -- which source the snapshots came from Mobile or Web. Web if absent
snapshot_source AggregateFunction(argMin, LowCardinality(Nullable(String)), DateTime64(6, 'UTC')),
+ -- knowing something is mobile isn't enough, we need to know if e.g. RN or flutter
+ snapshot_library AggregateFunction(argMin, Nullable(String), DateTime64(6, 'UTC')),
_timestamp SimpleAggregateFunction(max, DateTime)
) ENGINE = {engine}
"""
@@ -147,6 +150,7 @@
sum(message_count) as message_count,
sum(event_count) as event_count,
argMinState(snapshot_source, first_timestamp) as snapshot_source,
+argMinState(snapshot_library, first_timestamp) as snapshot_library,
max(_timestamp) as _timestamp
FROM {database}.kafka_session_replay_events
group by session_id, team_id
@@ -169,6 +173,7 @@
`console_error_count` Int64, `size` Int64, `message_count` Int64,
`event_count` Int64,
`snapshot_source` AggregateFunction(argMin, LowCardinality(Nullable(String)), DateTime64(6, 'UTC')),
+`snapshot_library` AggregateFunction(argMin, Nullable(String), DateTime64(6, 'UTC')),
`_timestamp` Nullable(DateTime)
)""",
)
diff --git a/posthog/settings/web.py b/posthog/settings/web.py
index cca19f6221a50..49c68b0adb978 100644
--- a/posthog/settings/web.py
+++ b/posthog/settings/web.py
@@ -398,3 +398,8 @@
# disables frontend side navigation hooks to make hot-reload work seamlessly
DEV_DISABLE_NAVIGATION_HOOKS = get_from_env("DEV_DISABLE_NAVIGATION_HOOKS", False, type_cast=bool)
+
+
+REMOTE_CONFIG_CDN_PURGE_ENDPOINT = get_from_env("REMOTE_CONFIG_CDN_PURGE_ENDPOINT", "")
+REMOTE_CONFIG_CDN_PURGE_TOKEN = get_from_env("REMOTE_CONFIG_CDN_PURGE_TOKEN", "")
+REMOTE_CONFIG_CDN_PURGE_DOMAINS = get_list(os.getenv("REMOTE_CONFIG_CDN_PURGE_DOMAINS", ""))
diff --git a/posthog/test/base.py b/posthog/test/base.py
index 5598b1892eaac..dc0760f4a3b5b 100644
--- a/posthog/test/base.py
+++ b/posthog/test/base.py
@@ -792,6 +792,14 @@ def _execute_migration_with_snapshots(self, executor):
def setUpBeforeMigration(self, apps):
pass
+ @classmethod
+ def tearDownClass(cls):
+ super().tearDownClass() # type: ignore
+ executor = MigrationExecutor(connection) # Reset Django's migration state
+ targets = executor.loader.graph.leaf_nodes()
+ executor.migrate(targets) # Migrate to the latest migration
+ executor.loader.build_graph() # Reload.
+
class TestMigrations(BaseTestMigrations, BaseTest):
"""