{thread.map((message, index) => {
if (isHumanMessage(message)) {
return (
@@ -51,6 +60,31 @@ export function Thread(): JSX.Element | null {
)
}
+ if (isFailureMessage(message)) {
+ return (
+
}
+ size="small"
+ className="mt-2"
+ type="secondary"
+ onClick={() => retryLastMessage()}
+ >
+ Try again
+
+ )
+ }
+ >
+ {message.content ||
Max has failed to generate an answer. Please try again.}
+
+ )
+ }
+
return null
})}
{threadLoading && (
@@ -65,23 +99,27 @@ export function Thread(): JSX.Element | null {
)
}
-const Message = React.forwardRef
>(
- function Message({ type, children, className }, ref): JSX.Element {
- if (type === AssistantMessageType.Human) {
- return (
-
- {children}
-
- )
- }
-
+const Message = React.forwardRef<
+ HTMLDivElement,
+ React.PropsWithChildren<{ type: 'human' | 'ai'; className?: string; action?: React.ReactNode }>
+>(function Message({ type, children, className, action }, ref): JSX.Element {
+ if (type === AssistantMessageType.Human) {
return (
-
+
{children}
)
}
-)
+
+ return (
+
+
+ {children}
+
+ {action}
+
+ )
+})
function Answer({
message,
@@ -107,7 +145,17 @@ function Answer({
return (
<>
{message.reasoning_steps && (
-
+ } status="warning" size="small">
+ Max is generating this answer one more time because the previous attempt has failed.
+
+ )
+ }
+ className={status === 'error' ? 'border-warning' : undefined}
+ >
{message.reasoning_steps.map((step, index) => (
- {step}
diff --git a/frontend/src/scenes/max/__mocks__/chatResponse.mocks.ts b/frontend/src/scenes/max/__mocks__/chatResponse.mocks.ts
index 18b82c1947dc6..3bd38eb1e62cc 100644
--- a/frontend/src/scenes/max/__mocks__/chatResponse.mocks.ts
+++ b/frontend/src/scenes/max/__mocks__/chatResponse.mocks.ts
@@ -1,3 +1,25 @@
+import { AssistantGenerationStatusEvent, AssistantGenerationStatusType } from '~/queries/schema'
+
import chatResponse from './chatResponse.json'
+import failureResponse from './failureResponse.json'
+
+function generateChunk(events: string[]): string {
+ return events.map((event) => (event.startsWith('event:') ? `${event}\n` : `${event}\n\n`)).join('')
+}
+
+export const chatResponseChunk = generateChunk(['event: message', `data: ${JSON.stringify(chatResponse)}`])
+
+const generationFailure: AssistantGenerationStatusEvent = { type: AssistantGenerationStatusType.GenerationError }
+const responseWithReasoningStepsOnly = {
+ ...chatResponse,
+ answer: null,
+}
+
+export const generationFailureChunk = generateChunk([
+ 'event: message',
+ `data: ${JSON.stringify(responseWithReasoningStepsOnly)}`,
+ 'event: status',
+ `data: ${JSON.stringify(generationFailure)}`,
+])
-export const chatResponseChunk = `data: ${JSON.stringify(chatResponse)}\n\n`
+export const failureChunk = generateChunk(['event: message', `data: ${JSON.stringify(failureResponse)}`])
diff --git a/frontend/src/scenes/max/__mocks__/failureResponse.json b/frontend/src/scenes/max/__mocks__/failureResponse.json
new file mode 100644
index 0000000000000..3d9476fc3d958
--- /dev/null
+++ b/frontend/src/scenes/max/__mocks__/failureResponse.json
@@ -0,0 +1,4 @@
+{
+ "type": "ai/failure",
+ "content": "Oops! It looks like I’m having trouble generating this trends insight. Could you please try again?"
+}
diff --git a/frontend/src/scenes/max/maxLogic.ts b/frontend/src/scenes/max/maxLogic.ts
index 4d722a6fecadd..aa8ddb63805f2 100644
--- a/frontend/src/scenes/max/maxLogic.ts
+++ b/frontend/src/scenes/max/maxLogic.ts
@@ -1,10 +1,21 @@
+import { captureException } from '@sentry/react'
import { shuffle } from 'd3'
import { createParser } from 'eventsource-parser'
import { actions, kea, key, listeners, path, props, reducers, selectors } from 'kea'
import { loaders } from 'kea-loaders'
import api from 'lib/api'
-
-import { AssistantMessageType, NodeKind, RootAssistantMessage, SuggestedQuestionsQuery } from '~/queries/schema'
+import { isHumanMessage, isVisualizationMessage } from 'scenes/max/utils'
+
+import {
+ AssistantEventType,
+ AssistantGenerationStatusEvent,
+ AssistantGenerationStatusType,
+ AssistantMessageType,
+ FailureMessage,
+ NodeKind,
+ RootAssistantMessage,
+ SuggestedQuestionsQuery,
+} from '~/queries/schema'
import type { maxLogicType } from './maxLogicType'
@@ -18,6 +29,11 @@ export type ThreadMessage = RootAssistantMessage & {
status?: MessageStatus
}
+const FAILURE_MESSAGE: FailureMessage = {
+ type: AssistantMessageType.Failure,
+ content: 'Oops! It looks like I’m having trouble generating this trends insight. Could you please try again?',
+}
+
export const maxLogic = kea([
path(['scenes', 'max', 'maxLogic']),
props({} as MaxLogicProps),
@@ -31,6 +47,7 @@ export const maxLogic = kea([
setQuestion: (question: string) => ({ question }),
setVisibleSuggestions: (suggestions: string[]) => ({ suggestions }),
shuffleVisibleSuggestions: true,
+ retryLastMessage: true,
}),
reducers({
question: [
@@ -132,24 +149,34 @@ export const maxLogic = kea([
let firstChunk = true
const parser = createParser({
- onEvent: (event) => {
- const parsedResponse = parseResponse(event.data)
-
- if (!parsedResponse) {
- return
- }
+ onEvent: ({ data, event }) => {
+ if (event === AssistantEventType.Message) {
+ const parsedResponse = parseResponse(data)
+ if (!parsedResponse) {
+ return
+ }
- if (firstChunk) {
- firstChunk = false
+ if (firstChunk) {
+ firstChunk = false
+
+ if (parsedResponse) {
+ actions.addMessage({ ...parsedResponse, status: 'loading' })
+ }
+ } else if (parsedResponse) {
+ actions.replaceMessage(newIndex, {
+ ...parsedResponse,
+ status: values.thread[newIndex].status,
+ })
+ }
+ } else if (event === AssistantEventType.Status) {
+ const parsedResponse = parseResponse(data)
+ if (!parsedResponse) {
+ return
+ }
- if (parsedResponse) {
- actions.addMessage({ ...parsedResponse, status: 'loading' })
+ if (parsedResponse.type === AssistantGenerationStatusType.GenerationError) {
+ actions.setMessageStatus(newIndex, 'error')
}
- } else if (parsedResponse) {
- actions.replaceMessage(newIndex, {
- ...parsedResponse,
- status: 'loading',
- })
}
},
})
@@ -160,16 +187,41 @@ export const maxLogic = kea([
parser.feed(decoder.decode(value))
if (done) {
- actions.setMessageStatus(newIndex, 'completed')
+ const generatedMessage = values.thread[newIndex]
+ if (generatedMessage && isVisualizationMessage(generatedMessage) && generatedMessage.plan) {
+ actions.setMessageStatus(newIndex, 'completed')
+ } else if (generatedMessage) {
+ actions.replaceMessage(newIndex, FAILURE_MESSAGE)
+ } else {
+ actions.addMessage({
+ ...FAILURE_MESSAGE,
+ status: 'completed',
+ })
+ }
break
}
}
- } catch {
- actions.setMessageStatus(values.thread.length - 1 === newIndex ? newIndex : newIndex - 1, 'error')
+ } catch (e) {
+ captureException(e)
+
+ if (values.thread[newIndex]) {
+ actions.replaceMessage(newIndex, FAILURE_MESSAGE)
+ } else {
+ actions.addMessage({
+ ...FAILURE_MESSAGE,
+ status: 'completed',
+ })
+ }
}
actions.setThreadLoaded()
},
+ retryLastMessage: () => {
+ const lastMessage = values.thread.filter(isHumanMessage).pop()
+ if (lastMessage) {
+ actions.askMax(lastMessage.content)
+ }
+ },
})),
selectors({
sessionId: [(_, p) => [p.sessionId], (sessionId) => sessionId],
@@ -180,10 +232,10 @@ export const maxLogic = kea([
* Parses the generation result from the API. Some generation chunks might be sent in batches.
* @param response
*/
-function parseResponse(response: string): RootAssistantMessage | null | undefined {
+function parseResponse(response: string): T | null | undefined {
try {
const parsed = JSON.parse(response)
- return parsed as RootAssistantMessage | null | undefined
+ return parsed as T | null | undefined
} catch {
return null
}
diff --git a/frontend/src/scenes/max/utils.ts b/frontend/src/scenes/max/utils.ts
index 263eb2f521baf..84f2d1d4a2aba 100644
--- a/frontend/src/scenes/max/utils.ts
+++ b/frontend/src/scenes/max/utils.ts
@@ -1,4 +1,10 @@
-import { AssistantMessageType, HumanMessage, RootAssistantMessage, VisualizationMessage } from '~/queries/schema'
+import {
+ AssistantMessageType,
+ FailureMessage,
+ HumanMessage,
+ RootAssistantMessage,
+ VisualizationMessage,
+} from '~/queries/schema'
export function isVisualizationMessage(
message: RootAssistantMessage | undefined | null
@@ -9,3 +15,7 @@ export function isVisualizationMessage(
export function isHumanMessage(message: RootAssistantMessage | undefined | null): message is HumanMessage {
return message?.type === AssistantMessageType.Human
}
+
+export function isFailureMessage(message: RootAssistantMessage | undefined | null): message is FailureMessage {
+ return message?.type === AssistantMessageType.Failure
+}
diff --git a/frontend/src/scenes/pipeline/hogfunctions/filters/HogFunctionFilters.tsx b/frontend/src/scenes/pipeline/hogfunctions/filters/HogFunctionFilters.tsx
index 9dee4c767cc5e..681e63a6239b2 100644
--- a/frontend/src/scenes/pipeline/hogfunctions/filters/HogFunctionFilters.tsx
+++ b/frontend/src/scenes/pipeline/hogfunctions/filters/HogFunctionFilters.tsx
@@ -161,6 +161,10 @@ export function HogFunctionFilters(): JSX.Element {
value: '{person.id}',
label: 'Run once per person per interval',
},
+ {
+ value: '{concat(person.id, event.event)}',
+ label: 'Run once per person per event name per interval',
+ },
]}
value={value?.hash ?? null}
onChange={(val) =>
diff --git a/frontend/src/scenes/settings/user/personalAPIKeysLogic.tsx b/frontend/src/scenes/settings/user/personalAPIKeysLogic.tsx
index e4f78f929def5..be47278635896 100644
--- a/frontend/src/scenes/settings/user/personalAPIKeysLogic.tsx
+++ b/frontend/src/scenes/settings/user/personalAPIKeysLogic.tsx
@@ -9,7 +9,7 @@ import { lemonToast } from 'lib/lemon-ui/LemonToast/LemonToast'
import { urls } from 'scenes/urls'
import { userLogic } from 'scenes/userLogic'
-import { OrganizationBasicType, PersonalAPIKeyType, TeamBasicType } from '~/types'
+import { APIScopeObject, OrganizationBasicType, PersonalAPIKeyType, TeamBasicType } from '~/types'
import type { personalAPIKeysLogicType } from './personalAPIKeysLogicType'
@@ -32,7 +32,7 @@ export const API_KEY_SCOPE_PRESETS = [
]
export type APIScope = {
- key: string
+ key: APIScopeObject
info?: string | JSX.Element
disabledActions?: ('read' | 'write')[]
disabledWhenProjectScoped?: boolean
diff --git a/frontend/src/types.ts b/frontend/src/types.ts
index 6b78307b81774..0971c5dbdb008 100644
--- a/frontend/src/types.ts
+++ b/frontend/src/types.ts
@@ -35,6 +35,8 @@ import { QueryContext } from '~/queries/types'
import type {
DashboardFilter,
DatabaseSchemaField,
+ ExperimentFunnelsQuery,
+ ExperimentTrendsQuery,
HogQLQuery,
HogQLQueryModifiers,
HogQLVariable,
@@ -2352,9 +2354,11 @@ export interface PathsFilterType extends FilterType {
path_dropoff_key?: string // Paths People Dropoff Key
}
+export type RetentionEntityKind = NodeKind.ActionsNode | NodeKind.EventsNode
+
export interface RetentionEntity {
id?: string | number // TODO: Fix weird typing issues
- kind?: NodeKind.ActionsNode | NodeKind.EventsNode
+ kind?: RetentionEntityKind
name?: string
type?: EntityType
/** @asType integer */
@@ -3242,6 +3246,11 @@ export interface Group {
group_properties: Record
}
+export interface ExperimentMetric {
+ type: string
+ query: ExperimentTrendsQuery | ExperimentFunnelsQuery
+}
+
export interface Experiment {
id: number | 'new'
name: string
@@ -3250,6 +3259,7 @@ export interface Experiment {
feature_flag?: FeatureFlagBasicType
exposure_cohort?: number
filters: FilterType
+ metrics: ExperimentMetric[]
parameters: {
minimum_detectable_effect?: number
recommended_running_time?: number
@@ -3294,7 +3304,7 @@ interface BaseExperimentResults {
}
export interface _TrendsExperimentResults extends BaseExperimentResults {
- insight: TrendResult[]
+ insight: Record[]
filters: TrendsFilterType
variants: TrendExperimentVariant[]
last_refresh?: string | null
@@ -3302,7 +3312,7 @@ export interface _TrendsExperimentResults extends BaseExperimentResults {
}
export interface _FunnelExperimentResults extends BaseExperimentResults {
- insight: FunnelStep[][]
+ insight: Record[]
filters: FunnelsFilterType
variants: FunnelExperimentVariant[]
last_refresh?: string | null
@@ -3821,6 +3831,37 @@ export interface RoleMemberType {
user_uuid: string
}
+export type APIScopeObject =
+ | 'action'
+ | 'activity_log'
+ | 'annotation'
+ | 'batch_export'
+ | 'cohort'
+ | 'dashboard'
+ | 'dashboard_template'
+ | 'early_access_feature'
+ | 'event_definition'
+ | 'experiment'
+ | 'export'
+ | 'feature_flag'
+ | 'group'
+ | 'insight'
+ | 'query'
+ | 'notebook'
+ | 'organization'
+ | 'organization_member'
+ | 'person'
+ | 'plugin'
+ | 'project'
+ | 'property_definition'
+ | 'session_recording'
+ | 'session_recording_playlist'
+ | 'sharing_configuration'
+ | 'subscription'
+ | 'survey'
+ | 'user'
+ | 'webhook'
+
export interface OrganizationResourcePermissionType {
id: string
resource: Resource
diff --git a/plugin-server/src/cdp/cdp-api.ts b/plugin-server/src/cdp/cdp-api.ts
index 0f4ea02b96814..ed4e60976b1e9 100644
--- a/plugin-server/src/cdp/cdp-api.ts
+++ b/plugin-server/src/cdp/cdp-api.ts
@@ -156,7 +156,7 @@ export class CdpApi {
invocation: {
...invocation,
queue: 'hog',
- queueParameters: { response: { status: 200, body: '{}' } },
+ queueParameters: { response: { status: 200, headers: {} }, body: '{}' },
},
finished: false,
logs: [
@@ -185,7 +185,7 @@ export class CdpApi {
res.json({
status: lastResponse.finished ? 'success' : 'error',
- error: String(lastResponse.error),
+ error: lastResponse.error ? String(lastResponse.error) : null,
logs: logs,
})
} catch (e) {
diff --git a/plugin-server/src/cdp/fetch-executor.ts b/plugin-server/src/cdp/fetch-executor.ts
index 62f061e70c037..e4ad6a53a3613 100644
--- a/plugin-server/src/cdp/fetch-executor.ts
+++ b/plugin-server/src/cdp/fetch-executor.ts
@@ -95,6 +95,7 @@ export class FetchExecutor {
const resParams: HogFunctionQueueParametersFetchResponse = {
response: {
status: 0,
+ headers: {},
},
error: null,
timings: [],
@@ -120,8 +121,9 @@ export class FetchExecutor {
resParams.response = {
status: fetchResponse.status,
- body: responseBody,
+ headers: Object.fromEntries(fetchResponse.headers.entries()),
}
+ resParams.body = responseBody
} catch (err) {
status.error('🦔', `[HogExecutor] Error during fetch`, { error: String(err) })
resParams.error = 'Something went wrong with the fetch request.'
diff --git a/plugin-server/src/cdp/hog-executor.ts b/plugin-server/src/cdp/hog-executor.ts
index c57d954460f66..92b38fb71c4a8 100644
--- a/plugin-server/src/cdp/hog-executor.ts
+++ b/plugin-server/src/cdp/hog-executor.ts
@@ -9,6 +9,7 @@ import { Hub, ValueMatcher } from '../types'
import { status } from '../utils/status'
import { HogFunctionManager } from './hog-function-manager'
import {
+ CyclotronFetchFailureInfo,
HogFunctionInvocation,
HogFunctionInvocationGlobals,
HogFunctionInvocationGlobalsWithInputs,
@@ -217,25 +218,41 @@ export class HogExecutor {
const {
logs = [],
response = null,
+ trace = [],
error,
timings = [],
} = invocation.queueParameters as HogFunctionQueueParametersFetchResponse
+
+ let body = invocation.queueParameters.body
// Reset the queue parameters to be sure
invocation.queue = 'hog'
invocation.queueParameters = undefined
- const status = typeof response?.status === 'number' ? response.status : 503
-
- // Special handling for fetch
- if (status >= 400) {
- // Generic warn log for bad status codes
+ // If we got a trace, then the last "result" is the final attempt, and we should try to grab a status from it
+ // or any preceding attempts, and produce a log message for each of them
+ let status = null
+ if (trace.length > 0) {
logs.push({
- level: 'warn',
+ level: 'error',
timestamp: DateTime.now(),
- message: `Fetch returned bad status: ${status}`,
+ message: `Fetch failed after ${trace.length} attempts`,
})
+ for (const attempt of trace) {
+ logs.push({
+ level: 'warn',
+ timestamp: DateTime.now(),
+ message: fetchFailureToLogMessage(attempt),
+ })
+ if (attempt.status) {
+ status = attempt.status
+ }
+ }
}
+ // If we got a response from fetch, we know the response code was in the <300 range; otherwise we fall back to 503.
+ // NOTE(review): this line unconditionally overwrites any status extracted from the trace above — confirm `status = status ?? response?.status ?? 503` wasn't intended.
+ status = response?.status ?? 503
+
if (!invocation.vmState) {
throw new Error("VM state wasn't provided for queue parameters")
}
@@ -244,9 +261,9 @@ export class HogExecutor {
throw new Error(error)
}
- if (typeof response?.body === 'string') {
+ if (typeof body === 'string') {
try {
- response.body = JSON.parse(response.body)
+ body = JSON.parse(body)
} catch (e) {
// pass - if it isn't json we just pass it on
}
@@ -255,7 +272,7 @@ export class HogExecutor {
// Finally we create the response object as the VM expects
invocation.vmState!.stack.push({
status,
- body: response?.body,
+ body: body,
})
invocation.timings = invocation.timings.concat(timings)
result.logs = [...logs, ...result.logs]
@@ -556,3 +573,7 @@ export class HogExecutor {
return values
}
}
+
+function fetchFailureToLogMessage(failure: CyclotronFetchFailureInfo): string {
+ return `Fetch failure of kind ${failure.kind} with status ${failure.status} and message ${failure.message}`
+}
diff --git a/plugin-server/src/cdp/types.ts b/plugin-server/src/cdp/types.ts
index 037e08e7a9a54..8a675e605e017 100644
--- a/plugin-server/src/cdp/types.ts
+++ b/plugin-server/src/cdp/types.ts
@@ -169,14 +169,35 @@ export type HogFunctionQueueParametersFetchRequest = {
headers?: Record
}
+export type CyclotronFetchFailureKind =
+ | 'timeout'
+ | 'timeoutgettingbody'
+ | 'missingparameters'
+ | 'invalidparameters'
+ | 'requesterror'
+ | 'failurestatus'
+ | 'invalidbody'
+ | 'responsetoolarge'
+
+export type CyclotronFetchFailureInfo = {
+ kind: CyclotronFetchFailureKind
+ message: string
+ headers?: Record
+ status?: number
+ timestamp: DateTime
+}
+
export type HogFunctionQueueParametersFetchResponse = {
/** An error message to indicate something went wrong and the invocation should be stopped */
error?: any
- /** The data to be passed to the Hog function from the response */
+ /** On success, the fetch worker returns only the successful response */
response?: {
status: number
- body?: string
+ headers: Record
} | null
+ /** On failure, the fetch worker returns a list of info about the attempts made */
+ trace?: CyclotronFetchFailureInfo[]
+ body?: string // Both results AND failures can have a body
timings?: HogFunctionTiming[]
logs?: LogEntry[]
}
diff --git a/plugin-server/src/cdp/utils.ts b/plugin-server/src/cdp/utils.ts
index f8031a043a1bc..6546db471d88f 100644
--- a/plugin-server/src/cdp/utils.ts
+++ b/plugin-server/src/cdp/utils.ts
@@ -317,21 +317,14 @@ function prepareQueueParams(
let parameters: HogFunctionInvocation['queueParameters'] = _params
let blob: CyclotronJobUpdate['blob'] = undefined
- if (parameters && 'body' in parameters) {
- // Fetch request
- const { body, ...rest } = parameters
- parameters = rest
- blob = body ? Buffer.from(body) : undefined
- } else if (parameters && 'response' in parameters && parameters.response) {
- // Fetch response
- const { body, ...rest } = parameters.response
- parameters = {
- ...parameters,
- response: rest,
- }
- blob = body ? Buffer.from(body) : undefined
+ if (!parameters) {
+ return { parameters, blob }
}
+ const { body, ...rest } = parameters
+ parameters = rest
+ blob = body ? Buffer.from(body) : undefined
+
return {
parameters,
blob,
@@ -355,14 +348,7 @@ export function cyclotronJobToInvocation(job: CyclotronJob, hogFunction: HogFunc
if (job.blob && params) {
// Deserialize the blob into the params
try {
- const body = job.blob ? Buffer.from(job.blob).toString('utf-8') : undefined
- if ('response' in params && params.response) {
- // Fetch response
- params.response.body = body
- } else if ('method' in params) {
- // Fetch request
- params.body = body
- }
+ params.body = job.blob ? Buffer.from(job.blob).toString('utf-8') : undefined
} catch (e) {
status.error('Error parsing blob', e, job.blob)
captureException(e)
diff --git a/plugin-server/tests/cdp/cdp-api.test.ts b/plugin-server/tests/cdp/cdp-api.test.ts
index 94ccf642230a9..25599334cbc0f 100644
--- a/plugin-server/tests/cdp/cdp-api.test.ts
+++ b/plugin-server/tests/cdp/cdp-api.test.ts
@@ -166,7 +166,7 @@ describe('CDP API', () => {
console.log(res.body.logs[3].message)
expect(res.body).toMatchObject({
status: 'success',
- error: 'undefined',
+ error: null,
logs: [
{
level: 'debug',
@@ -205,6 +205,7 @@ describe('CDP API', () => {
Promise.resolve({
status: 201,
text: () => Promise.resolve(JSON.stringify({ real: true })),
+ headers: new Headers({ 'Content-Type': 'application/json' }),
})
)
const res = await supertest(app)
@@ -214,7 +215,7 @@ describe('CDP API', () => {
expect(res.status).toEqual(200)
expect(res.body).toMatchObject({
status: 'success',
- error: 'undefined',
+ error: null,
logs: [
{
level: 'debug',
@@ -260,7 +261,7 @@ describe('CDP API', () => {
expect(res.status).toEqual(200)
expect(res.body).toMatchObject({
status: 'success',
- error: 'undefined',
+ error: null,
logs: [
{
level: 'debug',
diff --git a/plugin-server/tests/cdp/cdp-e2e.test.ts b/plugin-server/tests/cdp/cdp-e2e.test.ts
index db170f7ea8e1f..2dc228da8705d 100644
--- a/plugin-server/tests/cdp/cdp-e2e.test.ts
+++ b/plugin-server/tests/cdp/cdp-e2e.test.ts
@@ -20,6 +20,7 @@ jest.mock('../../src/utils/fetch', () => {
Promise.resolve({
status: 200,
text: () => Promise.resolve(JSON.stringify({ success: true })),
+ headers: new Headers({ 'Content-Type': 'application/json' }),
json: () => Promise.resolve({ success: true }),
})
),
diff --git a/plugin-server/tests/cdp/examples.ts b/plugin-server/tests/cdp/examples.ts
index fafafce472e3d..f92dd9ed4f97a 100644
--- a/plugin-server/tests/cdp/examples.ts
+++ b/plugin-server/tests/cdp/examples.ts
@@ -708,8 +708,16 @@ export const HOG_MASK_EXAMPLES: Record>
person: {
masking: {
ttl: 30,
- hash: '{person.uuid}',
- bytecode: ['_h', 32, 'uuid', 32, 'person', 1, 2],
+ hash: '{person.id}',
+ bytecode: ['_h', 32, 'id', 32, 'person', 1, 2],
+ threshold: null,
+ },
+ },
+ personAndEvent: {
+ masking: {
+ ttl: 30,
+ hash: '{concat(person.id, event.event)}',
+ bytecode: ['_H', 1, 32, 'id', 32, 'person', 1, 2, 32, 'event', 32, 'event', 1, 2, 2, 'concat', 2],
threshold: null,
},
},
diff --git a/plugin-server/tests/cdp/hog-executor.test.ts b/plugin-server/tests/cdp/hog-executor.test.ts
index 94608b2f5bea1..9d1f5dea2bebf 100644
--- a/plugin-server/tests/cdp/hog-executor.test.ts
+++ b/plugin-server/tests/cdp/hog-executor.test.ts
@@ -31,8 +31,9 @@ const setupFetchResponse = (invocation: HogFunctionInvocation, options?: { statu
],
response: {
status: options?.status ?? 200,
- body: options?.body ?? 'success',
+ headers: { 'Content-Type': 'application/json' },
},
+ body: options?.body ?? 'success',
}
}
diff --git a/plugin-server/tests/cdp/hog-masker.test.ts b/plugin-server/tests/cdp/hog-masker.test.ts
index df05043290e63..a3c7a955574db 100644
--- a/plugin-server/tests/cdp/hog-masker.test.ts
+++ b/plugin-server/tests/cdp/hog-masker.test.ts
@@ -118,6 +118,7 @@ describe('HogMasker', () => {
describe('ttl', () => {
let hogFunctionPerson: HogFunctionType
let hogFunctionAll: HogFunctionType
+ let hogFunctionPersonAndEvent: HogFunctionType
beforeEach(() => {
hogFunctionPerson = createHogFunction({
@@ -127,6 +128,13 @@ describe('HogMasker', () => {
},
})
+ hogFunctionPersonAndEvent = createHogFunction({
+ masking: {
+ ...HOG_MASK_EXAMPLES.personAndEvent.masking!,
+ ttl: 1,
+ },
+ })
+
hogFunctionAll = createHogFunction({
masking: {
...HOG_MASK_EXAMPLES.all.masking!,
@@ -145,20 +153,33 @@ describe('HogMasker', () => {
})
it('should mask with custom hog hash', async () => {
- const globalsPerson1 = createHogExecutionGlobals({ person: { uuid: '1' } as any })
- const globalsPerson2 = createHogExecutionGlobals({ person: { uuid: '2' } as any })
+ const globals1 = createHogExecutionGlobals({
+ person: { id: '1' } as any,
+ event: { event: '$pageview' } as any,
+ })
+ const globals2 = createHogExecutionGlobals({
+ person: { id: '2' } as any,
+ event: { event: '$autocapture' } as any,
+ })
+ const globals3 = createHogExecutionGlobals({
+ person: { id: '2' } as any,
+ event: { event: '$pageview' } as any,
+ })
const invocations = [
- createInvocation(hogFunctionPerson, globalsPerson1),
- createInvocation(hogFunctionAll, globalsPerson1),
- createInvocation(hogFunctionPerson, globalsPerson2),
- createInvocation(hogFunctionAll, globalsPerson2),
+ createInvocation(hogFunctionPerson, globals1),
+ createInvocation(hogFunctionAll, globals1),
+ createInvocation(hogFunctionPersonAndEvent, globals1),
+ createInvocation(hogFunctionPerson, globals2),
+ createInvocation(hogFunctionAll, globals2),
+ createInvocation(hogFunctionPersonAndEvent, globals2),
+ createInvocation(hogFunctionPersonAndEvent, globals3),
]
const res = await masker.filterByMasking(invocations)
expect(res.masked.length).toEqual(1)
- expect(res.notMasked.length).toEqual(3)
+ expect(res.notMasked.length).toEqual(6)
const res2 = await masker.filterByMasking(invocations)
- expect(res2.masked.length).toEqual(4)
+ expect(res2.masked.length).toEqual(7)
expect(res2.notMasked.length).toEqual(0)
})
diff --git a/posthog/api/personal_api_key.py b/posthog/api/personal_api_key.py
index 23f6d531693a0..355d6cc1c189a 100644
--- a/posthog/api/personal_api_key.py
+++ b/posthog/api/personal_api_key.py
@@ -5,7 +5,8 @@
from rest_framework.permissions import IsAuthenticated
from posthog.models import PersonalAPIKey, User
-from posthog.models.personal_api_key import API_SCOPE_ACTIONS, API_SCOPE_OBJECTS, hash_key_value, mask_key_value
+from posthog.models.personal_api_key import hash_key_value, mask_key_value
+from posthog.models.scopes import API_SCOPE_ACTIONS, API_SCOPE_OBJECTS
from posthog.models.team.team import Team
from posthog.models.utils import generate_random_token_personal
from posthog.permissions import TimeSensitiveActionPermission
diff --git a/posthog/api/project.py b/posthog/api/project.py
index 8efca11b21968..c740dc330801a 100644
--- a/posthog/api/project.py
+++ b/posthog/api/project.py
@@ -30,7 +30,7 @@
from posthog.models.async_deletion import AsyncDeletion, DeletionType
from posthog.models.group_type_mapping import GroupTypeMapping
from posthog.models.organization import OrganizationMembership
-from posthog.models.personal_api_key import APIScopeObjectOrNotSupported
+from posthog.models.scopes import APIScopeObjectOrNotSupported
from posthog.models.product_intent.product_intent import ProductIntent
from posthog.models.project import Project
from posthog.models.signals import mute_selected_signals
diff --git a/posthog/api/query.py b/posthog/api/query.py
index f2eaccea53ae5..00d4548eeae37 100644
--- a/posthog/api/query.py
+++ b/posthog/api/query.py
@@ -41,7 +41,14 @@
ClickHouseSustainedRateThrottle,
HogQLQueryThrottle,
)
-from posthog.schema import HumanMessage, QueryRequest, QueryResponseAlternative, QueryStatusResponse
+from posthog.schema import (
+ AssistantEventType,
+ AssistantGenerationStatusEvent,
+ HumanMessage,
+ QueryRequest,
+ QueryResponseAlternative,
+ QueryStatusResponse,
+)
class ServerSentEventRenderer(BaseRenderer):
@@ -185,7 +192,11 @@ def generate():
last_message = None
for message in assistant.stream(validated_body):
last_message = message
- yield f"data: {message}\n\n"
+ if isinstance(message, AssistantGenerationStatusEvent):
+ yield f"event: {AssistantEventType.STATUS}\n"
+ else:
+ yield f"event: {AssistantEventType.MESSAGE}\n"
+ yield f"data: {message.model_dump_json()}\n\n"
human_message = validated_body.messages[-1].root
if isinstance(human_message, HumanMessage):
@@ -195,7 +206,9 @@ def generate():
{"prompt": human_message.content, "response": last_message},
)
- return StreamingHttpResponse(generate(), content_type=ServerSentEventRenderer.media_type)
+ return StreamingHttpResponse(
+ generate(), content_type=ServerSentEventRenderer.media_type, headers={"X-Accel-Buffering": "no"}
+ )
def handle_column_ch_error(self, error):
if getattr(error, "message", None):
diff --git a/posthog/api/routing.py b/posthog/api/routing.py
index 9ff2fede76449..084ddcc94c3ae 100644
--- a/posthog/api/routing.py
+++ b/posthog/api/routing.py
@@ -18,7 +18,7 @@
SharingAccessTokenAuthentication,
)
from posthog.models.organization import Organization
-from posthog.models.personal_api_key import APIScopeObjectOrNotSupported
+from posthog.models.scopes import APIScopeObjectOrNotSupported
from posthog.models.project import Project
from posthog.models.team import Team
from posthog.models.user import User
diff --git a/posthog/api/survey.py b/posthog/api/survey.py
index ea894a7dd30c0..4864612c2b438 100644
--- a/posthog/api/survey.py
+++ b/posthog/api/survey.py
@@ -597,7 +597,7 @@ def destroy(self, request: Request, *args: Any, **kwargs: Any) -> Response:
return super().destroy(request, *args, **kwargs)
- @action(methods=["GET"], detail=False)
+ @action(methods=["GET"], detail=False, required_scopes=["survey:read"])
def responses_count(self, request: request.Request, **kwargs):
earliest_survey_start_date = Survey.objects.filter(team_id=self.team_id).aggregate(Min("start_date"))[
"start_date__min"
diff --git a/posthog/api/team.py b/posthog/api/team.py
index 148471919cbc0..88b7f5e28b5ec 100644
--- a/posthog/api/team.py
+++ b/posthog/api/team.py
@@ -28,7 +28,7 @@
from posthog.models.async_deletion import AsyncDeletion, DeletionType
from posthog.models.group_type_mapping import GroupTypeMapping
from posthog.models.organization import OrganizationMembership
-from posthog.models.personal_api_key import APIScopeObjectOrNotSupported
+from posthog.models.scopes import APIScopeObjectOrNotSupported
from posthog.models.project import Project
from posthog.models.signals import mute_selected_signals
from posthog.models.team.util import delete_batch_exports, delete_bulky_postgres_data
diff --git a/posthog/api/test/__snapshots__/test_api_docs.ambr b/posthog/api/test/__snapshots__/test_api_docs.ambr
index a5f9b394809ae..044ee43f1983e 100644
--- a/posthog/api/test/__snapshots__/test_api_docs.ambr
+++ b/posthog/api/test/__snapshots__/test_api_docs.ambr
@@ -1,137 +1,139 @@
# serializer version: 1
# name: TestAPIDocsSchema.test_api_docs_generation_warnings_snapshot
list([
- '/home/runner/work/posthog/posthog/posthog/api/plugin_log_entry.py: Warning [PluginLogEntryViewSet]: could not derive type of path parameter "plugin_config_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".',
+ '',
+ '/home/runner/work/posthog/posthog/ee/api/dashboard_collaborator.py: Warning [DashboardCollaboratorViewSet]: could not derive type of path parameter "project_id" because model "ee.models.dashboard_privilege.DashboardPrivilege" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/ee/api/explicit_team_member.py: Warning [ExplicitTeamMemberViewSet]: could not derive type of path parameter "project_id" because model "ee.models.explicit_team_membership.ExplicitTeamMembership" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/ee/api/feature_flag_role_access.py: Warning [FeatureFlagRoleAccessViewSet]: could not derive type of path parameter "project_id" because model "ee.models.feature_flag_role_access.FeatureFlagRoleAccess" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/ee/api/rbac/role.py: Warning [RoleMembershipViewSet]: could not derive type of path parameter "organization_id" because model "ee.models.rbac.role.RoleMembership" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/ee/api/rbac/role.py: Warning [RoleViewSet > RoleSerializer]: unable to resolve type hint for function "get_associated_flags". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/ee/api/rbac/role.py: Warning [RoleViewSet > RoleSerializer]: unable to resolve type hint for function "get_members". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/ee/api/subscription.py: Warning [SubscriptionViewSet > SubscriptionSerializer]: unable to resolve type hint for function "summary". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/ee/api/subscription.py: Warning [SubscriptionViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.subscription.Subscription" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/ee/clickhouse/views/experiment_holdouts.py: Warning [ExperimentHoldoutViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.experiment.ExperimentHoldout" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/ee/clickhouse/views/experiments.py: Warning [EnterpriseExperimentsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.experiment.Experiment" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/ee/clickhouse/views/groups.py: Warning [GroupsTypesViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.group_type_mapping.GroupTypeMapping" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/ee/clickhouse/views/groups.py: Warning [GroupsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.group.group.Group" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/ee/clickhouse/views/insights.py: Warning [EnterpriseInsightsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.insight.Insight" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/ee/clickhouse/views/person.py: Warning [EnterprisePersonViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.person.person.Person" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/ee/session_recordings/session_recording_playlist.py: Warning [SessionRecordingPlaylistViewSet]: could not derive type of path parameter "project_id" because model "posthog.session_recordings.models.session_recording_playlist.SessionRecordingPlaylist" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/ee/session_recordings/session_recording_playlist.py: Warning [SessionRecordingPlaylistViewSet]: could not derive type of path parameter "session_recording_id" because model "posthog.session_recordings.models.session_recording_playlist.SessionRecordingPlaylist" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/posthog/api/action.py: Warning [ActionViewSet > ActionSerializer]: unable to resolve type hint for function "get_creation_context". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/posthog/api/action.py: Warning [ActionViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.action.action.Action" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/posthog/api/activity_log.py: Warning [ActivityLogViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.activity_logging.activity_log.ActivityLog" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/posthog/api/annotation.py: Warning [AnnotationsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.annotation.Annotation" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
"/home/runner/work/posthog/posthog/posthog/api/app_metrics.py: Error [AppMetricsViewSet]: exception raised while getting serializer. Hint: Is get_serializer_class() returning None or is get_queryset() not working without a request? Ignoring the view for now. (Exception: 'AppMetricsViewSet' should either include a `serializer_class` attribute, or override the `get_serializer_class()` method.)",
- '/home/runner/work/posthog/posthog/posthog/api/app_metrics.py: Warning [AppMetricsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.plugin.PluginConfig" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
'/home/runner/work/posthog/posthog/posthog/api/app_metrics.py: Error [HistoricalExportsAppMetricsViewSet]: unable to guess serializer. This is graceful fallback handling for APIViews. Consider using GenericAPIView as view base class, if view is under your control. Either way you may want to add a serializer_class (or method). Ignoring view for now.',
- '/home/runner/work/posthog/posthog/posthog/api/app_metrics.py: Warning [HistoricalExportsAppMetricsViewSet]: could not derive type of path parameter "project_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/posthog/api/app_metrics.py: Warning [HistoricalExportsAppMetricsViewSet]: could not derive type of path parameter "plugin_config_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/posthog/api/app_metrics.py: Warning [AppMetricsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.plugin.PluginConfig" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
'/home/runner/work/posthog/posthog/posthog/api/app_metrics.py: Warning [HistoricalExportsAppMetricsViewSet]: could not derive type of path parameter "id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/posthog/batch_exports/http.py: Warning [BatchExportViewSet]: could not derive type of path parameter "project_id" because model "posthog.batch_exports.models.BatchExport" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/posthog/batch_exports/http.py: Warning [BatchExportViewSet > BatchExportSerializer]: could not resolve serializer field "HogQLSelectQueryField(required=False)". Defaulting to "string"',
- '/home/runner/work/posthog/posthog/posthog/batch_exports/http.py: Warning [BatchExportRunViewSet]: could not derive type of path parameter "project_id" because model "posthog.batch_exports.models.BatchExportRun" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/posthog/api/app_metrics.py: Warning [HistoricalExportsAppMetricsViewSet]: could not derive type of path parameter "plugin_config_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/posthog/api/app_metrics.py: Warning [HistoricalExportsAppMetricsViewSet]: could not derive type of path parameter "project_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/posthog/api/cohort.py: Warning [CohortViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.cohort.cohort.Cohort" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
'/home/runner/work/posthog/posthog/posthog/api/dashboards/dashboard.py: Warning [DashboardsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.dashboard.Dashboard" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/ee/api/dashboard_collaborator.py: Warning [DashboardCollaboratorViewSet]: could not derive type of path parameter "project_id" because model "ee.models.dashboard_privilege.DashboardPrivilege" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/posthog/api/sharing.py: Warning [SharingConfigurationViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.sharing_configuration.SharingConfiguration" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/posthog/api/event.py: Warning [EventViewSet]: could not derive type of path parameter "project_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/posthog/models/event/util.py: Warning [EventViewSet > ClickhouseEventSerializer]: unable to resolve type hint for function "get_id". Consider using a type hint or @extend_schema_field. Defaulting to string.',
- '/home/runner/work/posthog/posthog/posthog/models/event/util.py: Warning [EventViewSet > ClickhouseEventSerializer]: unable to resolve type hint for function "get_distinct_id". Consider using a type hint or @extend_schema_field. Defaulting to string.',
- '/home/runner/work/posthog/posthog/posthog/models/event/util.py: Warning [EventViewSet > ClickhouseEventSerializer]: unable to resolve type hint for function "get_properties". Consider using a type hint or @extend_schema_field. Defaulting to string.',
- '/home/runner/work/posthog/posthog/posthog/models/event/util.py: Warning [EventViewSet > ClickhouseEventSerializer]: unable to resolve type hint for function "get_event". Consider using a type hint or @extend_schema_field. Defaulting to string.',
- '/home/runner/work/posthog/posthog/posthog/models/event/util.py: Warning [EventViewSet > ClickhouseEventSerializer]: unable to resolve type hint for function "get_timestamp". Consider using a type hint or @extend_schema_field. Defaulting to string.',
- '/home/runner/work/posthog/posthog/posthog/models/event/util.py: Warning [EventViewSet > ClickhouseEventSerializer]: unable to resolve type hint for function "get_person". Consider using a type hint or @extend_schema_field. Defaulting to string.',
- '/home/runner/work/posthog/posthog/posthog/models/event/util.py: Warning [EventViewSet > ClickhouseEventSerializer]: unable to resolve type hint for function "get_elements". Consider using a type hint or @extend_schema_field. Defaulting to string.',
- '/home/runner/work/posthog/posthog/posthog/models/event/util.py: Warning [EventViewSet > ClickhouseEventSerializer]: unable to resolve type hint for function "get_elements_chain". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/posthog/api/dashboards/dashboard_templates.py: Warning [DashboardTemplateViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.dashboard_templates.DashboardTemplate" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/posthog/api/early_access_feature.py: Warning [EarlyAccessFeatureViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.early_access_feature.EarlyAccessFeature" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
'/home/runner/work/posthog/posthog/posthog/api/event.py: Warning [EventViewSet]: could not derive type of path parameter "id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/ee/api/explicit_team_member.py: Warning [ExplicitTeamMemberViewSet]: could not derive type of path parameter "project_id" because model "ee.models.explicit_team_membership.ExplicitTeamMembership" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/posthog/api/exports.py: Warning [ExportedAssetViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.exported_asset.ExportedAsset" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/posthog/api/exports.py: Warning [ExportedAssetViewSet > ExportedAssetSerializer]: unable to resolve type hint for function "has_content". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/posthog/api/event.py: Warning [EventViewSet]: could not derive type of path parameter "project_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".',
+ "/home/runner/work/posthog/posthog/posthog/api/event_definition.py: Error [EventDefinitionViewSet]: exception raised while getting serializer. Hint: Is get_serializer_class() returning None or is get_queryset() not working without a request? Ignoring the view for now. (Exception: 'AnonymousUser' object has no attribute 'organization')",
+ '/home/runner/work/posthog/posthog/posthog/api/event_definition.py: Warning [EventDefinitionViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.event_definition.EventDefinition" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
'/home/runner/work/posthog/posthog/posthog/api/exports.py: Warning [ExportedAssetViewSet > ExportedAssetSerializer]: unable to resolve type hint for function "filename". Consider using a type hint or @extend_schema_field. Defaulting to string.',
- '/home/runner/work/posthog/posthog/ee/clickhouse/views/groups.py: Warning [GroupsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.group.group.Group" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/ee/clickhouse/views/insights.py: Warning [EnterpriseInsightsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.insight.Insight" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_last_refresh". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/posthog/api/exports.py: Warning [ExportedAssetViewSet > ExportedAssetSerializer]: unable to resolve type hint for function "has_content". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/posthog/api/exports.py: Warning [ExportedAssetViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.exported_asset.ExportedAsset" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/posthog/api/feature_flag.py: Warning [FeatureFlagViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.feature_flag.feature_flag.FeatureFlag" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
'/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_cache_target_age". Consider using a type hint or @extend_schema_field. Defaulting to string.',
- '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_next_allowed_client_refresh". Consider using a type hint or @extend_schema_field. Defaulting to string.',
- '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_result". Consider using a type hint or @extend_schema_field. Defaulting to string.',
- '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_hasMore". Consider using a type hint or @extend_schema_field. Defaulting to string.',
'/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_columns". Consider using a type hint or @extend_schema_field. Defaulting to string.',
- '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_timezone". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_hasMore". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_hogql". Consider using a type hint or @extend_schema_field. Defaulting to string.',
'/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_is_cached". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_last_refresh". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_next_allowed_client_refresh". Consider using a type hint or @extend_schema_field. Defaulting to string.',
'/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_query_status". Consider using a type hint or @extend_schema_field. Defaulting to string.',
- '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_hogql". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_result". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_timezone". Consider using a type hint or @extend_schema_field. Defaulting to string.',
'/home/runner/work/posthog/posthog/posthog/api/insight.py: Warning [EnterpriseInsightsViewSet > InsightSerializer]: unable to resolve type hint for function "get_types". Consider using a type hint or @extend_schema_field. Defaulting to string.',
- '/home/runner/work/posthog/posthog/ee/clickhouse/views/person.py: Warning [EnterprisePersonViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.person.person.Person" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/posthog/api/plugin.py: Warning [PipelineDestinationsConfigsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.plugin.PluginConfig" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/posthog/api/notebook.py: Warning [NotebookViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.notebook.notebook.Notebook" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/posthog/api/organization.py: Warning [OrganizationViewSet > OrganizationSerializer]: unable to resolve type hint for function "get_member_count". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/posthog/api/organization.py: Warning [OrganizationViewSet > OrganizationSerializer]: unable to resolve type hint for function "get_metadata". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/posthog/api/person.py: Warning [SessionRecordingViewSet > SessionRecordingSerializer > MinimalPersonSerializer]: unable to resolve type hint for function "get_distinct_ids". Consider using a type hint or @extend_schema_field. Defaulting to string.',
'/home/runner/work/posthog/posthog/posthog/api/plugin.py: Warning [PipelineDestinationsConfigsViewSet > PluginConfigSerializer]: unable to resolve type hint for function "get_config". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/posthog/api/plugin.py: Warning [PipelineDestinationsConfigsViewSet > PluginConfigSerializer]: unable to resolve type hint for function "get_delivery_rate_24h". Consider using a type hint or @extend_schema_field. Defaulting to string.',
'/home/runner/work/posthog/posthog/posthog/api/plugin.py: Warning [PipelineDestinationsConfigsViewSet > PluginConfigSerializer]: unable to resolve type hint for function "get_error". Consider using a type hint or @extend_schema_field. Defaulting to string.',
'/home/runner/work/posthog/posthog/posthog/api/plugin.py: Warning [PipelineDestinationsConfigsViewSet > PluginConfigSerializer]: unable to resolve type hint for function "get_plugin_info". Consider using a type hint or @extend_schema_field. Defaulting to string.',
- '/home/runner/work/posthog/posthog/posthog/api/plugin.py: Warning [PipelineDestinationsConfigsViewSet > PluginConfigSerializer]: unable to resolve type hint for function "get_delivery_rate_24h". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/posthog/api/plugin.py: Warning [PipelineDestinationsConfigsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.plugin.PluginConfig" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/posthog/api/plugin.py: Warning [PipelineDestinationsViewSet > PluginSerializer]: unable to resolve type hint for function "get_hog_function_migration_available". Consider using a type hint or @extend_schema_field. Defaulting to string.',
'/home/runner/work/posthog/posthog/posthog/api/plugin.py: Warning [PipelineFrontendAppsConfigsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.plugin.PluginConfig" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
'/home/runner/work/posthog/posthog/posthog/api/plugin.py: Warning [PipelineImportAppsConfigsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.plugin.PluginConfig" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
'/home/runner/work/posthog/posthog/posthog/api/plugin.py: Warning [PipelineTransformationsConfigsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.plugin.PluginConfig" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
'/home/runner/work/posthog/posthog/posthog/api/plugin.py: Warning [PluginConfigViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.plugin.PluginConfig" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/posthog/api/query.py: Warning [QueryViewSet]: could not derive type of path parameter "project_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".',
- '/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/pydantic/_internal/_model_construction.py: Warning [QueryViewSet > ModelMetaclass]: Encountered 2 components with identical names "Person" and different classes and . This will very likely result in an incorrect schema. Try renaming one.',
- '/home/runner/work/posthog/posthog/posthog/api/query.py: Warning [QueryViewSet]: could not derive type of path parameter "id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/posthog/api/query.py: Error [QueryViewSet]: unable to guess serializer. This is graceful fallback handling for APIViews. Consider using GenericAPIView as view base class, if view is under your control. Either way you may want to add a serializer_class (or method). Ignoring view for now.',
- '/home/runner/work/posthog/posthog/posthog/session_recordings/session_recording_api.py: Warning [SessionRecordingViewSet]: could not derive type of path parameter "project_id" because model "posthog.session_recordings.models.session_recording.SessionRecording" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/posthog/api/person.py: Warning [SessionRecordingViewSet > SessionRecordingSerializer > MinimalPersonSerializer]: unable to resolve type hint for function "get_distinct_ids". Consider using a type hint or @extend_schema_field. Defaulting to string.',
- '/home/runner/work/posthog/posthog/posthog/session_recordings/session_recording_api.py: Warning [SessionRecordingViewSet > SessionRecordingSerializer]: unable to resolve type hint for function "storage". Consider using a type hint or @extend_schema_field. Defaulting to string.',
- '/home/runner/work/posthog/posthog/posthog/api/session.py: Warning [SessionViewSet]: could not derive type of path parameter "project_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/ee/api/subscription.py: Warning [SubscriptionViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.subscription.Subscription" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/ee/api/subscription.py: Warning [SubscriptionViewSet > SubscriptionSerializer]: unable to resolve type hint for function "summary". Consider using a type hint or @extend_schema_field. Defaulting to string.',
- '/home/runner/work/posthog/posthog/posthog/api/organization.py: Warning [OrganizationViewSet > OrganizationSerializer]: unable to resolve type hint for function "get_metadata". Consider using a type hint or @extend_schema_field. Defaulting to string.',
- '/home/runner/work/posthog/posthog/posthog/api/organization.py: Warning [OrganizationViewSet > OrganizationSerializer]: unable to resolve type hint for function "get_member_count". Consider using a type hint or @extend_schema_field. Defaulting to string.',
- '/home/runner/work/posthog/posthog/posthog/batch_exports/http.py: Warning [BatchExportOrganizationViewSet]: could not derive type of path parameter "organization_id" because model "posthog.batch_exports.models.BatchExport" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/posthog/api/plugin.py: Warning [PipelineDestinationsViewSet > PluginSerializer]: unable to resolve type hint for function "get_hog_function_migration_available". Consider using a type hint or @extend_schema_field. Defaulting to string.',
- '/home/runner/work/posthog/posthog/posthog/api/project.py: Warning [ProjectViewSet > ProjectBackwardCompatSerializer]: could not resolve field on model with path "person_on_events_querying_enabled". This is likely a custom field that does some unknown magic. Maybe consider annotating the field/property? Defaulting to "string". (Exception: Project has no field named \'person_on_events_querying_enabled\')',
+ '/home/runner/work/posthog/posthog/posthog/api/plugin_log_entry.py: Warning [PluginLogEntryViewSet]: could not derive type of path parameter "plugin_config_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".',
'/home/runner/work/posthog/posthog/posthog/api/project.py: Warning [ProjectViewSet > ProjectBackwardCompatSerializer]: could not resolve field on model with path "default_modifiers". This is likely a custom field that does some unknown magic. Maybe consider annotating the field/property? Defaulting to "string". (Exception: Project has no field named \'default_modifiers\')',
+ '/home/runner/work/posthog/posthog/posthog/api/project.py: Warning [ProjectViewSet > ProjectBackwardCompatSerializer]: could not resolve field on model with path "person_on_events_querying_enabled". This is likely a custom field that does some unknown magic. Maybe consider annotating the field/property? Defaulting to "string". (Exception: Project has no field named \'person_on_events_querying_enabled\')',
'/home/runner/work/posthog/posthog/posthog/api/project.py: Warning [ProjectViewSet > ProjectBackwardCompatSerializer]: unable to resolve type hint for function "get_product_intents". Consider using a type hint or @extend_schema_field. Defaulting to string.',
- '/home/runner/work/posthog/posthog/posthog/api/proxy_record.py: Warning [ProxyRecordViewset]: could not derive type of path parameter "organization_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/posthog/api/proxy_record.py: Warning [ProxyRecordViewset]: could not derive type of path parameter "id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/ee/api/role.py: Warning [RoleViewSet > RoleSerializer]: unable to resolve type hint for function "get_members". Consider using a type hint or @extend_schema_field. Defaulting to string.',
- '/home/runner/work/posthog/posthog/ee/api/role.py: Warning [RoleViewSet > RoleSerializer]: unable to resolve type hint for function "get_associated_flags". Consider using a type hint or @extend_schema_field. Defaulting to string.',
- '/home/runner/work/posthog/posthog/ee/api/role.py: Warning [RoleMembershipViewSet]: could not derive type of path parameter "organization_id" because model "ee.models.role.RoleMembership" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/posthog/api/action.py: Warning [ActionViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.action.action.Action" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/posthog/api/action.py: Warning [ActionViewSet > ActionSerializer]: unable to resolve type hint for function "get_creation_context". Consider using a type hint or @extend_schema_field. Defaulting to string.',
- '/home/runner/work/posthog/posthog/posthog/api/activity_log.py: Warning [ActivityLogViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.activity_logging.activity_log.ActivityLog" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/posthog/api/annotation.py: Warning [AnnotationsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.annotation.Annotation" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/posthog/api/cohort.py: Warning [CohortViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.cohort.cohort.Cohort" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/posthog/api/dashboards/dashboard_templates.py: Warning [DashboardTemplateViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.dashboard_templates.DashboardTemplate" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/posthog/api/early_access_feature.py: Warning [EarlyAccessFeatureViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.early_access_feature.EarlyAccessFeature" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/posthog/api/team.py: Warning [TeamViewSet > TeamSerializer]: unable to resolve type hint for function "get_product_intents". Consider using a type hint or @extend_schema_field. Defaulting to string.',
- "/home/runner/work/posthog/posthog/posthog/api/event_definition.py: Error [EventDefinitionViewSet]: exception raised while getting serializer. Hint: Is get_serializer_class() returning None or is get_queryset() not working without a request? Ignoring the view for now. (Exception: 'AnonymousUser' object has no attribute 'organization')",
- '/home/runner/work/posthog/posthog/posthog/api/event_definition.py: Warning [EventDefinitionViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.event_definition.EventDefinition" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/ee/clickhouse/views/experiment_holdouts.py: Warning [ExperimentHoldoutViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.experiment.ExperimentHoldout" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/ee/clickhouse/views/experiments.py: Warning [EnterpriseExperimentsViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.experiment.Experiment" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/posthog/api/feature_flag.py: Warning [FeatureFlagViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.feature_flag.feature_flag.FeatureFlag" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/ee/api/feature_flag_role_access.py: Warning [FeatureFlagRoleAccessViewSet]: could not derive type of path parameter "project_id" because model "ee.models.feature_flag_role_access.FeatureFlagRoleAccess" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/ee/clickhouse/views/groups.py: Warning [GroupsTypesViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.group_type_mapping.GroupTypeMapping" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/posthog/api/notebook.py: Warning [NotebookViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.notebook.notebook.Notebook" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
"/home/runner/work/posthog/posthog/posthog/api/property_definition.py: Error [PropertyDefinitionViewSet]: exception raised while getting serializer. Hint: Is get_serializer_class() returning None or is get_queryset() not working without a request? Ignoring the view for now. (Exception: 'AnonymousUser' object has no attribute 'organization')",
'/home/runner/work/posthog/posthog/posthog/api/property_definition.py: Warning [PropertyDefinitionViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.property_definition.PropertyDefinition" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/ee/session_recordings/session_recording_playlist.py: Warning [SessionRecordingPlaylistViewSet]: could not derive type of path parameter "project_id" because model "posthog.session_recordings.models.session_recording_playlist.SessionRecordingPlaylist" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/ee/session_recordings/session_recording_playlist.py: Warning [SessionRecordingPlaylistViewSet]: could not derive type of path parameter "session_recording_id" because model "posthog.session_recordings.models.session_recording_playlist.SessionRecordingPlaylist" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
- '/home/runner/work/posthog/posthog/posthog/api/survey.py: Warning [SurveyViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.feedback.survey.Survey" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/posthog/api/proxy_record.py: Warning [ProxyRecordViewset]: could not derive type of path parameter "id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/posthog/api/proxy_record.py: Warning [ProxyRecordViewset]: could not derive type of path parameter "organization_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/posthog/api/query.py: Error [QueryViewSet]: unable to guess serializer. This is graceful fallback handling for APIViews. Consider using GenericAPIView as view base class, if view is under your control. Either way you may want to add a serializer_class (or method). Ignoring view for now.',
+ '/home/runner/work/posthog/posthog/posthog/api/query.py: Warning [QueryViewSet]: could not derive type of path parameter "id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/posthog/api/query.py: Warning [QueryViewSet]: could not derive type of path parameter "project_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/posthog/api/session.py: Warning [SessionViewSet]: could not derive type of path parameter "project_id" because it is untyped and obtaining queryset from the viewset failed. Consider adding a type to the path (e.g. ) or annotating the parameter type with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/posthog/api/sharing.py: Warning [SharingConfigurationViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.sharing_configuration.SharingConfiguration" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
'/home/runner/work/posthog/posthog/posthog/api/survey.py: Warning [SurveyViewSet > SurveySerializer]: unable to resolve type hint for function "get_conditions". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/posthog/api/survey.py: Warning [SurveyViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.feedback.survey.Survey" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/posthog/api/team.py: Warning [TeamViewSet > TeamSerializer]: unable to resolve type hint for function "get_product_intents". Consider using a type hint or @extend_schema_field. Defaulting to string.',
'/home/runner/work/posthog/posthog/posthog/api/web_experiment.py: Warning [WebExperimentViewSet]: could not derive type of path parameter "project_id" because model "posthog.models.web_experiment.WebExperiment" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/posthog/batch_exports/http.py: Warning [BatchExportOrganizationViewSet]: could not derive type of path parameter "organization_id" because model "posthog.batch_exports.models.BatchExport" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/posthog/batch_exports/http.py: Warning [BatchExportRunViewSet]: could not derive type of path parameter "project_id" because model "posthog.batch_exports.models.BatchExportRun" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/posthog/batch_exports/http.py: Warning [BatchExportViewSet > BatchExportSerializer]: could not resolve serializer field "HogQLSelectQueryField(required=False)". Defaulting to "string"',
+ '/home/runner/work/posthog/posthog/posthog/batch_exports/http.py: Warning [BatchExportViewSet]: could not derive type of path parameter "project_id" because model "posthog.batch_exports.models.BatchExport" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/home/runner/work/posthog/posthog/posthog/models/event/util.py: Warning [EventViewSet > ClickhouseEventSerializer]: unable to resolve type hint for function "get_distinct_id". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/posthog/models/event/util.py: Warning [EventViewSet > ClickhouseEventSerializer]: unable to resolve type hint for function "get_elements". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/posthog/models/event/util.py: Warning [EventViewSet > ClickhouseEventSerializer]: unable to resolve type hint for function "get_elements_chain". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/posthog/models/event/util.py: Warning [EventViewSet > ClickhouseEventSerializer]: unable to resolve type hint for function "get_event". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/posthog/models/event/util.py: Warning [EventViewSet > ClickhouseEventSerializer]: unable to resolve type hint for function "get_id". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/posthog/models/event/util.py: Warning [EventViewSet > ClickhouseEventSerializer]: unable to resolve type hint for function "get_person". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/posthog/models/event/util.py: Warning [EventViewSet > ClickhouseEventSerializer]: unable to resolve type hint for function "get_properties". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/posthog/models/event/util.py: Warning [EventViewSet > ClickhouseEventSerializer]: unable to resolve type hint for function "get_timestamp". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/posthog/session_recordings/session_recording_api.py: Warning [SessionRecordingViewSet > SessionRecordingSerializer]: unable to resolve type hint for function "storage". Consider using a type hint or @extend_schema_field. Defaulting to string.',
+ '/home/runner/work/posthog/posthog/posthog/session_recordings/session_recording_api.py: Warning [SessionRecordingViewSet]: could not derive type of path parameter "project_id" because model "posthog.session_recordings.models.session_recording.SessionRecording" contained no such field. Consider annotating parameter with @extend_schema. Defaulting to "string".',
+ '/opt/hostedtoolcache/Python/3.11.9/x64/lib/python3.11/site-packages/pydantic/_internal/_model_construction.py: Warning [QueryViewSet > ModelMetaclass]: Encountered 2 components with identical names "Person" and different classes and . This will very likely result in an incorrect schema. Try renaming one.',
+ 'Warning: encountered multiple names for the same choice set (EffectivePrivilegeLevelEnum). This may be unwanted even though the generated schema is technically correct. Add an entry to ENUM_NAME_OVERRIDES to fix the naming.',
'Warning: encountered multiple names for the same choice set (HrefMatchingEnum). This may be unwanted even though the generated schema is technically correct. Add an entry to ENUM_NAME_OVERRIDES to fix the naming.',
- 'Warning: enum naming encountered a non-optimally resolvable collision for fields named "kind". The same name has been used for multiple choice sets in multiple components. The collision was resolved with "KindCfaEnum". add an entry to ENUM_NAME_OVERRIDES to fix the naming.',
+ 'Warning: encountered multiple names for the same choice set (MembershipLevelEnum). This may be unwanted even though the generated schema is technically correct. Add an entry to ENUM_NAME_OVERRIDES to fix the naming.',
'Warning: enum naming encountered a non-optimally resolvable collision for fields named "kind". The same name has been used for multiple choice sets in multiple components. The collision was resolved with "Kind069Enum". add an entry to ENUM_NAME_OVERRIDES to fix the naming.',
+ 'Warning: enum naming encountered a non-optimally resolvable collision for fields named "kind". The same name has been used for multiple choice sets in multiple components. The collision was resolved with "Kind0ddEnum". add an entry to ENUM_NAME_OVERRIDES to fix the naming.',
+ 'Warning: enum naming encountered a non-optimally resolvable collision for fields named "kind". The same name has been used for multiple choice sets in multiple components. The collision was resolved with "Kind496Enum". add an entry to ENUM_NAME_OVERRIDES to fix the naming.',
+ 'Warning: enum naming encountered a non-optimally resolvable collision for fields named "kind". The same name has been used for multiple choice sets in multiple components. The collision was resolved with "KindCfaEnum". add an entry to ENUM_NAME_OVERRIDES to fix the naming.',
'Warning: enum naming encountered a non-optimally resolvable collision for fields named "type". The same name has been used for multiple choice sets in multiple components. The collision was resolved with "TypeF73Enum". add an entry to ENUM_NAME_OVERRIDES to fix the naming.',
- 'Warning: encountered multiple names for the same choice set (EffectivePrivilegeLevelEnum). This may be unwanted even though the generated schema is technically correct. Add an entry to ENUM_NAME_OVERRIDES to fix the naming.',
- 'Warning: encountered multiple names for the same choice set (MembershipLevelEnum). This may be unwanted even though the generated schema is technically correct. Add an entry to ENUM_NAME_OVERRIDES to fix the naming.',
- 'Warning: operationId "environments_app_metrics_historical_exports_retrieve" has collisions [(\'/api/environments/{project_id}/app_metrics/{plugin_config_id}/historical_exports/\', \'get\'), (\'/api/environments/{project_id}/app_metrics/{plugin_config_id}/historical_exports/{id}/\', \'get\')]. resolving with numeral suffixes.',
- 'Warning: operationId "environments_insights_activity_retrieve" has collisions [(\'/api/environments/{project_id}/insights/{id}/activity/\', \'get\'), (\'/api/environments/{project_id}/insights/activity/\', \'get\')]. resolving with numeral suffixes.',
'Warning: operationId "Funnels" has collisions [(\'/api/environments/{project_id}/insights/funnel/\', \'post\'), (\'/api/projects/{project_id}/insights/funnel/\', \'post\')]. resolving with numeral suffixes.',
'Warning: operationId "Trends" has collisions [(\'/api/environments/{project_id}/insights/trend/\', \'post\'), (\'/api/projects/{project_id}/insights/trend/\', \'post\')]. resolving with numeral suffixes.',
- 'Warning: operationId "environments_persons_activity_retrieve" has collisions [(\'/api/environments/{project_id}/persons/{id}/activity/\', \'get\'), (\'/api/environments/{project_id}/persons/activity/\', \'get\')]. resolving with numeral suffixes.',
- 'Warning: operationId "list" has collisions [(\'/api/organizations/\', \'get\'), (\'/api/organizations/{organization_id}/projects/\', \'get\')]. resolving with numeral suffixes.',
- 'Warning: operationId "create" has collisions [(\'/api/organizations/\', \'post\'), (\'/api/organizations/{organization_id}/projects/\', \'post\')]. resolving with numeral suffixes.',
- 'Warning: operationId "retrieve" has collisions [(\'/api/organizations/{id}/\', \'get\'), (\'/api/organizations/{organization_id}/projects/{id}/\', \'get\')]. resolving with numeral suffixes.',
- 'Warning: operationId "update" has collisions [(\'/api/organizations/{id}/\', \'put\'), (\'/api/organizations/{organization_id}/projects/{id}/\', \'put\')]. resolving with numeral suffixes.',
- 'Warning: operationId "partial_update" has collisions [(\'/api/organizations/{id}/\', \'patch\'), (\'/api/organizations/{organization_id}/projects/{id}/\', \'patch\')]. resolving with numeral suffixes.',
- 'Warning: operationId "destroy" has collisions [(\'/api/organizations/{id}/\', \'delete\'), (\'/api/organizations/{organization_id}/projects/{id}/\', \'delete\')]. resolving with numeral suffixes.',
- 'Warning: operationId "batch_exports_list" has collisions [(\'/api/organizations/{organization_id}/batch_exports/\', \'get\'), (\'/api/projects/{project_id}/batch_exports/\', \'get\')]. resolving with numeral suffixes.',
+ 'Warning: operationId "app_metrics_historical_exports_retrieve" has collisions [(\'/api/projects/{project_id}/app_metrics/{plugin_config_id}/historical_exports/\', \'get\'), (\'/api/projects/{project_id}/app_metrics/{plugin_config_id}/historical_exports/{id}/\', \'get\')]. resolving with numeral suffixes.',
+ 'Warning: operationId "batch_exports_backfill_create" has collisions [(\'/api/organizations/{organization_id}/batch_exports/{id}/backfill/\', \'post\'), (\'/api/projects/{project_id}/batch_exports/{id}/backfill/\', \'post\')]. resolving with numeral suffixes.',
'Warning: operationId "batch_exports_create" has collisions [(\'/api/organizations/{organization_id}/batch_exports/\', \'post\'), (\'/api/projects/{project_id}/batch_exports/\', \'post\')]. resolving with numeral suffixes.',
- 'Warning: operationId "batch_exports_retrieve" has collisions [(\'/api/organizations/{organization_id}/batch_exports/{id}/\', \'get\'), (\'/api/projects/{project_id}/batch_exports/{id}/\', \'get\')]. resolving with numeral suffixes.',
- 'Warning: operationId "batch_exports_update" has collisions [(\'/api/organizations/{organization_id}/batch_exports/{id}/\', \'put\'), (\'/api/projects/{project_id}/batch_exports/{id}/\', \'put\')]. resolving with numeral suffixes.',
- 'Warning: operationId "batch_exports_partial_update" has collisions [(\'/api/organizations/{organization_id}/batch_exports/{id}/\', \'patch\'), (\'/api/projects/{project_id}/batch_exports/{id}/\', \'patch\')]. resolving with numeral suffixes.',
'Warning: operationId "batch_exports_destroy" has collisions [(\'/api/organizations/{organization_id}/batch_exports/{id}/\', \'delete\'), (\'/api/projects/{project_id}/batch_exports/{id}/\', \'delete\')]. resolving with numeral suffixes.',
- 'Warning: operationId "batch_exports_backfill_create" has collisions [(\'/api/organizations/{organization_id}/batch_exports/{id}/backfill/\', \'post\'), (\'/api/projects/{project_id}/batch_exports/{id}/backfill/\', \'post\')]. resolving with numeral suffixes.',
+ 'Warning: operationId "batch_exports_list" has collisions [(\'/api/organizations/{organization_id}/batch_exports/\', \'get\'), (\'/api/projects/{project_id}/batch_exports/\', \'get\')]. resolving with numeral suffixes.',
'Warning: operationId "batch_exports_logs_retrieve" has collisions [(\'/api/organizations/{organization_id}/batch_exports/{id}/logs/\', \'get\'), (\'/api/projects/{project_id}/batch_exports/{id}/logs/\', \'get\')]. resolving with numeral suffixes.',
+ 'Warning: operationId "batch_exports_partial_update" has collisions [(\'/api/organizations/{organization_id}/batch_exports/{id}/\', \'patch\'), (\'/api/projects/{project_id}/batch_exports/{id}/\', \'patch\')]. resolving with numeral suffixes.',
'Warning: operationId "batch_exports_pause_create" has collisions [(\'/api/organizations/{organization_id}/batch_exports/{id}/pause/\', \'post\'), (\'/api/projects/{project_id}/batch_exports/{id}/pause/\', \'post\')]. resolving with numeral suffixes.',
+ 'Warning: operationId "batch_exports_retrieve" has collisions [(\'/api/organizations/{organization_id}/batch_exports/{id}/\', \'get\'), (\'/api/projects/{project_id}/batch_exports/{id}/\', \'get\')]. resolving with numeral suffixes.',
'Warning: operationId "batch_exports_unpause_create" has collisions [(\'/api/organizations/{organization_id}/batch_exports/{id}/unpause/\', \'post\'), (\'/api/projects/{project_id}/batch_exports/{id}/unpause/\', \'post\')]. resolving with numeral suffixes.',
- 'Warning: operationId "app_metrics_historical_exports_retrieve" has collisions [(\'/api/projects/{project_id}/app_metrics/{plugin_config_id}/historical_exports/\', \'get\'), (\'/api/projects/{project_id}/app_metrics/{plugin_config_id}/historical_exports/{id}/\', \'get\')]. resolving with numeral suffixes.',
+ 'Warning: operationId "batch_exports_update" has collisions [(\'/api/organizations/{organization_id}/batch_exports/{id}/\', \'put\'), (\'/api/projects/{project_id}/batch_exports/{id}/\', \'put\')]. resolving with numeral suffixes.',
'Warning: operationId "cohorts_activity_retrieve" has collisions [(\'/api/projects/{project_id}/cohorts/{id}/activity/\', \'get\'), (\'/api/projects/{project_id}/cohorts/activity/\', \'get\')]. resolving with numeral suffixes.',
+ 'Warning: operationId "create" has collisions [(\'/api/organizations/\', \'post\'), (\'/api/organizations/{organization_id}/projects/\', \'post\')]. resolving with numeral suffixes.',
+ 'Warning: operationId "destroy" has collisions [(\'/api/organizations/{id}/\', \'delete\'), (\'/api/organizations/{organization_id}/projects/{id}/\', \'delete\')]. resolving with numeral suffixes.',
+ 'Warning: operationId "environments_app_metrics_historical_exports_retrieve" has collisions [(\'/api/environments/{project_id}/app_metrics/{plugin_config_id}/historical_exports/\', \'get\'), (\'/api/environments/{project_id}/app_metrics/{plugin_config_id}/historical_exports/{id}/\', \'get\')]. resolving with numeral suffixes.',
+ 'Warning: operationId "environments_insights_activity_retrieve" has collisions [(\'/api/environments/{project_id}/insights/{id}/activity/\', \'get\'), (\'/api/environments/{project_id}/insights/activity/\', \'get\')]. resolving with numeral suffixes.',
+ 'Warning: operationId "environments_persons_activity_retrieve" has collisions [(\'/api/environments/{project_id}/persons/{id}/activity/\', \'get\'), (\'/api/environments/{project_id}/persons/activity/\', \'get\')]. resolving with numeral suffixes.',
'Warning: operationId "event_definitions_retrieve" has collisions [(\'/api/projects/{project_id}/event_definitions/\', \'get\'), (\'/api/projects/{project_id}/event_definitions/{id}/\', \'get\')]. resolving with numeral suffixes.',
'Warning: operationId "feature_flags_activity_retrieve" has collisions [(\'/api/projects/{project_id}/feature_flags/{id}/activity/\', \'get\'), (\'/api/projects/{project_id}/feature_flags/activity/\', \'get\')]. resolving with numeral suffixes.',
'Warning: operationId "insights_activity_retrieve" has collisions [(\'/api/projects/{project_id}/insights/{id}/activity/\', \'get\'), (\'/api/projects/{project_id}/insights/activity/\', \'get\')]. resolving with numeral suffixes.',
+ 'Warning: operationId "list" has collisions [(\'/api/organizations/\', \'get\'), (\'/api/organizations/{organization_id}/projects/\', \'get\')]. resolving with numeral suffixes.',
'Warning: operationId "notebooks_activity_retrieve" has collisions [(\'/api/projects/{project_id}/notebooks/{short_id}/activity/\', \'get\'), (\'/api/projects/{project_id}/notebooks/activity/\', \'get\')]. resolving with numeral suffixes.',
+ 'Warning: operationId "partial_update" has collisions [(\'/api/organizations/{id}/\', \'patch\'), (\'/api/organizations/{organization_id}/projects/{id}/\', \'patch\')]. resolving with numeral suffixes.',
'Warning: operationId "persons_activity_retrieve" has collisions [(\'/api/projects/{project_id}/persons/{id}/activity/\', \'get\'), (\'/api/projects/{project_id}/persons/activity/\', \'get\')]. resolving with numeral suffixes.',
'Warning: operationId "property_definitions_retrieve" has collisions [(\'/api/projects/{project_id}/property_definitions/\', \'get\'), (\'/api/projects/{project_id}/property_definitions/{id}/\', \'get\')]. resolving with numeral suffixes.',
+ 'Warning: operationId "retrieve" has collisions [(\'/api/organizations/{id}/\', \'get\'), (\'/api/organizations/{organization_id}/projects/{id}/\', \'get\')]. resolving with numeral suffixes.',
'Warning: operationId "surveys_activity_retrieve" has collisions [(\'/api/projects/{project_id}/surveys/{id}/activity/\', \'get\'), (\'/api/projects/{project_id}/surveys/activity/\', \'get\')]. resolving with numeral suffixes.',
- '',
+ 'Warning: operationId "update" has collisions [(\'/api/organizations/{id}/\', \'put\'), (\'/api/organizations/{organization_id}/projects/{id}/\', \'put\')]. resolving with numeral suffixes.',
])
# ---
diff --git a/posthog/api/test/test_api_docs.py b/posthog/api/test/test_api_docs.py
index f7f7ab1d418b0..d5e1fc9b3bbf6 100644
--- a/posthog/api/test/test_api_docs.py
+++ b/posthog/api/test/test_api_docs.py
@@ -30,4 +30,4 @@ def test_api_docs_generation_warnings_snapshot(self) -> None:
# we log lots of warnings when generating the schema
warnings = self._capsys.readouterr().err.split("\n")
- assert warnings == self._snapshot
+ assert sorted(warnings) == self._snapshot
diff --git a/posthog/api/test/test_organization_feature_flag.py b/posthog/api/test/test_organization_feature_flag.py
index 83550c689d5ae..65c43575f9b7e 100644
--- a/posthog/api/test/test_organization_feature_flag.py
+++ b/posthog/api/test/test_organization_feature_flag.py
@@ -3,7 +3,7 @@
from rest_framework import status
-from ee.models.organization_resource_access import OrganizationResourceAccess
+from ee.models.rbac.organization_resource_access import OrganizationResourceAccess
from posthog.api.dashboards.dashboard import Dashboard
from posthog.constants import AvailableFeature
from posthog.models import FeatureFlag
diff --git a/posthog/api/test/test_site_app.py b/posthog/api/test/test_site_app.py
index 9a428774c6ea7..92340e67144bd 100644
--- a/posthog/api/test/test_site_app.py
+++ b/posthog/api/test/test_site_app.py
@@ -41,6 +41,42 @@ def test_site_app(self):
f"function inject(){{}}().inject({{config:{{}},posthog:window['__$$ph_site_app_{plugin_config.id}']}})",
)
+ def test_cors_access(self):
+ plugin = Plugin.objects.create(organization=self.team.organization, name="My Plugin", plugin_type="source")
+ PluginSourceFile.objects.create(
+ plugin=plugin,
+ filename="site.ts",
+ source="export function inject (){}",
+ transpiled="function inject(){}",
+ status=PluginSourceFile.Status.TRANSPILED,
+ )
+ plugin_config = PluginConfig.objects.create(
+ plugin=plugin,
+ enabled=True,
+ order=1,
+ team=self.team,
+ config={},
+ web_token="tokentoken",
+ )
+
+ unauthenticated_client = Client(enforce_csrf_checks=True)
+ unauthenticated_client.logout()
+ request_headers = {"HTTP_ACCESS_CONTROL_REQUEST_METHOD": "GET", "HTTP_ORIGIN": "*", "USER_AGENT": "Agent 008"}
+ response = unauthenticated_client.get(
+ f"/site_app/{plugin_config.id}/tokentoken/somehash/",
+ data={},
+ follow=False,
+ secure=False,
+ headers={},
+ **request_headers,
+ )
+
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+ self.assertEqual(
+ response.content.decode("utf-8"),
+ f"function inject(){{}}().inject({{config:{{}},posthog:window['__$$ph_site_app_{plugin_config.id}']}})",
+ )
+
def test_get_site_config_from_schema(self):
schema: list[dict] = [{"key": "in_site", "site": True}, {"key": "not_in_site"}]
config = {"in_site": "123", "not_in_site": "12345"}
diff --git a/posthog/api/test/test_survey.py b/posthog/api/test/test_survey.py
index c6de95a44702e..c79ab601b3369 100644
--- a/posthog/api/test/test_survey.py
+++ b/posthog/api/test/test_survey.py
@@ -11,6 +11,7 @@
from rest_framework import status
from posthog.api.survey import nh3_clean_with_allow_list
+from posthog.api.test.test_personal_api_keys import PersonalAPIKeysBaseTest
from posthog.constants import AvailableFeature
from posthog.models import Action, FeatureFlag, Team
from posthog.models.cohort.cohort import Cohort
@@ -2783,6 +2784,41 @@ def test_list_surveys_excludes_description(self):
assert surveys[1]["name"] == "Survey 2"
+class TestSurveyAPITokens(PersonalAPIKeysBaseTest, APIBaseTest):
+ def setUp(self):
+ super().setUp()
+ self.key.scopes = ["survey:read"]
+ self.key.save()
+
+ @freeze_time("2024-05-01 14:40:09")
+ def test_responses_count_works_with_survey_read(self):
+ survey_counts = {
+ "d63bb580-01af-4819-aae5-edcf7ef2044f": 3,
+ "fe7c4b62-8fc9-401e-b483-e4ff98fd13d5": 6,
+ "daed7689-d498-49fe-936f-e85554351b6c": 100,
+ }
+
+ earliest_survey = Survey.objects.create(team_id=self.team.id)
+ earliest_survey.start_date = datetime.now() - timedelta(days=101)
+ earliest_survey.save()
+
+ for survey_id, count in survey_counts.items():
+ for _ in range(count):
+ _create_event(
+ event="survey sent",
+ team=self.team,
+ distinct_id=self.user.id,
+ properties={"$survey_id": survey_id},
+ timestamp=datetime.now() - timedelta(days=count),
+ )
+
+ response = self._do_request(f"/api/projects/{self.team.id}/surveys/responses_count")
+ self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+ data = response.json()
+ self.assertEqual(data, survey_counts)
+
+
class TestResponsesCount(ClickhouseTestMixin, APIBaseTest):
@snapshot_clickhouse_queries
@freeze_time("2024-05-01 14:40:09")
diff --git a/posthog/clickhouse/migrations/0083_recreate_sessions_v1_after_limiting_teams.py b/posthog/clickhouse/migrations/0083_recreate_sessions_v1_after_limiting_teams.py
new file mode 100644
index 0000000000000..d0da2eb158b6d
--- /dev/null
+++ b/posthog/clickhouse/migrations/0083_recreate_sessions_v1_after_limiting_teams.py
@@ -0,0 +1,8 @@
+from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions
+from posthog.models.sessions.sql import DROP_SESSION_MATERIALIZED_VIEW_SQL, SESSIONS_TABLE_MV_SQL
+
+operations = [
+ # drop the mv, and recreate it with the new part of the WHERE clause
+ run_sql_with_exceptions(DROP_SESSION_MATERIALIZED_VIEW_SQL()),
+ run_sql_with_exceptions(SESSIONS_TABLE_MV_SQL()),
+]
diff --git a/posthog/clickhouse/test/__snapshots__/test_schema.ambr b/posthog/clickhouse/test/__snapshots__/test_schema.ambr
index bc5a623ed0cdc..1fc54f30fdc78 100644
--- a/posthog/clickhouse/test/__snapshots__/test_schema.ambr
+++ b/posthog/clickhouse/test/__snapshots__/test_schema.ambr
@@ -2160,7 +2160,7 @@
sumIf(1, event='$autocapture') as autocapture_count
FROM posthog_test.sharded_events
- WHERE `$session_id` IS NOT NULL AND `$session_id` != ''
+ WHERE `$session_id` IS NOT NULL AND `$session_id` != '' AND team_id IN (1, 2, 13610, 19279, 21173, 29929, 32050, 9910, 11775, 21129, 31490)
GROUP BY `$session_id`, team_id
diff --git a/posthog/clickhouse/test/test_sessions_model.py b/posthog/clickhouse/test/test_sessions_model.py
index 0e3631e0ab3cd..0042456a03d95 100644
--- a/posthog/clickhouse/test/test_sessions_model.py
+++ b/posthog/clickhouse/test/test_sessions_model.py
@@ -1,8 +1,10 @@
from posthog.clickhouse.client import sync_execute, query_with_columns
+from posthog.models import Team
from posthog.test.base import (
_create_event,
ClickhouseTestMixin,
BaseTest,
+ ClickhouseDestroyTablesMixin,
)
distinct_id_counter = 0
@@ -21,7 +23,12 @@ def create_session_id():
return f"s{session_id_counter}"
-class TestSessionsModel(ClickhouseTestMixin, BaseTest):
+# only certain team ids can insert events into this legacy sessions table, see sessions/sql.py for more info
+TEAM_ID = 2
+TEAM = Team(id=TEAM_ID)
+
+
+class TestSessionsModel(ClickhouseDestroyTablesMixin, ClickhouseTestMixin, BaseTest):
def select_by_session_id(self, session_id):
return query_with_columns(
"""
@@ -34,7 +41,7 @@ def select_by_session_id(self, session_id):
""",
{
"session_id": session_id,
- "team_id": self.team.id,
+ "team_id": TEAM_ID,
},
)
@@ -42,7 +49,7 @@ def test_it_creates_session_when_creating_event(self):
distinct_id = create_distinct_id()
session_id = create_session_id()
_create_event(
- team=self.team,
+ team=TEAM,
event="$pageview",
distinct_id=distinct_id,
properties={"$current_url": "/", "$session_id": session_id},
@@ -60,7 +67,7 @@ def test_it_creates_session_when_creating_event(self):
""",
{
"distinct_id": distinct_id,
- "team_id": self.team.id,
+ "team_id": TEAM_ID,
},
)
@@ -72,14 +79,14 @@ def test_handles_different_distinct_id_across_same_session(self):
session_id = create_session_id()
_create_event(
- team=self.team,
+ team=TEAM,
event="$pageview",
distinct_id=distinct_id1,
properties={"$session_id": session_id},
timestamp="2024-03-08",
)
_create_event(
- team=self.team,
+ team=TEAM,
event="$pageview",
distinct_id=distinct_id2,
properties={"$session_id": session_id},
@@ -96,28 +103,28 @@ def test_handles_entry_and_exit_urls(self):
session_id = create_session_id()
_create_event(
- team=self.team,
+ team=TEAM,
event="$pageview",
distinct_id=distinct_id,
properties={"$current_url": "/entry", "$session_id": session_id},
timestamp="2024-03-08:01",
)
_create_event(
- team=self.team,
+ team=TEAM,
event="$pageview",
distinct_id=distinct_id,
properties={"$current_url": "/middle", "$session_id": session_id},
timestamp="2024-03-08:02",
)
_create_event(
- team=self.team,
+ team=TEAM,
event="$pageview",
distinct_id=distinct_id,
properties={"$current_url": "/middle", "$session_id": session_id},
timestamp="2024-03-08:03",
)
_create_event(
- team=self.team,
+ team=TEAM,
event="$pageview",
distinct_id=distinct_id,
properties={"$current_url": "/exit", "$session_id": session_id},
@@ -136,14 +143,14 @@ def test_handles_initial_utm_properties(self):
session_id = create_session_id()
_create_event(
- team=self.team,
+ team=TEAM,
event="$pageview",
distinct_id=distinct_id,
properties={"$session_id": session_id, "utm_source": "source"},
timestamp="2024-03-08",
)
_create_event(
- team=self.team,
+ team=TEAM,
event="$pageview",
distinct_id=distinct_id,
properties={"$session_id": session_id, "utm_source": "other_source"},
@@ -159,35 +166,35 @@ def test_counts_pageviews_autocaptures_and_events(self):
session_id = create_session_id()
_create_event(
- team=self.team,
+ team=TEAM,
event="$pageview",
distinct_id=distinct_id,
properties={"$session_id": session_id},
timestamp="2024-03-08",
)
_create_event(
- team=self.team,
+ team=TEAM,
event="$autocapture",
distinct_id=distinct_id,
properties={"$session_id": session_id},
timestamp="2024-03-08",
)
_create_event(
- team=self.team,
+ team=TEAM,
event="$autocapture",
distinct_id=distinct_id,
properties={"$session_id": session_id},
timestamp="2024-03-08",
)
_create_event(
- team=self.team,
+ team=TEAM,
event="other event",
distinct_id=distinct_id,
properties={"$session_id": session_id},
timestamp="2024-03-08",
)
_create_event(
- team=self.team,
+ team=TEAM,
event="$pageleave",
distinct_id=distinct_id,
properties={"$session_id": session_id},
@@ -209,14 +216,14 @@ def test_separates_sessions_across_same_user(self):
session_id3 = create_session_id()
_create_event(
- team=self.team,
+ team=TEAM,
event="$pageview",
distinct_id=distinct_id,
properties={"$session_id": session_id1},
timestamp="2024-03-08",
)
_create_event(
- team=self.team,
+ team=TEAM,
event="$pageview",
distinct_id=distinct_id,
properties={"$session_id": session_id2},
@@ -235,7 +242,7 @@ def test_select_from_sessions(self):
distinct_id = create_distinct_id()
session_id = create_session_id()
_create_event(
- team=self.team,
+ team=TEAM,
event="$pageview",
distinct_id=distinct_id,
properties={"$session_id": session_id},
@@ -260,7 +267,7 @@ def test_select_from_sessions(self):
""",
{
"session_id": session_id,
- "team_id": self.team.id,
+ "team_id": TEAM_ID,
},
)
self.assertEqual(len(responses), 1)
@@ -270,7 +277,7 @@ def test_select_from_sessions_mv(self):
distinct_id = create_distinct_id()
session_id = create_session_id()
_create_event(
- team=self.team,
+ team=TEAM,
event="$pageview",
distinct_id=distinct_id,
properties={"$session_id": session_id},
@@ -295,7 +302,7 @@ def test_select_from_sessions_mv(self):
""",
{
"session_id": session_id,
- "team_id": self.team.id,
+ "team_id": TEAM_ID,
},
)
self.assertEqual(len(responses), 1)
diff --git a/posthog/hogql/autocomplete.py b/posthog/hogql/autocomplete.py
index 09b5af34be0b9..0067b27bfca5c 100644
--- a/posthog/hogql/autocomplete.py
+++ b/posthog/hogql/autocomplete.py
@@ -36,7 +36,7 @@
HogQLAutocomplete,
HogQLAutocompleteResponse,
AutocompleteCompletionItem,
- Kind,
+ AutocompleteCompletionItemKind,
HogLanguage,
)
from hogvm.python.stl import STL
@@ -285,7 +285,7 @@ def append_table_field_to_response(table: Table, suggestions: list[AutocompleteC
extend_responses(
available_functions,
suggestions,
- Kind.FUNCTION,
+ AutocompleteCompletionItemKind.FUNCTION,
insert_text=lambda key: f"{key}()",
)
@@ -293,7 +293,7 @@ def append_table_field_to_response(table: Table, suggestions: list[AutocompleteC
def extend_responses(
keys: list[str],
suggestions: list[AutocompleteCompletionItem],
- kind: Kind = Kind.VARIABLE,
+ kind: AutocompleteCompletionItemKind = AutocompleteCompletionItemKind.VARIABLE,
insert_text: Optional[Callable[[str], str]] = None,
details: Optional[list[str | None]] = None,
) -> None:
@@ -448,12 +448,12 @@ def get_hogql_autocomplete(
extend_responses(
keys=hog_vars,
suggestions=response.suggestions,
- kind=Kind.VARIABLE,
+ kind=AutocompleteCompletionItemKind.VARIABLE,
)
extend_responses(
ALL_HOG_FUNCTIONS,
response.suggestions,
- Kind.FUNCTION,
+ AutocompleteCompletionItemKind.FUNCTION,
insert_text=lambda key: f"{key}()",
)
@@ -509,7 +509,7 @@ def get_hogql_autocomplete(
extend_responses(
keys=table_aliases,
suggestions=response.suggestions,
- kind=Kind.FOLDER,
+ kind=AutocompleteCompletionItemKind.FOLDER,
details=["Table"] * len(table_aliases),
)
break
@@ -608,7 +608,7 @@ def get_hogql_autocomplete(
extend_responses(
keys=table_names,
suggestions=response.suggestions,
- kind=Kind.FOLDER,
+ kind=AutocompleteCompletionItemKind.FOLDER,
details=["Table"] * len(table_names),
)
elif isinstance(node, ast.Field) and isinstance(parent_node, ast.Placeholder):
@@ -622,7 +622,7 @@ def get_hogql_autocomplete(
extend_responses(
keys=code_names,
suggestions=response.suggestions,
- kind=Kind.CONSTANT,
+ kind=AutocompleteCompletionItemKind.CONSTANT,
details=["Variable"] * len(code_names),
)
elif len(node.chain) > 1 and node.chain[0] == "variables":
@@ -633,7 +633,7 @@ def get_hogql_autocomplete(
extend_responses(
keys=code_names,
suggestions=response.suggestions,
- kind=Kind.CONSTANT,
+ kind=AutocompleteCompletionItemKind.CONSTANT,
details=["Variable"] * len(code_names),
)
except Exception:
@@ -692,6 +692,6 @@ def add_globals_to_suggestions(globalVars: dict, response: HogQLAutocompleteResp
extend_responses(
keys=list(globalVars.keys()),
suggestions=response.suggestions,
- kind=Kind.VARIABLE,
+ kind=AutocompleteCompletionItemKind.VARIABLE,
details=values,
)
diff --git a/posthog/hogql/database/schema/test/test_sessions_v1.py b/posthog/hogql/database/schema/test/test_sessions_v1.py
index eefd04197deab..77f41fd2f6bbc 100644
--- a/posthog/hogql/database/schema/test/test_sessions_v1.py
+++ b/posthog/hogql/database/schema/test/test_sessions_v1.py
@@ -8,6 +8,7 @@
)
from posthog.hogql.parser import parse_select
from posthog.hogql.query import execute_hogql_query
+from posthog.models import Team
from posthog.models.property_definition import PropertyType
from posthog.models.utils import uuid7
from posthog.schema import HogQLQueryModifiers, BounceRatePageViewMode, SessionTableVersion
@@ -15,16 +16,20 @@
APIBaseTest,
ClickhouseTestMixin,
_create_event,
- _create_person,
+ ClickhouseDestroyTablesMixin,
)
+# only certain team ids can insert events into this legacy sessions table, see sessions/sql.py for more info
+TEAM_ID = 2
+TEAM = Team(id=TEAM_ID, pk=TEAM_ID)
-class TestSessionsV1(ClickhouseTestMixin, APIBaseTest):
+
+class TestSessionsV1(ClickhouseDestroyTablesMixin, ClickhouseTestMixin, APIBaseTest):
def __execute(self, query):
modifiers = HogQLQueryModifiers(sessionTableVersion=SessionTableVersion.V1)
return execute_hogql_query(
query=query,
- team=self.team,
+ team=TEAM,
modifiers=modifiers,
)
@@ -33,7 +38,7 @@ def test_select_star(self):
_create_event(
event="$pageview",
- team=self.team,
+ team=TEAM,
distinct_id="d1",
properties={"$current_url": "https://example.com", "$session_id": session_id},
)
@@ -56,7 +61,7 @@ def test_select_event_sessions_star(self):
_create_event(
event="$pageview",
- team=self.team,
+ team=TEAM,
distinct_id="d1",
properties={"$current_url": "https://example.com", "$session_id": session_id},
)
@@ -93,7 +98,7 @@ def test_channel_type(self):
_create_event(
event="$pageview",
- team=self.team,
+ team=TEAM,
distinct_id="d1",
properties={"gad_source": "1", "$session_id": session_id},
)
@@ -116,7 +121,7 @@ def test_event_dot_session_dot_channel_type(self):
_create_event(
event="$pageview",
- team=self.team,
+ team=TEAM,
distinct_id="d1",
properties={"gad_source": "1", "$session_id": session_id},
)
@@ -139,7 +144,7 @@ def test_events_session_dot_channel_type(self):
_create_event(
event="$pageview",
- team=self.team,
+ team=TEAM,
distinct_id="d1",
properties={"gad_source": "1", "$session_id": session_id},
)
@@ -157,57 +162,26 @@ def test_events_session_dot_channel_type(self):
"Paid Search",
)
- def test_persons_and_sessions_on_events(self):
- p1 = _create_person(distinct_ids=["d1"], team=self.team)
- p2 = _create_person(distinct_ids=["d2"], team=self.team)
-
- s1 = "session_test_persons_and_sessions_on_events_1"
- s2 = "session_test_persons_and_sessions_on_events_2"
-
- _create_event(
- event="$pageview",
- team=self.team,
- distinct_id="d1",
- properties={"$session_id": s1, "utm_source": "source1"},
- )
- _create_event(
- event="$pageview",
- team=self.team,
- distinct_id="d2",
- properties={"$session_id": s2, "utm_source": "source2"},
- )
-
- response = self.__execute(
- parse_select(
- "select events.person_id, session.$entry_utm_source from events where $session_id = {session_id} or $session_id = {session_id2} order by 2 asc",
- placeholders={"session_id": ast.Constant(value=s1), "session_id2": ast.Constant(value=s2)},
- ),
- )
-
- [row1, row2] = response.results or []
- self.assertEqual(row1, (p1.uuid, "source1"))
- self.assertEqual(row2, (p2.uuid, "source2"))
-
@parameterized.expand([(BounceRatePageViewMode.UNIQ_URLS,), (BounceRatePageViewMode.COUNT_PAGEVIEWS,)])
def test_bounce_rate(self, bounceRatePageViewMode):
# person with 2 different sessions
_create_event(
event="$pageview",
- team=self.team,
+ team=TEAM,
distinct_id="d1",
properties={"$session_id": "s1a", "$current_url": "https://example.com/1"},
timestamp="2023-12-02",
)
_create_event(
event="$pageview",
- team=self.team,
+ team=TEAM,
distinct_id="d1",
properties={"$session_id": "s1a", "$current_url": "https://example.com/2"},
timestamp="2023-12-03",
)
_create_event(
event="$pageview",
- team=self.team,
+ team=TEAM,
distinct_id="d1",
properties={"$session_id": "s1b", "$current_url": "https://example.com/3"},
timestamp="2023-12-12",
@@ -215,7 +189,7 @@ def test_bounce_rate(self, bounceRatePageViewMode):
# session with 1 pageview
_create_event(
event="$pageview",
- team=self.team,
+ team=TEAM,
distinct_id="d2",
properties={"$session_id": "s2", "$current_url": "https://example.com/4"},
timestamp="2023-12-11",
@@ -223,14 +197,14 @@ def test_bounce_rate(self, bounceRatePageViewMode):
# session with 1 pageview and 1 autocapture
_create_event(
event="$pageview",
- team=self.team,
+ team=TEAM,
distinct_id="d3",
properties={"$session_id": "s3", "$current_url": "https://example.com/5"},
timestamp="2023-12-11",
)
_create_event(
event="$autocapture",
- team=self.team,
+ team=TEAM,
distinct_id="d3",
properties={"$session_id": "s3", "$current_url": "https://example.com/5"},
timestamp="2023-12-11",
@@ -238,14 +212,14 @@ def test_bounce_rate(self, bounceRatePageViewMode):
# short session with a pageleave
_create_event(
event="$pageview",
- team=self.team,
+ team=TEAM,
distinct_id="d4",
properties={"$session_id": "s4", "$current_url": "https://example.com/6"},
timestamp="2023-12-11T12:00:00",
)
_create_event(
event="$pageleave",
- team=self.team,
+ team=TEAM,
distinct_id="d4",
properties={"$session_id": "s4", "$current_url": "https://example.com/6"},
timestamp="2023-12-11T12:00:01",
@@ -253,14 +227,14 @@ def test_bounce_rate(self, bounceRatePageViewMode):
# long session with a pageleave
_create_event(
event="$pageview",
- team=self.team,
+ team=TEAM,
distinct_id="d5",
properties={"$session_id": "s5", "$current_url": "https://example.com/7"},
timestamp="2023-12-11T12:00:00",
)
_create_event(
event="$pageleave",
- team=self.team,
+ team=TEAM,
distinct_id="d5",
properties={"$session_id": "s5", "$current_url": "https://example.com/7"},
timestamp="2023-12-11T12:00:11",
@@ -269,7 +243,7 @@ def test_bounce_rate(self, bounceRatePageViewMode):
parse_select(
"select $is_bounce, session_id from sessions ORDER BY session_id",
),
- self.team,
+ TEAM,
modifiers=HogQLQueryModifiers(
bounceRatePageViewMode=bounceRatePageViewMode, sessionTableVersion=SessionTableVersion.V1
),
@@ -291,7 +265,7 @@ def test_can_use_v1_and_v2_fields(self):
_create_event(
event="$pageview",
- team=self.team,
+ team=TEAM,
distinct_id="d1",
properties={
"$current_url": "https://example.com/pathname",
@@ -372,4 +346,4 @@ def test_entry_utm(self):
def test_can_get_values_for_all(self):
results = get_lazy_session_table_properties_v1(None)
for prop in results:
- get_lazy_session_table_values_v1(key=prop["id"], team=self.team, search_term=None)
+ get_lazy_session_table_values_v1(key=prop["id"], team=TEAM, search_term=None)
diff --git a/posthog/hogql/test/test_autocomplete.py b/posthog/hogql/test/test_autocomplete.py
index 04f62bb0bdabc..d5bbefebe7b19 100644
--- a/posthog/hogql/test/test_autocomplete.py
+++ b/posthog/hogql/test/test_autocomplete.py
@@ -6,7 +6,13 @@
from posthog.hogql.database.schema.persons import PERSONS_FIELDS
from posthog.models.insight_variable import InsightVariable
from posthog.models.property_definition import PropertyDefinition
-from posthog.schema import HogQLAutocomplete, HogQLAutocompleteResponse, HogLanguage, HogQLQuery, Kind
+from posthog.schema import (
+ HogQLAutocomplete,
+ HogQLAutocompleteResponse,
+ HogLanguage,
+ HogQLQuery,
+ AutocompleteCompletionItemKind,
+)
from posthog.test.base import APIBaseTest, ClickhouseTestMixin
from posthog.warehouse.models.credential import DataWarehouseCredential
from posthog.warehouse.models.datawarehouse_saved_query import DataWarehouseSavedQuery
@@ -372,24 +378,24 @@ def test_autocomplete_hog(self):
query = "let var1 := 3; let otherVar := 5; print(v)"
results = self._program(query=query, start=41, end=41, database=database)
- suggestions = list(filter(lambda x: x.kind == Kind.VARIABLE, results.suggestions))
+ suggestions = list(filter(lambda x: x.kind == AutocompleteCompletionItemKind.VARIABLE, results.suggestions))
assert sorted([suggestion.label for suggestion in suggestions]) == ["event", "otherVar", "var1"]
- suggestions = list(filter(lambda x: x.kind == Kind.FUNCTION, results.suggestions))
+ suggestions = list(filter(lambda x: x.kind == AutocompleteCompletionItemKind.FUNCTION, results.suggestions))
assert len(suggestions) > 0
# 2
query = "let var1 := 3; let otherVar := 5; print(v)"
results = self._program(query=query, start=16, end=16, database=database)
- suggestions = list(filter(lambda x: x.kind == Kind.VARIABLE, results.suggestions))
+ suggestions = list(filter(lambda x: x.kind == AutocompleteCompletionItemKind.VARIABLE, results.suggestions))
assert sorted([suggestion.label for suggestion in suggestions]) == ["event", "var1"]
# 3
query = "let var1 := 3; let otherVar := 5; print(v)"
results = self._program(query=query, start=34, end=34, database=database)
- suggestions = list(filter(lambda x: x.kind == Kind.VARIABLE, results.suggestions))
+ suggestions = list(filter(lambda x: x.kind == AutocompleteCompletionItemKind.VARIABLE, results.suggestions))
assert sorted([suggestion.label for suggestion in suggestions]) == ["event", "otherVar", "var1"]
def test_autocomplete_variables(self):
diff --git a/posthog/hogql/transforms/test/__snapshots__/test_in_cohort.ambr b/posthog/hogql/transforms/test/__snapshots__/test_in_cohort.ambr
index f018e96ef067a..06e46e0ca30b0 100644
--- a/posthog/hogql/transforms/test/__snapshots__/test_in_cohort.ambr
+++ b/posthog/hogql/transforms/test/__snapshots__/test_in_cohort.ambr
@@ -31,7 +31,7 @@
FROM events LEFT JOIN (
SELECT person_static_cohort.person_id AS cohort_person_id, 1 AS matched, person_static_cohort.cohort_id AS cohort_id
FROM person_static_cohort
- WHERE and(equals(person_static_cohort.team_id, 420), in(person_static_cohort.cohort_id, [4]))) AS __in_cohort ON equals(__in_cohort.cohort_person_id, events.person_id)
+ WHERE and(equals(person_static_cohort.team_id, 420), in(person_static_cohort.cohort_id, [6]))) AS __in_cohort ON equals(__in_cohort.cohort_person_id, events.person_id)
WHERE and(equals(events.team_id, 420), 1, ifNull(equals(__in_cohort.matched, 1), 0))
LIMIT 100
SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, max_ast_elements=4000000, max_expanded_ast_elements=4000000, max_bytes_before_external_group_by=0
@@ -42,7 +42,7 @@
FROM events LEFT JOIN (
SELECT person_id AS cohort_person_id, 1 AS matched, cohort_id
FROM static_cohort_people
- WHERE in(cohort_id, [4])) AS __in_cohort ON equals(__in_cohort.cohort_person_id, person_id)
+ WHERE in(cohort_id, [6])) AS __in_cohort ON equals(__in_cohort.cohort_person_id, person_id)
WHERE and(1, equals(__in_cohort.matched, 1))
LIMIT 100
'''
@@ -55,7 +55,7 @@
FROM events LEFT JOIN (
SELECT person_static_cohort.person_id AS cohort_person_id, 1 AS matched, person_static_cohort.cohort_id AS cohort_id
FROM person_static_cohort
- WHERE and(equals(person_static_cohort.team_id, 420), in(person_static_cohort.cohort_id, [5]))) AS __in_cohort ON equals(__in_cohort.cohort_person_id, events.person_id)
+ WHERE and(equals(person_static_cohort.team_id, 420), in(person_static_cohort.cohort_id, [7]))) AS __in_cohort ON equals(__in_cohort.cohort_person_id, events.person_id)
WHERE and(equals(events.team_id, 420), 1, ifNull(equals(__in_cohort.matched, 1), 0))
LIMIT 100
SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, max_ast_elements=4000000, max_expanded_ast_elements=4000000, max_bytes_before_external_group_by=0
@@ -66,7 +66,7 @@
FROM events LEFT JOIN (
SELECT person_id AS cohort_person_id, 1 AS matched, cohort_id
FROM static_cohort_people
- WHERE in(cohort_id, [5])) AS __in_cohort ON equals(__in_cohort.cohort_person_id, person_id)
+ WHERE in(cohort_id, [7])) AS __in_cohort ON equals(__in_cohort.cohort_person_id, person_id)
WHERE and(1, equals(__in_cohort.matched, 1))
LIMIT 100
'''
diff --git a/posthog/hogql_queries/experiments/experiment_funnels_query_runner.py b/posthog/hogql_queries/experiments/experiment_funnels_query_runner.py
index c6783daa489e0..acb648172d287 100644
--- a/posthog/hogql_queries/experiments/experiment_funnels_query_runner.py
+++ b/posthog/hogql_queries/experiments/experiment_funnels_query_runner.py
@@ -16,6 +16,7 @@
ExperimentFunnelsQueryResponse,
ExperimentSignificanceCode,
ExperimentVariantFunnelsBaseStats,
+ FunnelsFilter,
FunnelsQuery,
FunnelsQueryResponse,
InsightDateRange,
@@ -36,9 +37,9 @@ def __init__(self, *args, **kwargs):
self.experiment = Experiment.objects.get(id=self.query.experiment_id)
self.feature_flag = self.experiment.feature_flag
self.variants = [variant["key"] for variant in self.feature_flag.variants]
- self.prepared_funnel_query = self._prepare_funnel_query()
+ self.prepared_funnels_query = self._prepare_funnel_query()
self.funnels_query_runner = FunnelsQueryRunner(
- query=self.prepared_funnel_query, team=self.team, timings=self.timings, limit_context=self.limit_context
+ query=self.prepared_funnels_query, team=self.team, timings=self.timings, limit_context=self.limit_context
)
def calculate(self) -> ExperimentFunnelsQueryResponse:
@@ -46,6 +47,11 @@ def calculate(self) -> ExperimentFunnelsQueryResponse:
self._validate_event_variants(funnels_result)
+ # Filter results to only include valid variants in the first step
+ funnels_result.results = [
+ result for result in funnels_result.results if result[0]["breakdown_value"][0] in self.variants
+ ]
+
# Statistical analysis
control_variant, test_variants = self._get_variants_with_base_stats(funnels_result)
probabilities = calculate_probabilities(control_variant, test_variants)
@@ -53,7 +59,9 @@ def calculate(self) -> ExperimentFunnelsQueryResponse:
credible_intervals = calculate_credible_intervals([control_variant, *test_variants])
return ExperimentFunnelsQueryResponse(
- insight=funnels_result,
+ kind="ExperimentFunnelsQuery",
+ funnels_query=self.prepared_funnels_query,
+ insight=funnels_result.results,
variants=[variant.model_dump() for variant in [control_variant, *test_variants]],
probability={
variant.key: probability
@@ -74,8 +82,8 @@ def _prepare_funnel_query(self) -> FunnelsQuery:
2. Configure the breakdown to use the feature flag key, which allows us
to separate results for different experiment variants.
"""
- # Clone the source query
- prepared_funnel_query = FunnelsQuery(**self.query.source.model_dump())
+ # Clone the funnels query
+ prepared_funnels_query = FunnelsQuery(**self.query.funnels_query.model_dump())
# Set the date range to match the experiment's duration, using the project's timezone
if self.team.timezone:
@@ -86,19 +94,23 @@ def _prepare_funnel_query(self) -> FunnelsQuery:
start_date = self.experiment.start_date
end_date = self.experiment.end_date
- prepared_funnel_query.dateRange = InsightDateRange(
+ prepared_funnels_query.dateRange = InsightDateRange(
date_from=start_date.isoformat() if start_date else None,
date_to=end_date.isoformat() if end_date else None,
explicitDate=True,
)
# Configure the breakdown to use the feature flag key
- prepared_funnel_query.breakdownFilter = BreakdownFilter(
+ prepared_funnels_query.breakdownFilter = BreakdownFilter(
breakdown=f"$feature/{self.feature_flag.key}",
breakdown_type="event",
)
- return prepared_funnel_query
+ prepared_funnels_query.funnelsFilter = FunnelsFilter(
+ funnelVizType="steps",
+ )
+
+ return prepared_funnels_query
def _get_variants_with_base_stats(
self, funnels_result: FunnelsQueryResponse
@@ -178,4 +190,4 @@ def _validate_event_variants(self, funnels_result: FunnelsQueryResponse):
raise ValidationError(detail=json.dumps(errors))
def to_query(self) -> ast.SelectQuery:
- raise ValueError(f"Cannot convert source query of type {self.query.source.kind} to query")
+ raise ValueError(f"Cannot convert source query of type {self.query.funnels_query.kind} to query")
diff --git a/posthog/hogql_queries/experiments/experiment_trends_query_runner.py b/posthog/hogql_queries/experiments/experiment_trends_query_runner.py
index 7389b65a29bf6..2a74202b45f11 100644
--- a/posthog/hogql_queries/experiments/experiment_trends_query_runner.py
+++ b/posthog/hogql_queries/experiments/experiment_trends_query_runner.py
@@ -106,12 +106,11 @@ def _prepare_count_query(self) -> TrendsQuery:
# :TRICKY: for `avg` aggregation, use `sum` data as an approximation
if prepared_count_query.series[0].math == PropertyMathType.AVG:
prepared_count_query.series[0].math = PropertyMathType.SUM
- prepared_count_query.trendsFilter = TrendsFilter(display=ChartDisplayType.ACTIONS_LINE_GRAPH_CUMULATIVE)
# TODO: revisit this; using the count data for the remaining aggregation types is likely wrong
elif uses_math_aggregation:
prepared_count_query.series[0].math = None
- prepared_count_query.trendsFilter = TrendsFilter(display=ChartDisplayType.ACTIONS_LINE_GRAPH_CUMULATIVE)
+ prepared_count_query.trendsFilter = TrendsFilter(display=ChartDisplayType.ACTIONS_LINE_GRAPH_CUMULATIVE)
prepared_count_query.dateRange = self._get_insight_date_range()
prepared_count_query.breakdownFilter = self._get_breakdown_filter()
prepared_count_query.properties = [
@@ -242,7 +241,6 @@ def run(query_runner: TrendsQueryRunner, result_key: str, is_parallel: bool):
count_result = shared_results["count_result"]
exposure_result = shared_results["exposure_result"]
-
if count_result is None or exposure_result is None:
raise ValueError("One or both query runners failed to produce a response")
@@ -255,7 +253,10 @@ def run(query_runner: TrendsQueryRunner, result_key: str, is_parallel: bool):
credible_intervals = calculate_credible_intervals([control_variant, *test_variants])
return ExperimentTrendsQueryResponse(
- insight=count_result,
+ kind="ExperimentTrendsQuery",
+ insight=count_result.results,
+ count_query=self.prepared_count_query,
+ exposure_query=self.prepared_exposure_query,
variants=[variant.model_dump() for variant in [control_variant, *test_variants]],
probability={
variant.key: probability
diff --git a/posthog/hogql_queries/experiments/test/test_experiment_funnels_query_runner.py b/posthog/hogql_queries/experiments/test/test_experiment_funnels_query_runner.py
index 005fe82e089ae..9d4963bb59824 100644
--- a/posthog/hogql_queries/experiments/test/test_experiment_funnels_query_runner.py
+++ b/posthog/hogql_queries/experiments/test/test_experiment_funnels_query_runner.py
@@ -69,7 +69,7 @@ def test_query_runner(self):
experiment_query = ExperimentFunnelsQuery(
experiment_id=experiment.id,
kind="ExperimentFunnelsQuery",
- source=funnels_query,
+ funnels_query=funnels_query,
)
experiment.metrics = [{"type": "primary", "query": experiment_query.model_dump()}]
@@ -130,7 +130,7 @@ def test_query_runner_standard_flow(self):
experiment_query = ExperimentFunnelsQuery(
experiment_id=experiment.id,
kind="ExperimentFunnelsQuery",
- source=funnels_query,
+ funnels_query=funnels_query,
)
experiment.metrics = [{"type": "primary", "query": experiment_query.model_dump()}]
@@ -213,7 +213,7 @@ def test_validate_event_variants_no_events(self):
experiment_query = ExperimentFunnelsQuery(
experiment_id=experiment.id,
kind="ExperimentFunnelsQuery",
- source=funnels_query,
+ funnels_query=funnels_query,
)
query_runner = ExperimentFunnelsQueryRunner(query=experiment_query, team=self.team)
@@ -255,7 +255,7 @@ def test_validate_event_variants_no_control(self):
experiment_query = ExperimentFunnelsQuery(
experiment_id=experiment.id,
kind="ExperimentFunnelsQuery",
- source=funnels_query,
+ funnels_query=funnels_query,
)
query_runner = ExperimentFunnelsQueryRunner(query=experiment_query, team=self.team)
@@ -297,7 +297,7 @@ def test_validate_event_variants_no_test(self):
experiment_query = ExperimentFunnelsQuery(
experiment_id=experiment.id,
kind="ExperimentFunnelsQuery",
- source=funnels_query,
+ funnels_query=funnels_query,
)
query_runner = ExperimentFunnelsQueryRunner(query=experiment_query, team=self.team)
@@ -341,7 +341,7 @@ def test_validate_event_variants_no_flag_info(self):
experiment_query = ExperimentFunnelsQuery(
experiment_id=experiment.id,
kind="ExperimentFunnelsQuery",
- source=funnels_query,
+ funnels_query=funnels_query,
)
query_runner = ExperimentFunnelsQueryRunner(query=experiment_query, team=self.team)
diff --git a/posthog/hogql_queries/web_analytics/test/test_session_attribution_explorer_query_runner.py b/posthog/hogql_queries/web_analytics/test/test_session_attribution_explorer_query_runner.py
index d285ff2aa25f3..06ba1ddfeabbe 100644
--- a/posthog/hogql_queries/web_analytics/test/test_session_attribution_explorer_query_runner.py
+++ b/posthog/hogql_queries/web_analytics/test/test_session_attribution_explorer_query_runner.py
@@ -1,6 +1,5 @@
from typing import Optional
-from parameterized import parameterized
from posthog.hogql.constants import LimitContext
from posthog.hogql_queries.web_analytics.session_attribution_explorer_query_runner import (
@@ -80,7 +79,7 @@ def _run_session_attribution_query(
self,
date_from: Optional[str] = None,
date_to: Optional[str] = None,
- session_table_version: SessionTableVersion = SessionTableVersion.V1,
+ session_table_version: SessionTableVersion = SessionTableVersion.V2,
group_by: Optional[list[SessionAttributionGroupBy]] = None,
limit_context: Optional[LimitContext] = None,
properties: Optional[list[SessionPropertyFilter]] = None,
@@ -94,20 +93,14 @@ def _run_session_attribution_query(
runner = SessionAttributionExplorerQueryRunner(team=self.team, query=query, limit_context=limit_context)
return runner.calculate()
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_no_crash_when_no_data(self, session_table_version: SessionTableVersion):
- results = self._run_session_attribution_query(
- session_table_version=session_table_version,
- ).results
+ def test_no_crash_when_no_data(self):
+ results = self._run_session_attribution_query().results
assert results == [(0, [], [], [], [], [], [], [])]
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_group_by_nothing(self, session_table_version: SessionTableVersion):
+ def test_group_by_nothing(self):
self._create_data()
- results = self._run_session_attribution_query(
- session_table_version=session_table_version,
- ).results
+ results = self._run_session_attribution_query().results
assert results == [
(
@@ -122,12 +115,10 @@ def test_group_by_nothing(self, session_table_version: SessionTableVersion):
)
]
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_group_by_initial_url(self, session_table_version: SessionTableVersion):
+ def test_group_by_initial_url(self):
self._create_data()
results = self._run_session_attribution_query(
- session_table_version=session_table_version,
group_by=[SessionAttributionGroupBy.INITIAL_URL],
).results
@@ -164,12 +155,10 @@ def test_group_by_initial_url(self, session_table_version: SessionTableVersion):
),
]
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_group_channel_medium_source(self, session_table_version: SessionTableVersion):
+ def test_group_channel_medium_source(self):
self._create_data()
results = self._run_session_attribution_query(
- session_table_version=session_table_version,
group_by=[
SessionAttributionGroupBy.CHANNEL_TYPE,
SessionAttributionGroupBy.MEDIUM,
@@ -191,12 +180,10 @@ def test_group_channel_medium_source(self, session_table_version: SessionTableVe
(1, "Referral", ["referring_domain2"], "source2", "medium2", ["campaign2"], [], ["http://example.com/2"]),
]
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_filters(self, session_table_version: SessionTableVersion):
+ def test_filters(self):
self._create_data()
results = self._run_session_attribution_query(
- session_table_version=session_table_version,
group_by=[
SessionAttributionGroupBy.CHANNEL_TYPE,
SessionAttributionGroupBy.MEDIUM,
diff --git a/posthog/hogql_queries/web_analytics/test/test_web_overview.py b/posthog/hogql_queries/web_analytics/test/test_web_overview.py
index bc41d4d0a6785..3e9b570f57b9e 100644
--- a/posthog/hogql_queries/web_analytics/test/test_web_overview.py
+++ b/posthog/hogql_queries/web_analytics/test/test_web_overview.py
@@ -2,7 +2,6 @@
from unittest.mock import MagicMock, patch
from freezegun import freeze_time
-from parameterized import parameterized
from posthog.clickhouse.client.execute import sync_execute
from posthog.hogql.constants import LimitContext
@@ -72,7 +71,7 @@ def _run_web_overview_query(
self,
date_from: str,
date_to: str,
- session_table_version: SessionTableVersion = SessionTableVersion.V1,
+ session_table_version: SessionTableVersion = SessionTableVersion.V2,
compare: bool = True,
limit_context: Optional[LimitContext] = None,
filter_test_accounts: Optional[bool] = False,
@@ -97,19 +96,16 @@ def _run_web_overview_query(
runner = WebOverviewQueryRunner(team=self.team, query=query, limit_context=limit_context)
return runner.calculate()
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_no_crash_when_no_data(self, session_table_version: SessionTableVersion):
+ def test_no_crash_when_no_data(self):
results = self._run_web_overview_query(
"2023-12-08",
"2023-12-15",
- session_table_version=session_table_version,
).results
assert [item.key for item in results] == ["visitors", "views", "sessions", "session duration", "bounce rate"]
results = self._run_web_overview_query(
"2023-12-08",
"2023-12-15",
- session_table_version=session_table_version,
includeLCPScore=True,
).results
assert [item.key for item in results] == [
@@ -132,9 +128,7 @@ def test_no_crash_when_no_data(self, session_table_version: SessionTableVersion)
}
],
)
- results = self._run_web_overview_query(
- "2023-12-08", "2023-12-15", session_table_version=session_table_version, action=action
- ).results
+ results = self._run_web_overview_query("2023-12-08", "2023-12-15", action=action).results
assert [item.key for item in results] == [
"visitors",
@@ -143,8 +137,7 @@ def test_no_crash_when_no_data(self, session_table_version: SessionTableVersion)
"conversion rate",
]
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_increase_in_users(self, session_table_version: SessionTableVersion):
+ def test_increase_in_users(self):
s1a = str(uuid7("2023-12-02"))
s1b = str(uuid7("2023-12-12"))
s2 = str(uuid7("2023-12-11"))
@@ -159,7 +152,6 @@ def test_increase_in_users(self, session_table_version: SessionTableVersion):
results = self._run_web_overview_query(
"2023-12-08",
"2023-12-15",
- session_table_version=session_table_version,
).results
visitors = results[0]
@@ -192,8 +184,7 @@ def test_increase_in_users(self, session_table_version: SessionTableVersion):
self.assertEqual(0, bounce.previous)
self.assertEqual(None, bounce.changeFromPreviousPct)
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_all_time(self, session_table_version: SessionTableVersion):
+ def test_all_time(self):
s1a = str(uuid7("2023-12-02"))
s1b = str(uuid7("2023-12-12"))
s2 = str(uuid7("2023-12-11"))
@@ -208,7 +199,6 @@ def test_all_time(self, session_table_version: SessionTableVersion):
"all",
"2023-12-15",
compare=False,
- session_table_version=session_table_version,
).results
visitors = results[0]
@@ -241,15 +231,12 @@ def test_all_time(self, session_table_version: SessionTableVersion):
self.assertEqual(None, bounce.previous)
self.assertEqual(None, bounce.changeFromPreviousPct)
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_filter_test_accounts(self, session_table_version: SessionTableVersion):
+ def test_filter_test_accounts(self):
s1 = str(uuid7("2023-12-02"))
# Create 1 test account
self._create_events([("test", [("2023-12-02", s1), ("2023-12-03", s1)])])
- results = self._run_web_overview_query(
- "2023-12-01", "2023-12-03", session_table_version=session_table_version, filter_test_accounts=True
- ).results
+ results = self._run_web_overview_query("2023-12-01", "2023-12-03", filter_test_accounts=True).results
visitors = results[0]
self.assertEqual(0, visitors.value)
@@ -267,21 +254,17 @@ def test_filter_test_accounts(self, session_table_version: SessionTableVersion):
self.assertEqual("bounce rate", bounce.key)
self.assertEqual(None, bounce.value)
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_dont_filter_test_accounts(self, session_table_version: SessionTableVersion):
+ def test_dont_filter_test_accounts(self):
s1 = str(uuid7("2023-12-02"))
# Create 1 test account
self._create_events([("test", [("2023-12-02", s1), ("2023-12-03", s1)])])
- results = self._run_web_overview_query(
- "2023-12-01", "2023-12-03", session_table_version=session_table_version, filter_test_accounts=False
- ).results
+ results = self._run_web_overview_query("2023-12-01", "2023-12-03", filter_test_accounts=False).results
visitors = results[0]
self.assertEqual(1, visitors.value)
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_correctly_counts_pageviews_in_long_running_session(self, session_table_version: SessionTableVersion):
+ def test_correctly_counts_pageviews_in_long_running_session(self):
# this test is important when using the v1 sessions table as the raw sessions table will have 3 entries, one per day
s1 = str(uuid7("2023-12-01"))
self._create_events(
@@ -293,7 +276,6 @@ def test_correctly_counts_pageviews_in_long_running_session(self, session_table_
results = self._run_web_overview_query(
"2023-12-01",
"2023-12-03",
- session_table_version=session_table_version,
).results
visitors = results[0]
diff --git a/posthog/hogql_queries/web_analytics/test/test_web_stats_table.py b/posthog/hogql_queries/web_analytics/test/test_web_stats_table.py
index 10ce9ab1ebcb4..865424f5bc862 100644
--- a/posthog/hogql_queries/web_analytics/test/test_web_stats_table.py
+++ b/posthog/hogql_queries/web_analytics/test/test_web_stats_table.py
@@ -1,7 +1,6 @@
from typing import Optional
from freezegun import freeze_time
-from parameterized import parameterized
from posthog.hogql_queries.web_analytics.stats_table import WebStatsTableQueryRunner
from posthog.models import Cohort
@@ -108,7 +107,7 @@ def _run_web_stats_table_query(
include_bounce_rate=False,
include_scroll_depth=False,
properties=None,
- session_table_version: SessionTableVersion = SessionTableVersion.V1,
+ session_table_version: SessionTableVersion = SessionTableVersion.V2,
filter_test_accounts: Optional[bool] = False,
):
modifiers = HogQLQueryModifiers(sessionTableVersion=session_table_version)
@@ -126,15 +125,14 @@ def _run_web_stats_table_query(
runner = WebStatsTableQueryRunner(team=self.team, query=query, modifiers=modifiers)
return runner.calculate()
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_no_crash_when_no_data(self, session_table_version: SessionTableVersion):
+ def test_no_crash_when_no_data(self):
results = self._run_web_stats_table_query(
- "2023-12-08", "2023-12-15", session_table_version=session_table_version
+ "2023-12-08",
+ "2023-12-15",
).results
self.assertEqual([], results)
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_increase_in_users(self, session_table_version: SessionTableVersion):
+ def test_increase_in_users(self):
s1a = str(uuid7("2023-12-02"))
s1b = str(uuid7("2023-12-13"))
s2 = str(uuid7("2023-12-10"))
@@ -145,9 +143,7 @@ def test_increase_in_users(self, session_table_version: SessionTableVersion):
]
)
- results = self._run_web_stats_table_query(
- "2023-12-01", "2023-12-11", session_table_version=session_table_version
- ).results
+ results = self._run_web_stats_table_query("2023-12-01", "2023-12-11").results
self.assertEqual(
[
@@ -157,8 +153,7 @@ def test_increase_in_users(self, session_table_version: SessionTableVersion):
results,
)
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_all_time(self, session_table_version: SessionTableVersion):
+ def test_all_time(self):
s1a = str(uuid7("2023-12-02"))
s1b = str(uuid7("2023-12-13"))
s2 = str(uuid7("2023-12-10"))
@@ -169,9 +164,7 @@ def test_all_time(self, session_table_version: SessionTableVersion):
]
)
- results = self._run_web_stats_table_query(
- "all", "2023-12-15", session_table_version=session_table_version
- ).results
+ results = self._run_web_stats_table_query("all", "2023-12-15").results
self.assertEqual(
[
@@ -182,38 +175,31 @@ def test_all_time(self, session_table_version: SessionTableVersion):
results,
)
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_filter_test_accounts(self, session_table_version: SessionTableVersion):
+ def test_filter_test_accounts(self):
s1 = str(uuid7("2023-12-02"))
# Create 1 test account
self._create_events([("test", [("2023-12-02", s1, "/"), ("2023-12-03", s1, "/login")])])
- results = self._run_web_stats_table_query(
- "2023-12-01", "2023-12-03", session_table_version=session_table_version, filter_test_accounts=True
- ).results
+ results = self._run_web_stats_table_query("2023-12-01", "2023-12-03", filter_test_accounts=True).results
self.assertEqual(
[],
results,
)
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_dont_filter_test_accounts(self, session_table_version: SessionTableVersion):
+ def test_dont_filter_test_accounts(self):
s1 = str(uuid7("2023-12-02"))
# Create 1 test account
self._create_events([("test", [("2023-12-02", s1, "/"), ("2023-12-03", s1, "/login")])])
- results = self._run_web_stats_table_query(
- "2023-12-01", "2023-12-03", session_table_version=session_table_version, filter_test_accounts=False
- ).results
+ results = self._run_web_stats_table_query("2023-12-01", "2023-12-03", filter_test_accounts=False).results
self.assertEqual(
[["/", 1, 1], ["/login", 1, 1]],
results,
)
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_breakdown_channel_type_doesnt_throw(self, session_table_version: SessionTableVersion):
+ def test_breakdown_channel_type_doesnt_throw(self):
s1a = str(uuid7("2023-12-02"))
s1b = str(uuid7("2023-12-13"))
s2 = str(uuid7("2023-12-10"))
@@ -229,7 +215,6 @@ def test_breakdown_channel_type_doesnt_throw(self, session_table_version: Sessio
"2023-12-01",
"2023-12-03",
breakdown_by=WebStatsBreakdown.INITIAL_CHANNEL_TYPE,
- session_table_version=session_table_version,
).results
self.assertEqual(
@@ -237,8 +222,7 @@ def test_breakdown_channel_type_doesnt_throw(self, session_table_version: Sessio
len(results),
)
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_limit(self, session_table_version: SessionTableVersion):
+ def test_limit(self):
s1 = str(uuid7("2023-12-02"))
s2 = str(uuid7("2023-12-10"))
self._create_events(
@@ -248,9 +232,7 @@ def test_limit(self, session_table_version: SessionTableVersion):
]
)
- response_1 = self._run_web_stats_table_query(
- "all", "2023-12-15", limit=1, session_table_version=session_table_version
- )
+ response_1 = self._run_web_stats_table_query("all", "2023-12-15", limit=1)
self.assertEqual(
[
["/", 2, 2],
@@ -269,8 +251,7 @@ def test_limit(self, session_table_version: SessionTableVersion):
)
self.assertEqual(False, response_2.hasMore)
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_path_filters(self, session_table_version: SessionTableVersion):
+ def test_path_filters(self):
s1 = str(uuid7("2023-12-02"))
s2 = str(uuid7("2023-12-10"))
s3 = str(uuid7("2023-12-10"))
@@ -295,7 +276,6 @@ def test_path_filters(self, session_table_version: SessionTableVersion):
{"regex": "thing_a", "alias": "thing_b"},
{"regex": "thing_b", "alias": "thing_c"},
],
- session_table_version=session_table_version,
).results
self.assertEqual(
@@ -308,8 +288,7 @@ def test_path_filters(self, session_table_version: SessionTableVersion):
results,
)
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_scroll_depth_bounce_rate_one_user(self, session_table_version: SessionTableVersion):
+ def test_scroll_depth_bounce_rate_one_user(self):
self._create_pageviews(
"p1",
[
@@ -325,7 +304,6 @@ def test_scroll_depth_bounce_rate_one_user(self, session_table_version: SessionT
breakdown_by=WebStatsBreakdown.PAGE,
include_scroll_depth=True,
include_bounce_rate=True,
- session_table_version=session_table_version,
).results
self.assertEqual(
@@ -337,8 +315,7 @@ def test_scroll_depth_bounce_rate_one_user(self, session_table_version: SessionT
results,
)
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_scroll_depth_bounce_rate(self, session_table_version: SessionTableVersion):
+ def test_scroll_depth_bounce_rate(self):
self._create_pageviews(
"p1",
[
@@ -369,7 +346,6 @@ def test_scroll_depth_bounce_rate(self, session_table_version: SessionTableVersi
breakdown_by=WebStatsBreakdown.PAGE,
include_scroll_depth=True,
include_bounce_rate=True,
- session_table_version=session_table_version,
).results
self.assertEqual(
@@ -381,8 +357,7 @@ def test_scroll_depth_bounce_rate(self, session_table_version: SessionTableVersi
results,
)
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_scroll_depth_bounce_rate_with_filter(self, session_table_version: SessionTableVersion):
+ def test_scroll_depth_bounce_rate_with_filter(self):
self._create_pageviews(
"p1",
[
@@ -414,7 +389,6 @@ def test_scroll_depth_bounce_rate_with_filter(self, session_table_version: Sessi
include_scroll_depth=True,
include_bounce_rate=True,
properties=[EventPropertyFilter(key="$pathname", operator=PropertyOperator.EXACT, value="/a")],
- session_table_version=session_table_version,
).results
self.assertEqual(
@@ -424,8 +398,7 @@ def test_scroll_depth_bounce_rate_with_filter(self, session_table_version: Sessi
results,
)
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_scroll_depth_bounce_rate_path_cleaning(self, session_table_version: SessionTableVersion):
+ def test_scroll_depth_bounce_rate_path_cleaning(self):
self._create_pageviews(
"p1",
[
@@ -446,7 +419,6 @@ def test_scroll_depth_bounce_rate_path_cleaning(self, session_table_version: Ses
{"regex": "\\/b\\/\\d+", "alias": "/b/:id"},
{"regex": "\\/c\\/\\d+", "alias": "/c/:id"},
],
- session_table_version=session_table_version,
).results
self.assertEqual(
@@ -458,8 +430,7 @@ def test_scroll_depth_bounce_rate_path_cleaning(self, session_table_version: Ses
results,
)
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_bounce_rate_one_user(self, session_table_version: SessionTableVersion):
+ def test_bounce_rate_one_user(self):
self._create_pageviews(
"p1",
[
@@ -474,7 +445,6 @@ def test_bounce_rate_one_user(self, session_table_version: SessionTableVersion):
"2023-12-15",
breakdown_by=WebStatsBreakdown.PAGE,
include_bounce_rate=True,
- session_table_version=session_table_version,
).results
self.assertEqual(
@@ -486,8 +456,7 @@ def test_bounce_rate_one_user(self, session_table_version: SessionTableVersion):
results,
)
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_bounce_rate(self, session_table_version: SessionTableVersion):
+ def test_bounce_rate(self):
self._create_pageviews(
"p1",
[
@@ -517,7 +486,6 @@ def test_bounce_rate(self, session_table_version: SessionTableVersion):
"2023-12-15",
breakdown_by=WebStatsBreakdown.PAGE,
include_bounce_rate=True,
- session_table_version=session_table_version,
).results
self.assertEqual(
@@ -529,8 +497,7 @@ def test_bounce_rate(self, session_table_version: SessionTableVersion):
results,
)
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_bounce_rate_with_property(self, session_table_version: SessionTableVersion):
+ def test_bounce_rate_with_property(self):
self._create_pageviews(
"p1",
[
@@ -561,7 +528,6 @@ def test_bounce_rate_with_property(self, session_table_version: SessionTableVers
breakdown_by=WebStatsBreakdown.PAGE,
include_bounce_rate=True,
properties=[EventPropertyFilter(key="$pathname", operator=PropertyOperator.EXACT, value="/a")],
- session_table_version=session_table_version,
).results
self.assertEqual(
@@ -571,8 +537,7 @@ def test_bounce_rate_with_property(self, session_table_version: SessionTableVers
results,
)
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_bounce_rate_path_cleaning(self, session_table_version: SessionTableVersion):
+ def test_bounce_rate_path_cleaning(self):
self._create_pageviews(
"p1",
[
@@ -592,7 +557,6 @@ def test_bounce_rate_path_cleaning(self, session_table_version: SessionTableVers
{"regex": "\\/b\\/\\d+", "alias": "/b/:id"},
{"regex": "\\/c\\/\\d+", "alias": "/c/:id"},
],
- session_table_version=session_table_version,
).results
self.assertEqual(
@@ -604,8 +568,7 @@ def test_bounce_rate_path_cleaning(self, session_table_version: SessionTableVers
results,
)
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_entry_bounce_rate_one_user(self, session_table_version: SessionTableVersion):
+ def test_entry_bounce_rate_one_user(self):
self._create_pageviews(
"p1",
[
@@ -620,7 +583,6 @@ def test_entry_bounce_rate_one_user(self, session_table_version: SessionTableVer
"2023-12-15",
breakdown_by=WebStatsBreakdown.INITIAL_PAGE,
include_bounce_rate=True,
- session_table_version=session_table_version,
).results
self.assertEqual(
@@ -630,8 +592,7 @@ def test_entry_bounce_rate_one_user(self, session_table_version: SessionTableVer
results,
)
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_entry_bounce_rate(self, session_table_version: SessionTableVersion):
+ def test_entry_bounce_rate(self):
self._create_pageviews(
"p1",
[
@@ -661,7 +622,6 @@ def test_entry_bounce_rate(self, session_table_version: SessionTableVersion):
"2023-12-15",
breakdown_by=WebStatsBreakdown.INITIAL_PAGE,
include_bounce_rate=True,
- session_table_version=session_table_version,
).results
self.assertEqual(
@@ -671,8 +631,7 @@ def test_entry_bounce_rate(self, session_table_version: SessionTableVersion):
results,
)
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_entry_bounce_rate_with_property(self, session_table_version: SessionTableVersion):
+ def test_entry_bounce_rate_with_property(self):
self._create_pageviews(
"p1",
[
@@ -703,7 +662,6 @@ def test_entry_bounce_rate_with_property(self, session_table_version: SessionTab
breakdown_by=WebStatsBreakdown.INITIAL_PAGE,
include_bounce_rate=True,
properties=[EventPropertyFilter(key="$pathname", operator=PropertyOperator.EXACT, value="/a")],
- session_table_version=session_table_version,
).results
self.assertEqual(
@@ -713,8 +671,7 @@ def test_entry_bounce_rate_with_property(self, session_table_version: SessionTab
results,
)
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_entry_bounce_rate_path_cleaning(self, session_table_version: SessionTableVersion):
+ def test_entry_bounce_rate_path_cleaning(self):
self._create_pageviews(
"p1",
[
@@ -734,7 +691,6 @@ def test_entry_bounce_rate_path_cleaning(self, session_table_version: SessionTab
{"regex": "\\/b\\/\\d+", "alias": "/b/:id"},
{"regex": "\\/c\\/\\d+", "alias": "/c/:id"},
],
- session_table_version=session_table_version,
).results
self.assertEqual(
@@ -744,8 +700,7 @@ def test_entry_bounce_rate_path_cleaning(self, session_table_version: SessionTab
results,
)
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_source_medium_campaign(self, session_table_version: SessionTableVersion):
+ def test_source_medium_campaign(self):
d1 = "d1"
s1 = str(uuid7("2024-06-26"))
@@ -785,7 +740,6 @@ def test_source_medium_campaign(self, session_table_version: SessionTableVersion
"all",
"2024-06-27",
breakdown_by=WebStatsBreakdown.INITIAL_UTM_SOURCE_MEDIUM_CAMPAIGN,
- session_table_version=session_table_version,
).results
self.assertEqual(
@@ -793,8 +747,7 @@ def test_source_medium_campaign(self, session_table_version: SessionTableVersion
results,
)
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_null_in_utm_tags(self, session_table_version: SessionTableVersion):
+ def test_null_in_utm_tags(self):
d1 = "d1"
s1 = str(uuid7("2024-06-26"))
@@ -836,7 +789,6 @@ def test_null_in_utm_tags(self, session_table_version: SessionTableVersion):
"all",
"2024-06-27",
breakdown_by=WebStatsBreakdown.INITIAL_UTM_SOURCE,
- session_table_version=session_table_version,
).results
self.assertEqual(
@@ -844,8 +796,7 @@ def test_null_in_utm_tags(self, session_table_version: SessionTableVersion):
results,
)
- @parameterized.expand([[SessionTableVersion.V1], [SessionTableVersion.V2]])
- def test_is_not_set_filter(self, session_table_version: SessionTableVersion):
+ def test_is_not_set_filter(self):
d1 = "d1"
s1 = str(uuid7("2024-06-26"))
@@ -888,7 +839,6 @@ def test_is_not_set_filter(self, session_table_version: SessionTableVersion):
"2024-06-27",
breakdown_by=WebStatsBreakdown.INITIAL_UTM_SOURCE,
properties=[EventPropertyFilter(key="utm_source", operator=PropertyOperator.IS_NOT_SET)],
- session_table_version=session_table_version,
).results
self.assertEqual(
diff --git a/posthog/models/feature_flag/permissions.py b/posthog/models/feature_flag/permissions.py
index 8f766b4fccc60..d2b0bb858a403 100644
--- a/posthog/models/feature_flag/permissions.py
+++ b/posthog/models/feature_flag/permissions.py
@@ -6,7 +6,7 @@ def can_user_edit_feature_flag(request, feature_flag):
# self hosted check for enterprise models that may not exist
try:
from ee.models.feature_flag_role_access import FeatureFlagRoleAccess
- from ee.models.organization_resource_access import OrganizationResourceAccess
+ from ee.models.rbac.organization_resource_access import OrganizationResourceAccess
except:
return True
else:
diff --git a/posthog/models/personal_api_key.py b/posthog/models/personal_api_key.py
index ea886b55757c6..cac5adefbc65d 100644
--- a/posthog/models/personal_api_key.py
+++ b/posthog/models/personal_api_key.py
@@ -1,4 +1,4 @@
-from typing import Optional, Literal, get_args
+from typing import Optional, Literal
import hashlib
from django.contrib.auth.hashers import PBKDF2PasswordHasher
@@ -66,56 +66,3 @@ class PersonalAPIKey(models.Model):
null=True,
blank=True,
)
-
-
-## API Scopes
-# These are the scopes that are used to define the permissions of the API tokens.
-# Not every model needs a scope - it should more be for top-level things
-# Typically each object should have `read` and `write` scopes, but some objects may have more specific scopes
-
-# WARNING: Make sure to keep in sync with the frontend!
-APIScopeObject = Literal[
- "action",
- "activity_log",
- "annotation",
- "batch_export",
- "cohort",
- "dashboard",
- "dashboard_template",
- "early_access_feature",
- "event_definition",
- "experiment",
- "export",
- "feature_flag",
- "group",
- "insight",
- "query", # Covers query and events endpoints
- "notebook",
- "organization",
- "organization_member",
- "person",
- "plugin",
- "project",
- "property_definition",
- "session_recording",
- "session_recording_playlist",
- "sharing_configuration",
- "subscription",
- "survey",
- "user",
- "webhook",
-]
-
-APIScopeActions = Literal[
- "read",
- "write",
-]
-
-APIScopeObjectOrNotSupported = Literal[
- APIScopeObject,
- "INTERNAL",
-]
-
-
-API_SCOPE_OBJECTS: tuple[APIScopeObject, ...] = get_args(APIScopeObject)
-API_SCOPE_ACTIONS: tuple[APIScopeActions, ...] = get_args(APIScopeActions)
diff --git a/posthog/models/scopes.py b/posthog/models/scopes.py
new file mode 100644
index 0000000000000..2bd0d48b405e5
--- /dev/null
+++ b/posthog/models/scopes.py
@@ -0,0 +1,60 @@
+## API Scopes
+# These are the scopes that are used to define the permissions of the API tokens.
+# Not every model needs a scope - it should more be for top-level things
+# Typically each object should have `read` and `write` scopes, but some objects may have more specific scopes
+
+# WARNING: Make sure to keep in sync with the frontend!
+from typing import Literal, get_args
+
+
+## API Scopes
+# These are the scopes that are used to define the permissions of the API tokens.
+# Not every model needs a scope - it should more be for top-level things
+# Typically each object should have `read` and `write` scopes, but some objects may have more specific scopes
+
+# WARNING: Make sure to keep in sync with the frontend!
+APIScopeObject = Literal[
+ "action",
+ "activity_log",
+ "annotation",
+ "batch_export",
+ "cohort",
+ "dashboard",
+ "dashboard_template",
+ "early_access_feature",
+ "event_definition",
+ "experiment",
+ "export",
+ "feature_flag",
+ "group",
+ "insight",
+ "query", # Covers query and events endpoints
+ "notebook",
+ "organization",
+ "organization_member",
+ "person",
+ "plugin",
+ "project",
+ "property_definition",
+ "session_recording",
+ "session_recording_playlist",
+ "sharing_configuration",
+ "subscription",
+ "survey",
+ "user",
+ "webhook",
+]
+
+APIScopeActions = Literal[
+ "read",
+ "write",
+]
+
+APIScopeObjectOrNotSupported = Literal[
+ APIScopeObject,
+ "INTERNAL",
+]
+
+
+API_SCOPE_OBJECTS: tuple[APIScopeObject, ...] = get_args(APIScopeObject)
+API_SCOPE_ACTIONS: tuple[APIScopeActions, ...] = get_args(APIScopeActions)
diff --git a/posthog/models/sessions/sql.py b/posthog/models/sessions/sql.py
index 680e1f7ff6f7b..93d3083a37763 100644
--- a/posthog/models/sessions/sql.py
+++ b/posthog/models/sessions/sql.py
@@ -7,6 +7,7 @@
AggregatingMergeTree,
)
+# V1 Sessions table
TABLE_BASE_NAME = "sessions"
SESSIONS_DATA_TABLE = lambda: f"sharded_{TABLE_BASE_NAME}"
@@ -21,6 +22,32 @@
)
DROP_SESSION_VIEW_SQL = lambda: f"DROP VIEW IF EXISTS {TABLE_BASE_NAME}_v ON CLUSTER '{settings.CLICKHOUSE_CLUSTER}'"
+# Only teams that were grandfathered into the V1 sessions table are allowed to use it. Everyone else should use V2,
+# i.e. raw_sessions. These teams were those who were seen to have changed their session table version in these metabase
+# queries:
+# US: https://metabase.prod-us.posthog.dev/question#eyJkYXRhc2V0X3F1ZXJ5Ijp7InR5cGUiOiJuYXRpdmUiLCJuYXRpdmUiOnsicXVlcnkiOiJTRUxFQ1QgdGVhbV9pZCwgc1xuRlJPTSAoXG4gICAgU0VMRUNUIG1vZGlmaWVycy0-PidzZXNzaW9uVGFibGVWZXJzaW9uJyBBUyBzLCBpZCBhcyB0ZWFtX2lkXG4gICAgRlJPTSBwb3N0aG9nX3RlYW1cbikgc3ViXG5XSEVSRSBzICE9ICcnIiwidGVtcGxhdGUtdGFncyI6e319LCJkYXRhYmFzZSI6MzR9LCJkaXNwbGF5IjoidGFibGUiLCJwYXJhbWV0ZXJzIjpbXSwidmlzdWFsaXphdGlvbl9zZXR0aW5ncyI6e319
+# EU: https://metabase.prod-eu.posthog.dev/question#eyJkYXRhc2V0X3F1ZXJ5Ijp7InR5cGUiOiJuYXRpdmUiLCJuYXRpdmUiOnsicXVlcnkiOiJTRUxFQ1QgdGVhbV9pZCwgc1xuRlJPTSAoXG4gICAgU0VMRUNUIG1vZGlmaWVycy0-PidzZXNzaW9uVGFibGVWZXJzaW9uJyBBUyBzLCBpZCBhcyB0ZWFtX2lkXG4gICAgRlJPTSBwb3N0aG9nX3RlYW1cbikgc3ViXG5XSEVSRSBzICE9ICcnIiwidGVtcGxhdGUtdGFncyI6e319LCJkYXRhYmFzZSI6MzR9LCJkaXNwbGF5IjoidGFibGUiLCJwYXJhbWV0ZXJzIjpbXSwidmlzdWFsaXphdGlvbl9zZXR0aW5ncyI6e319
+# or had contacted support about an issue.
+# This list exists because we want to reduce the number of writes happening to this table, and so we don't write to it
+# for any team not in this list. Adding a team to this is possible if needed, but would require changing this MV in
+# production and backfilling this table with the management command backfill_sessions_table.
+ALLOWED_TEAM_IDS = [
+ # posthog
+ 1,
+ 2,
+ # US query
+ 13610, # zendesk: https://posthoghelp.zendesk.com/agent/tickets/18001
+ 19279,
+ 21173,
+ 29929,
+ 32050,
+ # EU query
+ 9910,
+ 11775,
+ 21129,
+ 31490,
+]
+ALLOWED_TEAM_IDS_SQL = ", ".join(str(team_id) for team_id in ALLOWED_TEAM_IDS)
# if updating these column definitions
# you'll need to update the explicit column definitions in the materialized view creation statement below
@@ -144,7 +171,7 @@ def source_column(column_name: str) -> str:
sumIf(1, event='$autocapture') as autocapture_count
FROM {database}.sharded_events
-WHERE `$session_id` IS NOT NULL AND `$session_id` != ''
+WHERE `$session_id` IS NOT NULL AND `$session_id` != '' AND team_id IN ({allowed_team_ids})
GROUP BY `$session_id`, team_id
""".format(
database=settings.CLICKHOUSE_DATABASE,
@@ -168,6 +195,7 @@ def source_column(column_name: str) -> str:
mc_cid_property=source_column("mc_cid"),
igshid_property=source_column("igshid"),
ttclid_property=source_column("ttclid"),
+ allowed_team_ids=ALLOWED_TEAM_IDS_SQL,
)
)
diff --git a/posthog/permissions.py b/posthog/permissions.py
index 889832c13baa7..6de160e099567 100644
--- a/posthog/permissions.py
+++ b/posthog/permissions.py
@@ -19,7 +19,7 @@
from posthog.cloud_utils import is_cloud
from posthog.exceptions import EnterpriseFeatureException
from posthog.models import Organization, OrganizationMembership, Team, User
-from posthog.models.personal_api_key import APIScopeObjectOrNotSupported
+from posthog.models.scopes import APIScopeObjectOrNotSupported
from posthog.utils import get_can_create_org
CREATE_METHODS = ["POST", "PUT"]
diff --git a/posthog/schema.py b/posthog/schema.py
index b386d5d6c8e97..de42df84b396e 100644
--- a/posthog/schema.py
+++ b/posthog/schema.py
@@ -63,6 +63,16 @@ class AlertState(StrEnum):
SNOOZED = "Snoozed"
+class AssistantEventType(StrEnum):
+ STATUS = "status"
+ MESSAGE = "message"
+
+
+class AssistantGenerationStatusType(StrEnum):
+ ACK = "ack"
+ GENERATION_ERROR = "generation_error"
+
+
class AssistantMessage(BaseModel):
model_config = ConfigDict(
extra="forbid",
@@ -75,9 +85,10 @@ class AssistantMessageType(StrEnum):
HUMAN = "human"
AI = "ai"
AI_VIZ = "ai/viz"
+ AI_FAILURE = "ai/failure"
-class Kind(StrEnum):
+class AutocompleteCompletionItemKind(StrEnum):
METHOD = "Method"
FUNCTION = "Function"
CONSTRUCTOR = "Constructor"
@@ -108,34 +119,6 @@ class Kind(StrEnum):
SNIPPET = "Snippet"
-class AutocompleteCompletionItem(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- detail: Optional[str] = Field(
- default=None,
- description=(
- "A human-readable string with additional information about this item, like type or symbol information."
- ),
- )
- documentation: Optional[str] = Field(
- default=None, description="A human-readable string that represents a doc-comment."
- )
- insertText: str = Field(
- ..., description="A string or snippet that should be inserted in a document when selecting this completion."
- )
- kind: Kind = Field(
- ..., description="The kind of this completion item. Based on the kind an icon is chosen by the editor."
- )
- label: str = Field(
- ...,
- description=(
- "The label of this completion item. By default this is also the text that is inserted when selecting this"
- " completion."
- ),
- )
-
-
class BaseMathType(StrEnum):
TOTAL = "total"
DAU = "dau"
@@ -558,6 +541,14 @@ class ExperimentVariantTrendsBaseStats(BaseModel):
key: str
+class FailureMessage(BaseModel):
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ content: Optional[str] = None
+ type: Literal["ai/failure"] = "ai/failure"
+
+
class FilterLogicalOperator(StrEnum):
AND_ = "AND"
OR_ = "OR"
@@ -1132,24 +1123,11 @@ class RecordingPropertyFilter(BaseModel):
value: Optional[Union[str, float, list[Union[str, float]]]] = None
-class Kind1(StrEnum):
+class RetentionEntityKind(StrEnum):
ACTIONS_NODE = "ActionsNode"
EVENTS_NODE = "EventsNode"
-class RetentionEntity(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- custom_name: Optional[str] = None
- id: Optional[Union[str, float]] = None
- kind: Optional[Kind1] = None
- name: Optional[str] = None
- order: Optional[int] = None
- type: Optional[EntityType] = None
- uuid: Optional[str] = None
-
-
class RetentionReference(StrEnum):
TOTAL = "total"
PREVIOUS = "previous"
@@ -1563,48 +1541,12 @@ class WebGoalsQueryResponse(BaseModel):
types: Optional[list] = None
-class Kind2(StrEnum):
+class WebOverviewItemKind(StrEnum):
UNIT = "unit"
DURATION_S = "duration_s"
PERCENTAGE = "percentage"
-class WebOverviewItem(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- changeFromPreviousPct: Optional[float] = None
- isIncreaseBad: Optional[bool] = None
- key: str
- kind: Kind2
- previous: Optional[float] = None
- value: Optional[float] = None
-
-
-class WebOverviewQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- dateFrom: Optional[str] = None
- dateTo: Optional[str] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[WebOverviewItem]
- samplingRate: Optional[SamplingRate] = None
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
class WebStatsBreakdown(StrEnum):
PAGE = "Page"
INITIAL_PAGE = "InitialPage"
@@ -1745,6 +1687,41 @@ class AlertCondition(BaseModel):
type: AlertConditionType
+class AssistantGenerationStatusEvent(BaseModel):
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ type: AssistantGenerationStatusType
+
+
+class AutocompleteCompletionItem(BaseModel):
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ detail: Optional[str] = Field(
+ default=None,
+ description=(
+ "A human-readable string with additional information about this item, like type or symbol information."
+ ),
+ )
+ documentation: Optional[str] = Field(
+ default=None, description="A human-readable string that represents a doc-comment."
+ )
+ insertText: str = Field(
+ ..., description="A string or snippet that should be inserted in a document when selecting this completion."
+ )
+ kind: AutocompleteCompletionItemKind = Field(
+ ..., description="The kind of this completion item. Based on the kind an icon is chosen by the editor."
+ )
+ label: str = Field(
+ ...,
+ description=(
+ "The label of this completion item. By default this is also the text that is inserted when selecting this"
+ " completion."
+ ),
+ )
+
+
class Breakdown(BaseModel):
model_config = ConfigDict(
extra="forbid",
@@ -1951,31 +1928,6 @@ class CachedEventsQueryResponse(BaseModel):
types: list[str]
-class CachedExperimentTrendsQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- credible_intervals: dict[str, list[float]]
- insight: TrendsQueryResponse
- is_cached: bool
- last_refresh: AwareDatetime
- next_allowed_client_refresh: AwareDatetime
- p_value: float
- probability: dict[str, float]
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- significance_code: ExperimentSignificanceCode
- significant: bool
- timezone: str
- variants: list[ExperimentVariantTrendsBaseStats]
-
-
class CachedFunnelCorrelationResponse(BaseModel):
model_config = ConfigDict(
extra="forbid",
@@ -2350,39 +2302,6 @@ class CachedWebGoalsQueryResponse(BaseModel):
types: Optional[list] = None
-class CachedWebOverviewQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- dateFrom: Optional[str] = None
- dateTo: Optional[str] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- is_cached: bool
- last_refresh: AwareDatetime
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- next_allowed_client_refresh: AwareDatetime
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[WebOverviewItem]
- samplingRate: Optional[SamplingRate] = None
- timezone: str
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
class CachedWebStatsTableQueryResponse(BaseModel):
model_config = ConfigDict(
extra="forbid",
@@ -2547,30 +2466,6 @@ class Response1(BaseModel):
types: list[str]
-class Response3(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- dateFrom: Optional[str] = None
- dateTo: Optional[str] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[WebOverviewItem]
- samplingRate: Optional[SamplingRate] = None
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
class Response4(BaseModel):
model_config = ConfigDict(
extra="forbid",
@@ -2700,19 +2595,6 @@ class Response9(BaseModel):
)
-class Response11(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- credible_intervals: dict[str, list[float]]
- insight: TrendsQueryResponse
- p_value: float
- probability: dict[str, float]
- significance_code: ExperimentSignificanceCode
- significant: bool
- variants: list[ExperimentVariantTrendsBaseStats]
-
-
class DataWarehousePersonPropertyFilter(BaseModel):
model_config = ConfigDict(
extra="forbid",
@@ -2863,19 +2745,6 @@ class EventsQueryResponse(BaseModel):
types: list[str]
-class ExperimentTrendsQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- credible_intervals: dict[str, list[float]]
- insight: TrendsQueryResponse
- p_value: float
- probability: dict[str, float]
- significance_code: ExperimentSignificanceCode
- significant: bool
- variants: list[ExperimentVariantTrendsBaseStats]
-
-
class BreakdownFilter1(BaseModel):
model_config = ConfigDict(
extra="forbid",
@@ -3313,30 +3182,6 @@ class QueryResponseAlternative8(BaseModel):
)
-class QueryResponseAlternative9(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- dateFrom: Optional[str] = None
- dateTo: Optional[str] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[WebOverviewItem]
- samplingRate: Optional[SamplingRate] = None
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
class QueryResponseAlternative10(BaseModel):
model_config = ConfigDict(
extra="forbid",
@@ -3466,33 +3311,7 @@ class QueryResponseAlternative15(BaseModel):
)
-class QueryResponseAlternative16(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- credible_intervals: dict[str, list[float]]
- expected_loss: float
- insight: FunnelsQueryResponse
- probability: dict[str, float]
- significance_code: ExperimentSignificanceCode
- significant: bool
- variants: list[ExperimentVariantFunnelsBaseStats]
-
-
-class QueryResponseAlternative17(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- credible_intervals: dict[str, list[float]]
- insight: TrendsQueryResponse
- p_value: float
- probability: dict[str, float]
- significance_code: ExperimentSignificanceCode
- significant: bool
- variants: list[ExperimentVariantTrendsBaseStats]
-
-
-class QueryResponseAlternative18(BaseModel):
+class QueryResponseAlternative18(BaseModel):
model_config = ConfigDict(
extra="forbid",
)
@@ -3575,30 +3394,6 @@ class QueryResponseAlternative20(BaseModel):
types: Optional[list] = Field(default=None, description="Types of returned columns")
-class QueryResponseAlternative21(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- dateFrom: Optional[str] = None
- dateTo: Optional[str] = None
- error: Optional[str] = Field(
- default=None,
- description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
- )
- hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- results: list[WebOverviewItem]
- samplingRate: Optional[SamplingRate] = None
- timings: Optional[list[QueryTiming]] = Field(
- default=None, description="Measured timings for different parts of the query generation process"
- )
-
-
class QueryResponseAlternative22(BaseModel):
model_config = ConfigDict(
extra="forbid",
@@ -3728,32 +3523,6 @@ class QueryResponseAlternative27(BaseModel):
)
-class QueryResponseAlternative28(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- credible_intervals: dict[str, list[float]]
- expected_loss: float
- insight: FunnelsQueryResponse
- probability: dict[str, float]
- significance_code: ExperimentSignificanceCode
- significant: bool
- variants: list[ExperimentVariantFunnelsBaseStats]
-
-
-class QueryResponseAlternative29(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- credible_intervals: dict[str, list[float]]
- insight: TrendsQueryResponse
- p_value: float
- probability: dict[str, float]
- significance_code: ExperimentSignificanceCode
- significant: bool
- variants: list[ExperimentVariantTrendsBaseStats]
-
-
class QueryResponseAlternative30(BaseModel):
model_config = ConfigDict(
extra="forbid",
@@ -3908,6 +3677,19 @@ class QueryResponseAlternative41(BaseModel):
)
+class RetentionEntity(BaseModel):
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ custom_name: Optional[str] = None
+ id: Optional[Union[str, float]] = None
+ kind: Optional[RetentionEntityKind] = None
+ name: Optional[str] = None
+ order: Optional[int] = None
+ type: Optional[EntityType] = None
+ uuid: Optional[str] = None
+
+
class RetentionFilter(BaseModel):
model_config = ConfigDict(
extra="forbid",
@@ -4161,23 +3943,40 @@ class WebGoalsQuery(BaseModel):
useSessionsTable: Optional[bool] = None
-class WebOverviewQuery(BaseModel):
+class WebOverviewItem(BaseModel):
model_config = ConfigDict(
extra="forbid",
)
- compare: Optional[bool] = None
- conversionGoal: Optional[Union[ActionConversionGoal, CustomEventConversionGoal]] = None
- dateRange: Optional[DateRange] = None
- filterTestAccounts: Optional[bool] = None
- includeLCPScore: Optional[bool] = None
- kind: Literal["WebOverviewQuery"] = "WebOverviewQuery"
+ changeFromPreviousPct: Optional[float] = None
+ isIncreaseBad: Optional[bool] = None
+ key: str
+ kind: WebOverviewItemKind
+ previous: Optional[float] = None
+ value: Optional[float] = None
+
+
+class WebOverviewQueryResponse(BaseModel):
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ dateFrom: Optional[str] = None
+ dateTo: Optional[str] = None
+ error: Optional[str] = Field(
+ default=None,
+ description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
+ )
+ hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
modifiers: Optional[HogQLQueryModifiers] = Field(
default=None, description="Modifiers used when performing the query"
)
- properties: list[Union[EventPropertyFilter, PersonPropertyFilter, SessionPropertyFilter]]
- response: Optional[WebOverviewQueryResponse] = None
- sampling: Optional[Sampling] = None
- useSessionsTable: Optional[bool] = None
+ query_status: Optional[QueryStatus] = Field(
+ default=None, description="Query status indicates whether next to the provided data, a query is still running."
+ )
+ results: list[WebOverviewItem]
+ samplingRate: Optional[SamplingRate] = None
+ timings: Optional[list[QueryTiming]] = Field(
+ default=None, description="Measured timings for different parts of the query generation process"
+ )
class WebStatsTableQuery(BaseModel):
@@ -4256,31 +4055,6 @@ class AnyResponseType(
]
-class CachedExperimentFunnelsQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- cache_key: str
- cache_target_age: Optional[AwareDatetime] = None
- calculation_trigger: Optional[str] = Field(
- default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
- )
- credible_intervals: dict[str, list[float]]
- expected_loss: float
- insight: FunnelsQueryResponse
- is_cached: bool
- last_refresh: AwareDatetime
- next_allowed_client_refresh: AwareDatetime
- probability: dict[str, float]
- query_status: Optional[QueryStatus] = Field(
- default=None, description="Query status indicates whether next to the provided data, a query is still running."
- )
- significance_code: ExperimentSignificanceCode
- significant: bool
- timezone: str
- variants: list[ExperimentVariantFunnelsBaseStats]
-
-
class CachedHogQLQueryResponse(BaseModel):
model_config = ConfigDict(
extra="forbid",
@@ -4375,6 +4149,39 @@ class CachedRetentionQueryResponse(BaseModel):
)
+class CachedWebOverviewQueryResponse(BaseModel):
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ cache_key: str
+ cache_target_age: Optional[AwareDatetime] = None
+ calculation_trigger: Optional[str] = Field(
+ default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
+ )
+ dateFrom: Optional[str] = None
+ dateTo: Optional[str] = None
+ error: Optional[str] = Field(
+ default=None,
+ description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
+ )
+ hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
+ is_cached: bool
+ last_refresh: AwareDatetime
+ modifiers: Optional[HogQLQueryModifiers] = Field(
+ default=None, description="Modifiers used when performing the query"
+ )
+ next_allowed_client_refresh: AwareDatetime
+ query_status: Optional[QueryStatus] = Field(
+ default=None, description="Query status indicates whether next to the provided data, a query is still running."
+ )
+ results: list[WebOverviewItem]
+ samplingRate: Optional[SamplingRate] = None
+ timezone: str
+ timings: Optional[list[QueryTiming]] = Field(
+ default=None, description="Measured timings for different parts of the query generation process"
+ )
+
+
class DashboardFilter(BaseModel):
model_config = ConfigDict(
extra="forbid",
@@ -4432,17 +4239,28 @@ class Response2(BaseModel):
types: Optional[list] = Field(default=None, description="Types of returned columns")
-class Response10(BaseModel):
+class Response3(BaseModel):
model_config = ConfigDict(
extra="forbid",
)
- credible_intervals: dict[str, list[float]]
- expected_loss: float
- insight: FunnelsQueryResponse
- probability: dict[str, float]
- significance_code: ExperimentSignificanceCode
- significant: bool
- variants: list[ExperimentVariantFunnelsBaseStats]
+ dateFrom: Optional[str] = None
+ dateTo: Optional[str] = None
+ error: Optional[str] = Field(
+ default=None,
+ description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
+ )
+ hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
+ modifiers: Optional[HogQLQueryModifiers] = Field(
+ default=None, description="Modifiers used when performing the query"
+ )
+ query_status: Optional[QueryStatus] = Field(
+ default=None, description="Query status indicates whether next to the provided data, a query is still running."
+ )
+ results: list[WebOverviewItem]
+ samplingRate: Optional[SamplingRate] = None
+ timings: Optional[list[QueryTiming]] = Field(
+ default=None, description="Measured timings for different parts of the query generation process"
+ )
class DataWarehouseNode(BaseModel):
@@ -4684,19 +4502,6 @@ class EventsNode(BaseModel):
response: Optional[dict[str, Any]] = None
-class ExperimentFunnelsQueryResponse(BaseModel):
- model_config = ConfigDict(
- extra="forbid",
- )
- credible_intervals: dict[str, list[float]]
- expected_loss: float
- insight: FunnelsQueryResponse
- probability: dict[str, float]
- significance_code: ExperimentSignificanceCode
- significant: bool
- variants: list[ExperimentVariantFunnelsBaseStats]
-
-
class FunnelExclusionActionsNode(BaseModel):
model_config = ConfigDict(
extra="forbid",
@@ -4979,6 +4784,30 @@ class PropertyGroupFilterValue(BaseModel):
]
+class QueryResponseAlternative9(BaseModel):
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ dateFrom: Optional[str] = None
+ dateTo: Optional[str] = None
+ error: Optional[str] = Field(
+ default=None,
+ description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.",
+ )
+ hogql: Optional[str] = Field(default=None, description="Generated HogQL query.")
+ modifiers: Optional[HogQLQueryModifiers] = Field(
+ default=None, description="Modifiers used when performing the query"
+ )
+ query_status: Optional[QueryStatus] = Field(
+ default=None, description="Query status indicates whether next to the provided data, a query is still running."
+ )
+ results: list[WebOverviewItem]
+ samplingRate: Optional[SamplingRate] = None
+ timings: Optional[list[QueryTiming]] = Field(
+ default=None, description="Measured timings for different parts of the query generation process"
+ )
+
+
class QueryResponseAlternative32(BaseModel):
model_config = ConfigDict(
extra="forbid",
@@ -5058,6 +4887,25 @@ class TeamTaxonomyQuery(BaseModel):
response: Optional[TeamTaxonomyQueryResponse] = None
+class WebOverviewQuery(BaseModel):
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ compare: Optional[bool] = None
+ conversionGoal: Optional[Union[ActionConversionGoal, CustomEventConversionGoal]] = None
+ dateRange: Optional[DateRange] = None
+ filterTestAccounts: Optional[bool] = None
+ includeLCPScore: Optional[bool] = None
+ kind: Literal["WebOverviewQuery"] = "WebOverviewQuery"
+ modifiers: Optional[HogQLQueryModifiers] = Field(
+ default=None, description="Modifiers used when performing the query"
+ )
+ properties: list[Union[EventPropertyFilter, PersonPropertyFilter, SessionPropertyFilter]]
+ response: Optional[WebOverviewQueryResponse] = None
+ sampling: Optional[Sampling] = None
+ useSessionsTable: Optional[bool] = None
+
+
class AIActionsNode(BaseModel):
model_config = ConfigDict(
extra="forbid",
@@ -5530,6 +5378,50 @@ class VisualizationMessage(BaseModel):
type: Literal["ai/viz"] = "ai/viz"
+class CachedExperimentTrendsQueryResponse(BaseModel):
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ cache_key: str
+ cache_target_age: Optional[AwareDatetime] = None
+ calculation_trigger: Optional[str] = Field(
+ default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
+ )
+ count_query: Optional[TrendsQuery] = None
+ credible_intervals: dict[str, list[float]]
+ exposure_query: Optional[TrendsQuery] = None
+ insight: list[dict[str, Any]]
+ is_cached: bool
+ kind: Literal["ExperimentTrendsQuery"] = "ExperimentTrendsQuery"
+ last_refresh: AwareDatetime
+ next_allowed_client_refresh: AwareDatetime
+ p_value: float
+ probability: dict[str, float]
+ query_status: Optional[QueryStatus] = Field(
+ default=None, description="Query status indicates whether next to the provided data, a query is still running."
+ )
+ significance_code: ExperimentSignificanceCode
+ significant: bool
+ timezone: str
+ variants: list[ExperimentVariantTrendsBaseStats]
+
+
+class Response11(BaseModel):
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ count_query: Optional[TrendsQuery] = None
+ credible_intervals: dict[str, list[float]]
+ exposure_query: Optional[TrendsQuery] = None
+ insight: list[dict[str, Any]]
+ kind: Literal["ExperimentTrendsQuery"] = "ExperimentTrendsQuery"
+ p_value: float
+ probability: dict[str, float]
+ significance_code: ExperimentSignificanceCode
+ significant: bool
+ variants: list[ExperimentVariantTrendsBaseStats]
+
+
class ErrorTrackingQuery(BaseModel):
model_config = ConfigDict(
extra="forbid",
@@ -5617,18 +5509,20 @@ class EventsQuery(BaseModel):
where: Optional[list[str]] = Field(default=None, description="HogQL filters to apply on returned data")
-class ExperimentTrendsQuery(BaseModel):
+class ExperimentTrendsQueryResponse(BaseModel):
model_config = ConfigDict(
extra="forbid",
)
- count_query: TrendsQuery
- experiment_id: int
+ count_query: Optional[TrendsQuery] = None
+ credible_intervals: dict[str, list[float]]
exposure_query: Optional[TrendsQuery] = None
+ insight: list[dict[str, Any]]
kind: Literal["ExperimentTrendsQuery"] = "ExperimentTrendsQuery"
- modifiers: Optional[HogQLQueryModifiers] = Field(
- default=None, description="Modifiers used when performing the query"
- )
- response: Optional[ExperimentTrendsQueryResponse] = None
+ p_value: float
+ probability: dict[str, float]
+ significance_code: ExperimentSignificanceCode
+ significant: bool
+ variants: list[ExperimentVariantTrendsBaseStats]
class FunnelsQuery(BaseModel):
@@ -5928,6 +5822,68 @@ class LifecycleQuery(BaseModel):
)
+class QueryResponseAlternative16(BaseModel):
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ credible_intervals: dict[str, list[float]]
+ expected_loss: float
+ funnels_query: Optional[FunnelsQuery] = None
+ insight: list[list[dict[str, Any]]]
+ kind: Literal["ExperimentFunnelsQuery"] = "ExperimentFunnelsQuery"
+ probability: dict[str, float]
+ significance_code: ExperimentSignificanceCode
+ significant: bool
+ variants: list[ExperimentVariantFunnelsBaseStats]
+
+
+class QueryResponseAlternative17(BaseModel):
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ count_query: Optional[TrendsQuery] = None
+ credible_intervals: dict[str, list[float]]
+ exposure_query: Optional[TrendsQuery] = None
+ insight: list[dict[str, Any]]
+ kind: Literal["ExperimentTrendsQuery"] = "ExperimentTrendsQuery"
+ p_value: float
+ probability: dict[str, float]
+ significance_code: ExperimentSignificanceCode
+ significant: bool
+ variants: list[ExperimentVariantTrendsBaseStats]
+
+
+class QueryResponseAlternative28(BaseModel):
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ credible_intervals: dict[str, list[float]]
+ expected_loss: float
+ funnels_query: Optional[FunnelsQuery] = None
+ insight: list[list[dict[str, Any]]]
+ kind: Literal["ExperimentFunnelsQuery"] = "ExperimentFunnelsQuery"
+ probability: dict[str, float]
+ significance_code: ExperimentSignificanceCode
+ significant: bool
+ variants: list[ExperimentVariantFunnelsBaseStats]
+
+
+class QueryResponseAlternative29(BaseModel):
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ count_query: Optional[TrendsQuery] = None
+ credible_intervals: dict[str, list[float]]
+ exposure_query: Optional[TrendsQuery] = None
+ insight: list[dict[str, Any]]
+ kind: Literal["ExperimentTrendsQuery"] = "ExperimentTrendsQuery"
+ p_value: float
+ probability: dict[str, float]
+ significance_code: ExperimentSignificanceCode
+ significant: bool
+ variants: list[ExperimentVariantTrendsBaseStats]
+
+
class QueryResponseAlternative37(BaseModel):
model_config = ConfigDict(
extra="forbid",
@@ -5968,7 +5924,6 @@ class QueryResponseAlternative(
QueryResponseAlternative18,
QueryResponseAlternative19,
QueryResponseAlternative20,
- QueryResponseAlternative21,
QueryResponseAlternative22,
QueryResponseAlternative24,
QueryResponseAlternative25,
@@ -6011,7 +5966,6 @@ class QueryResponseAlternative(
QueryResponseAlternative18,
QueryResponseAlternative19,
QueryResponseAlternative20,
- QueryResponseAlternative21,
QueryResponseAlternative22,
QueryResponseAlternative24,
QueryResponseAlternative25,
@@ -6032,8 +5986,50 @@ class QueryResponseAlternative(
]
-class RootAssistantMessage(RootModel[Union[VisualizationMessage, AssistantMessage, HumanMessage]]):
- root: Union[VisualizationMessage, AssistantMessage, HumanMessage]
+class RootAssistantMessage(RootModel[Union[VisualizationMessage, AssistantMessage, HumanMessage, FailureMessage]]):
+ root: Union[VisualizationMessage, AssistantMessage, HumanMessage, FailureMessage]
+
+
+class CachedExperimentFunnelsQueryResponse(BaseModel):
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ cache_key: str
+ cache_target_age: Optional[AwareDatetime] = None
+ calculation_trigger: Optional[str] = Field(
+ default=None, description="What triggered the calculation of the query, leave empty if user/immediate"
+ )
+ credible_intervals: dict[str, list[float]]
+ expected_loss: float
+ funnels_query: Optional[FunnelsQuery] = None
+ insight: list[list[dict[str, Any]]]
+ is_cached: bool
+ kind: Literal["ExperimentFunnelsQuery"] = "ExperimentFunnelsQuery"
+ last_refresh: AwareDatetime
+ next_allowed_client_refresh: AwareDatetime
+ probability: dict[str, float]
+ query_status: Optional[QueryStatus] = Field(
+ default=None, description="Query status indicates whether next to the provided data, a query is still running."
+ )
+ significance_code: ExperimentSignificanceCode
+ significant: bool
+ timezone: str
+ variants: list[ExperimentVariantFunnelsBaseStats]
+
+
+class Response10(BaseModel):
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ credible_intervals: dict[str, list[float]]
+ expected_loss: float
+ funnels_query: Optional[FunnelsQuery] = None
+ insight: list[list[dict[str, Any]]]
+ kind: Literal["ExperimentFunnelsQuery"] = "ExperimentFunnelsQuery"
+ probability: dict[str, float]
+ significance_code: ExperimentSignificanceCode
+ significant: bool
+ variants: list[ExperimentVariantFunnelsBaseStats]
class DatabaseSchemaQueryResponse(BaseModel):
@@ -6052,17 +6048,33 @@ class DatabaseSchemaQueryResponse(BaseModel):
]
-class ExperimentFunnelsQuery(BaseModel):
+class ExperimentFunnelsQueryResponse(BaseModel):
model_config = ConfigDict(
extra="forbid",
)
- experiment_id: int
+ credible_intervals: dict[str, list[float]]
+ expected_loss: float
+ funnels_query: Optional[FunnelsQuery] = None
+ insight: list[list[dict[str, Any]]]
kind: Literal["ExperimentFunnelsQuery"] = "ExperimentFunnelsQuery"
+ probability: dict[str, float]
+ significance_code: ExperimentSignificanceCode
+ significant: bool
+ variants: list[ExperimentVariantFunnelsBaseStats]
+
+
+class ExperimentTrendsQuery(BaseModel):
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ count_query: TrendsQuery
+ experiment_id: int
+ exposure_query: Optional[TrendsQuery] = None
+ kind: Literal["ExperimentTrendsQuery"] = "ExperimentTrendsQuery"
modifiers: Optional[HogQLQueryModifiers] = Field(
default=None, description="Modifiers used when performing the query"
)
- response: Optional[ExperimentFunnelsQueryResponse] = None
- source: FunnelsQuery
+ response: Optional[ExperimentTrendsQueryResponse] = None
class FunnelPathsFilter(BaseModel):
@@ -6166,6 +6178,19 @@ class DatabaseSchemaQuery(BaseModel):
response: Optional[DatabaseSchemaQueryResponse] = None
+class ExperimentFunnelsQuery(BaseModel):
+ model_config = ConfigDict(
+ extra="forbid",
+ )
+ experiment_id: int
+ funnels_query: FunnelsQuery
+ kind: Literal["ExperimentFunnelsQuery"] = "ExperimentFunnelsQuery"
+ modifiers: Optional[HogQLQueryModifiers] = Field(
+ default=None, description="Modifiers used when performing the query"
+ )
+ response: Optional[ExperimentFunnelsQueryResponse] = None
+
+
class FunnelCorrelationQuery(BaseModel):
model_config = ConfigDict(
extra="forbid",
diff --git a/posthog/settings/web.py b/posthog/settings/web.py
index 6e32dff2e6138..2b5649b89b954 100644
--- a/posthog/settings/web.py
+++ b/posthog/settings/web.py
@@ -103,6 +103,7 @@
"posthog.health.healthcheck_middleware",
"posthog.middleware.ShortCircuitMiddleware",
"posthog.middleware.AllowIPMiddleware",
+ "whitenoise.middleware.WhiteNoiseMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"posthog.middleware.SessionAgeMiddleware",
"corsheaders.middleware.CorsMiddleware",
@@ -116,7 +117,6 @@
"posthog.middleware.AutoLogoutImpersonateMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
"posthog.middleware.CsvNeverCacheMiddleware",
- "whitenoise.middleware.WhiteNoiseMiddleware",
"axes.middleware.AxesMiddleware",
"posthog.middleware.AutoProjectMiddleware",
"posthog.middleware.CHQueries",
@@ -255,7 +255,7 @@
LOGOUT_URL = "/logout"
LOGIN_REDIRECT_URL = "/"
APPEND_SLASH = False
-CORS_URLS_REGEX = r"^/api/(?!early_access_features|surveys|web_experiments).*$"
+CORS_URLS_REGEX = r"^(/site_app/|/api/(?!early_access_features|surveys|web_experiments).*$)"
CORS_ALLOW_HEADERS = default_headers + CORS_ALLOWED_TRACING_HEADERS
X_FRAME_OPTIONS = "SAMEORIGIN"
diff --git a/posthog/temporal/batch_exports/temporary_file.py b/posthog/temporal/batch_exports/temporary_file.py
index 19973d3d84617..d26db8b976171 100644
--- a/posthog/temporal/batch_exports/temporary_file.py
+++ b/posthog/temporal/batch_exports/temporary_file.py
@@ -482,7 +482,7 @@ def write_dict(self, d: dict[str, typing.Any]) -> int:
# We tried, fallback to the slower but more permissive stdlib
# json.
logger.exception("PostHog $web_vitals event didn't match expected structure")
- dumped = json.dumps(d).encode("utf-8")
+ dumped = json.dumps(d, default=str).encode("utf-8")
n = self.batch_export_file.write(dumped + b"\n")
else:
dumped = orjson.dumps(d, default=str)
@@ -492,7 +492,7 @@ def write_dict(self, d: dict[str, typing.Any]) -> int:
# In this case, we fallback to the slower but more permissive stdlib
# json.
logger.exception("Orjson detected a deeply nested dict: %s", d)
- dumped = json.dumps(d).encode("utf-8")
+ dumped = json.dumps(d, default=str).encode("utf-8")
n = self.batch_export_file.write(dumped + b"\n")
else:
# Orjson is very strict about invalid unicode. This slow path protects us
diff --git a/posthog/test/base.py b/posthog/test/base.py
index 1958c630aa33d..0c40adacae97b 100644
--- a/posthog/test/base.py
+++ b/posthog/test/base.py
@@ -563,7 +563,7 @@ def assertQueryMatchesSnapshot(self, query, params=None, replace_all_numbers=Fal
#### Cohort replacements
# replace cohort id lists in queries too
query = re.sub(
- r"in((.*)?cohort_id, \[\d+(, ?\d+)*\])",
+ r"in((.*?)cohort_id, \[\d+(, ?\d+)*\])",
r"in(\1cohort_id, [1, 2, 3, 4, 5 /* ... */])",
query,
)
diff --git a/rust/common/metrics/src/lib.rs b/rust/common/metrics/src/lib.rs
index 9e82e98cc004f..967188db29fc3 100644
--- a/rust/common/metrics/src/lib.rs
+++ b/rust/common/metrics/src/lib.rs
@@ -46,7 +46,8 @@ pub fn setup_metrics_routes(router: Router) -> Router {
pub fn setup_metrics_recorder() -> PrometheusHandle {
const BUCKETS: &[f64] = &[
- 0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1.0, 2.5, 5.0, 10.0, 50.0, 100.0, 250.0,
+ 0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1.0, 2.5, 5.0, 10.0, 50.0, 100.0, 250.0, 500.0,
+ 1000.0, 2000.0, 5000.0, 10000.0,
];
PrometheusBuilder::new()
diff --git a/rust/cymbal/src/app_context.rs b/rust/cymbal/src/app_context.rs
index 5d7d130b89c9a..381c4de941f6b 100644
--- a/rust/cymbal/src/app_context.rs
+++ b/rust/cymbal/src/app_context.rs
@@ -1,8 +1,11 @@
-use std::time::Duration;
-
-use common_kafka::kafka_consumer::SingleTopicConsumer;
+use common_kafka::{
+ kafka_consumer::SingleTopicConsumer, kafka_producer::create_kafka_producer,
+ kafka_producer::KafkaContext,
+};
use health::{HealthHandle, HealthRegistry};
+use rdkafka::producer::FutureProducer;
use sqlx::{postgres::PgPoolOptions, PgPool};
+use std::time::Duration;
use tracing::info;
use crate::{
@@ -14,7 +17,8 @@ use crate::{
pub struct AppContext {
pub health_registry: HealthRegistry,
pub worker_liveness: HealthHandle,
- pub consumer: SingleTopicConsumer,
+ pub kafka_consumer: SingleTopicConsumer,
+ pub kafka_producer: FutureProducer,
pub pool: PgPool,
pub catalog: Catalog,
}
@@ -25,8 +29,15 @@ impl AppContext {
let worker_liveness = health_registry
.register("worker".to_string(), Duration::from_secs(60))
.await;
+ let kafka_liveness = health_registry
+ .register("rdkafka".to_string(), Duration::from_secs(30))
+ .await;
- let consumer = SingleTopicConsumer::new(config.kafka.clone(), config.consumer.clone())?;
+ let kafka_consumer =
+ SingleTopicConsumer::new(config.kafka.clone(), config.consumer.clone())?;
+ let kafka_producer = create_kafka_producer(&config.kafka, kafka_liveness)
+ .await
+ .expect("failed to create kafka producer");
let options = PgPoolOptions::new().max_connections(config.max_pg_connections);
let pool = options.connect(&config.database_url).await?;
@@ -44,7 +55,8 @@ impl AppContext {
Ok(Self {
health_registry,
worker_liveness,
- consumer,
+ kafka_consumer,
+ kafka_producer,
pool,
catalog,
})
diff --git a/rust/cymbal/src/langs/js.rs b/rust/cymbal/src/langs/js.rs
index 89deafdc46479..e8921d614518d 100644
--- a/rust/cymbal/src/langs/js.rs
+++ b/rust/cymbal/src/langs/js.rs
@@ -62,7 +62,7 @@ impl RawJSFrame {
(self, e).into()
}
- fn source_url(&self) -> Result {
+ pub fn source_url(&self) -> Result {
// We can't resolve a frame without a source ref, and are forced
// to assume this frame is not minified
let Some(source_url) = &self.source_url else {
diff --git a/rust/cymbal/src/main.rs b/rust/cymbal/src/main.rs
index 29086defed700..8087bff817872 100644
--- a/rust/cymbal/src/main.rs
+++ b/rust/cymbal/src/main.rs
@@ -1,4 +1,4 @@
-use std::{future::ready, sync::Arc};
+use std::{collections::HashMap, future::ready, sync::Arc};
use axum::{routing::get, Router};
use common_kafka::kafka_consumer::RecvErr;
@@ -8,7 +8,10 @@ use cymbal::{
app_context::AppContext,
config::Config,
error::Error,
- metric_consts::{ERRORS, EVENT_RECEIVED, MAIN_LOOP_TIME, PER_STACK_TIME, STACK_PROCESSED},
+ metric_consts::{
+ ERRORS, EVENT_RECEIVED, MAIN_LOOP_TIME, PER_FRAME_GROUP_TIME, PER_STACK_TIME,
+ STACK_PROCESSED,
+ },
types::{frames::RawFrame, ErrProps},
};
use envconfig::Envconfig;
@@ -64,7 +67,7 @@ async fn main() -> Result<(), Error> {
context.worker_liveness.report_healthy().await;
// Just grab the event as a serde_json::Value and immediately drop it,
// we can work out a real type for it later (once we're deployed etc)
- let (event, offset): (ClickHouseEvent, _) = match context.consumer.json_recv().await {
+ let (event, offset): (ClickHouseEvent, _) = match context.kafka_consumer.json_recv().await {
Ok(r) => r,
Err(RecvErr::Kafka(e)) => {
return Err(e.into()); // Just die if we receive a Kafka error
@@ -119,21 +122,37 @@ async fn main() -> Result<(), Error> {
let stack_trace: &Vec = &trace.frames;
let per_stack = common_metrics::timing_guard(PER_STACK_TIME, &[]);
- let mut frames = Vec::with_capacity(stack_trace.len());
+
+ // Cluster the frames by symbol set
+ let mut groups = HashMap::new();
for frame in stack_trace {
- match frame.resolve(event.team_id, &context.catalog).await {
- Ok(r) => frames.push(r),
- Err(err) => {
- metrics::counter!(ERRORS, "cause" => "frame_not_parsable").increment(1);
- error!("Error parsing stack frame: {:?}", err);
- continue;
+ let group = groups
+ .entry(frame.symbol_set_group_key())
+ .or_insert_with(Vec::new);
+ group.push(frame.clone());
+ }
+
+ let team_id = event.team_id;
+ let mut results = Vec::with_capacity(stack_trace.len());
+ for (_, frames) in groups.into_iter() {
+ context.worker_liveness.report_healthy().await; // TODO - we shouldn't need to do this, but we do for now.
+ let mut any_success = false;
+ let per_frame_group = common_metrics::timing_guard(PER_FRAME_GROUP_TIME, &[]);
+ for frame in frames {
+ results.push(frame.resolve(team_id, &context.catalog).await);
+ if results.last().unwrap().is_ok() {
+ any_success = true;
}
- };
+ }
+ per_frame_group
+ .label("resolved_any", if any_success { "true" } else { "false" })
+ .fin();
}
+
per_stack
.label(
"resolved_any",
- if frames.is_empty() { "true" } else { "false" },
+ if results.is_empty() { "true" } else { "false" },
)
.fin();
whole_loop.label("had_frame", "true").fin();
diff --git a/rust/cymbal/src/metric_consts.rs b/rust/cymbal/src/metric_consts.rs
index 093636eda9d26..aa46aa4719f6c 100644
--- a/rust/cymbal/src/metric_consts.rs
+++ b/rust/cymbal/src/metric_consts.rs
@@ -15,3 +15,4 @@ pub const STORE_CACHE_EVICTIONS: &str = "cymbal_store_cache_evictions";
pub const MAIN_LOOP_TIME: &str = "cymbal_main_loop_time";
pub const PER_FRAME_TIME: &str = "cymbal_per_frame_time";
pub const PER_STACK_TIME: &str = "cymbal_per_stack_time";
+pub const PER_FRAME_GROUP_TIME: &str = "cymbal_per_frame_group_time";
diff --git a/rust/cymbal/src/types/frames.rs b/rust/cymbal/src/types/frames.rs
index 816ef78ba3782..f14840cfb0073 100644
--- a/rust/cymbal/src/types/frames.rs
+++ b/rust/cymbal/src/types/frames.rs
@@ -27,6 +27,11 @@ impl RawFrame {
res
}
+
+ pub fn symbol_set_group_key(&self) -> String {
+ let RawFrame::JavaScript(raw) = self;
+ raw.source_url().map(String::from).unwrap_or_default()
+ }
}
// We emit a single, unified representation of a frame, which is what we pass on to users.