@@ -26,7 +28,7 @@ export const UnsubscribeCard = ({ product }: { product: BillingProductV2Type }):
reduce your bill
{' '}
or{' '}
-<Link>
+<Link onClick={() => openSupportForm({ target_area: 'billing', isEmailFormOpen: true })}>
chat with support.
</Link>{' '}
Check out more about our pricing on our{' '}
diff --git a/frontend/src/scenes/billing/UnsubscribeSurveyModal.tsx b/frontend/src/scenes/billing/UnsubscribeSurveyModal.tsx
index d2b606f9016b6..123f46c02739a 100644
--- a/frontend/src/scenes/billing/UnsubscribeSurveyModal.tsx
+++ b/frontend/src/scenes/billing/UnsubscribeSurveyModal.tsx
@@ -2,6 +2,7 @@ import './UnsubscribeSurveyModal.scss'
import { LemonBanner, LemonButton, LemonCheckbox, LemonLabel, LemonModal, LemonTextArea, Link } from '@posthog/lemon-ui'
import { useActions, useValues } from 'kea'
+import { supportLogic } from 'lib/components/Support/supportLogic'
import { BillingProductV2AddonType, BillingProductV2Type } from '~/types'
@@ -32,6 +33,7 @@ export const UnsubscribeSurveyModal = ({
const { deactivateProduct, resetUnsubscribeError } = useActions(billingLogic)
const { unsubscribeError, billingLoading, billing } = useValues(billingLogic)
const { unsubscribeDisabledReason, itemsToDisable } = useValues(exportsUnsubscribeTableLogic)
+ const { openSupportForm } = useActions(supportLogic)
const textAreaNotEmpty = surveyResponse['$survey_response']?.length > 0
const includesPipelinesAddon =
@@ -150,10 +152,11 @@ export const UnsubscribeSurveyModal = ({
{`${product.type !== 'session_replay' ? ' or ' : ', '}`}
<Link
onClick={() => {
reportSurveyDismissed(surveyID)
+ openSupportForm({ target_area: 'billing', isEmailFormOpen: true })
}}
>
chat with support
</Link>
diff --git a/frontend/src/scenes/billing/paymentEntryLogic.ts b/frontend/src/scenes/billing/paymentEntryLogic.ts
new file mode 100644
index 0000000000000..ebedbfe8b8afa
--- /dev/null
+++ b/frontend/src/scenes/billing/paymentEntryLogic.ts
@@ -0,0 +1,124 @@
+import { kea } from 'kea'
+import api from 'lib/api'
+import { urls } from 'scenes/urls'
+
+import type { paymentEntryLogicType } from './paymentEntryLogicType'
+
+export const paymentEntryLogic = kea<paymentEntryLogicType>({
+ path: ['scenes', 'billing', 'PaymentEntryLogic'],
+
+ actions: {
+ setClientSecret: (clientSecret) => ({ clientSecret }),
+ setLoading: (loading) => ({ loading }),
+ setError: (error) => ({ error }),
+ initiateAuthorization: (redirectPath: string | null) => ({ redirectPath }),
+ pollAuthorizationStatus: true,
+ setAuthorizationStatus: (status: string | null) => ({ status }),
+ showPaymentEntryModal: true,
+ hidePaymentEntryModal: true,
+ setRedirectPath: (redirectPath: string | null) => ({ redirectPath }),
+ },
+
+ reducers: {
+ clientSecret: [
+ null,
+ {
+ setClientSecret: (_, { clientSecret }) => clientSecret,
+ },
+ ],
+ isLoading: [
+ false,
+ {
+ setLoading: (_, { loading }) => loading,
+ },
+ ],
+ error: [
+ null,
+ {
+ setError: (_, { error }) => error,
+ },
+ ],
+ authorizationStatus: [
+ null as string | null,
+ {
+ setAuthorizationStatus: (_, { status }) => status,
+ },
+ ],
+ paymentEntryModalOpen: [
+ false,
+ {
+ showPaymentEntryModal: () => true,
+ hidePaymentEntryModal: () => false,
+ },
+ ],
+ redirectPath: [
+ null as string | null,
+ {
+ setRedirectPath: (_, { redirectPath }) => redirectPath,
+ },
+ ],
+ },
+
+ listeners: ({ actions, values }) => ({
+ initiateAuthorization: async ({ redirectPath }) => {
+ actions.setLoading(true)
+ actions.setError(null)
+ try {
+ const response = await api.create('api/billing/activate/authorize')
+ actions.setClientSecret(response.clientSecret)
+ actions.setRedirectPath(redirectPath)
+ actions.setLoading(false)
+ } catch (error) {
+ actions.setError('Failed to initialize payment')
+ }
+ },
+
+ pollAuthorizationStatus: async () => {
+ const pollInterval = 2000 // Poll every 2 seconds
+ const maxAttempts = 30 // Max 1 minute of polling (30 * 2 seconds)
+ let attempts = 0
+
+ const poll = async (): Promise<void> => {
+ try {
+ const urlParams = new URLSearchParams(window.location.search)
+ const paymentIntentId = urlParams.get('payment_intent')
+ const response = await api.create('api/billing/activate/authorize/status', {
+ payment_intent_id: paymentIntentId,
+ })
+ const status = response.status
+
+ actions.setAuthorizationStatus(status)
+
+ if (status === 'success') {
+ if (values.redirectPath) {
+ window.location.pathname = values.redirectPath
+ } else {
+ window.location.pathname = urls.organizationBilling()
+ }
+ return
+ } else if (status === 'failed') {
+ actions.setError('Payment failed')
+ return
+ }
+
+ attempts++
+ if (attempts < maxAttempts) {
+ setTimeout(() => void poll(), pollInterval)
+ } else {
+ actions.setError('Payment status check timed out')
+ }
+ } catch (error) {
+ actions.setError('Failed to check payment status')
+ } finally {
+ // Reset the state
+ actions.setLoading(false)
+ actions.setAuthorizationStatus(null)
+ actions.setClientSecret(null)
+ actions.setRedirectPath(null)
+ }
+ }
+
+ await poll()
+ },
+ }),
+})
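Note on the flow above: `initiateAuthorization` fetches a client secret, then `pollAuthorizationStatus` re-checks the payment intent every 2 seconds and gives up after 30 attempts (~1 minute). A minimal standalone sketch of that bounded-polling pattern, with a hypothetical `fetchStatus` standing in for the billing status call (written as a loop here, rather than the recursive `setTimeout` used above):

```typescript
// Sketch only: `fetchStatus` is a placeholder, not part of this diff.
async function pollUntilSettled(
    fetchStatus: () => Promise<'pending' | 'success' | 'failed'>,
    intervalMs = 2000,
    maxAttempts = 30
): Promise<'success' | 'failed' | 'timed_out'> {
    for (let attempt = 0; attempt < maxAttempts; attempt++) {
        const status = await fetchStatus()
        if (status === 'success' || status === 'failed') {
            return status // settled: caller redirects on success, surfaces error on failure
        }
        await new Promise((resolve) => setTimeout(resolve, intervalMs))
    }
    return 'timed_out' // mirrors the 'Payment status check timed out' error above
}
```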
diff --git a/frontend/src/scenes/error-tracking/errorTrackingGroupSceneLogic.ts b/frontend/src/scenes/error-tracking/errorTrackingGroupSceneLogic.ts
index 1036f771ce1d4..22d8f4c4692fb 100644
--- a/frontend/src/scenes/error-tracking/errorTrackingGroupSceneLogic.ts
+++ b/frontend/src/scenes/error-tracking/errorTrackingGroupSceneLogic.ts
@@ -1,4 +1,4 @@
-import { actions, connect, kea, listeners, path, props, reducers, selectors } from 'kea'
+import { actions, connect, kea, key, listeners, path, props, reducers, selectors } from 'kea'
import { loaders } from 'kea-loaders'
import { actionToUrl, router, urlToAction } from 'kea-router'
import api from 'lib/api'
@@ -37,6 +37,7 @@ export enum ErrorGroupTab {
export const errorTrackingGroupSceneLogic = kea<errorTrackingGroupSceneLogicType>([
path((key) => ['scenes', 'error-tracking', 'errorTrackingGroupSceneLogic', key]),
props({} as ErrorTrackingGroupSceneLogicProps),
+ key((props) => JSON.stringify(props.fingerprint)),
connect({
values: [errorTrackingLogic, ['dateRange', 'filterTestAccounts', 'filterGroup', 'hasGroupActions']],
diff --git a/frontend/src/scenes/funnels/FunnelBarVertical/FunnelBarVertical.scss b/frontend/src/scenes/funnels/FunnelBarVertical/FunnelBarVertical.scss
index eb7a5c9666be3..0051fe2205c94 100644
--- a/frontend/src/scenes/funnels/FunnelBarVertical/FunnelBarVertical.scss
+++ b/frontend/src/scenes/funnels/FunnelBarVertical/FunnelBarVertical.scss
@@ -28,8 +28,6 @@
}
> td {
- // Sneaky hack to make height: 100% work in .StepLegend. The wonders of CSS - there's NO other way!
- height: 1px;
padding: 0.75rem 0;
}
}
diff --git a/frontend/src/scenes/sceneTypes.ts b/frontend/src/scenes/sceneTypes.ts
index 5624d8b2028d1..4f94e984542d5 100644
--- a/frontend/src/scenes/sceneTypes.ts
+++ b/frontend/src/scenes/sceneTypes.ts
@@ -55,6 +55,7 @@ export enum Scene {
AsyncMigrations = 'AsyncMigrations',
DeadLetterQueue = 'DeadLetterQueue',
Billing = 'Billing',
+ BillingAuthorizationStatus = 'BillingAuthorizationStatus',
SavedInsights = 'SavedInsights',
ToolbarLaunch = 'ToolbarLaunch',
Site = 'Site',
diff --git a/frontend/src/scenes/scenes.ts b/frontend/src/scenes/scenes.ts
index 9c233541f99ee..c90772eef7fa4 100644
--- a/frontend/src/scenes/scenes.ts
+++ b/frontend/src/scenes/scenes.ts
@@ -350,6 +350,11 @@ export const sceneConfigurations: Record<Scene, SceneConfig> = {
organizationBased: true,
defaultDocsPath: '/pricing',
},
+ [Scene.BillingAuthorizationStatus]: {
+ hideProjectNotice: true,
+ organizationBased: true,
+ defaultDocsPath: '/pricing',
+ },
[Scene.Unsubscribe]: {
allowUnauthenticated: true,
layout: 'app-raw',
@@ -542,6 +547,7 @@ export const routes: Record<string, Scene> = {
[urls.max()]: Scene.Max,
[urls.projectCreateFirst()]: Scene.ProjectCreateFirst,
[urls.organizationBilling()]: Scene.Billing,
+ [urls.billingAuthorizationStatus()]: Scene.BillingAuthorizationStatus,
[urls.organizationCreateFirst()]: Scene.OrganizationCreateFirst,
[urls.organizationCreationConfirm()]: Scene.OrganizationCreationConfirm,
[urls.instanceStatus()]: Scene.SystemStatus,
diff --git a/frontend/src/scenes/urls.ts b/frontend/src/scenes/urls.ts
index 13e83e799eac7..146c561e225e1 100644
--- a/frontend/src/scenes/urls.ts
+++ b/frontend/src/scenes/urls.ts
@@ -182,6 +182,7 @@ export const urls = {
// Cloud only
organizationBilling: (products?: ProductKey[]): string =>
`/organization/billing${products && products.length ? `?products=${products.join(',')}` : ''}`,
+ billingAuthorizationStatus: (): string => `/billing/authorization_status`,
// Self-hosted only
instanceStatus: (): string => '/instance/status',
instanceStaffUsers: (): string => '/instance/staff_users',
diff --git a/frontend/src/toolbar/bar/Toolbar.tsx b/frontend/src/toolbar/bar/Toolbar.tsx
index 8c670fa3f3e5e..a24492ecec66e 100644
--- a/frontend/src/toolbar/bar/Toolbar.tsx
+++ b/frontend/src/toolbar/bar/Toolbar.tsx
@@ -201,6 +201,7 @@ export function Toolbar(): JSX.Element | null {
{
'--toolbar-button-x': `${position.x}px`,
'--toolbar-button-y': `${position.y}px`,
+ width: showExperiments ? '334px' : 'var(--toolbar-width-expanded)',
} as any
}
>
diff --git a/funnel-udf/src/main.rs b/funnel-udf/src/main.rs
index 6169b0de2f824..ec45afdad2f58 100644
--- a/funnel-udf/src/main.rs
+++ b/funnel-udf/src/main.rs
@@ -1,9 +1,9 @@
+mod steps;
+mod trends;
+
+use std::env;
use serde::{Deserialize, Serialize};
-use serde_json::json;
use std::io::{self, BufRead, Write};
-use std::iter::repeat;
-use itertools::{Itertools};
-use uuid::Uuid;
#[derive(Clone, PartialEq, Deserialize, Serialize)]
#[serde(untagged)]
@@ -13,251 +13,19 @@ enum PropVal {
Int(u32),
}
-#[derive(Clone, Deserialize)]
-struct EnteredTimestamp {
- timestamp: f64,
- timings: Vec<f64>,
- uuids: Vec<Uuid>,
-}
-
-#[derive(Clone, Deserialize)]
-struct Event {
- timestamp: f64,
- uuid: Uuid,
- breakdown: PropVal,
- steps: Vec<i8>,
-}
-
-#[derive(Deserialize)]
-struct Args {
- num_steps: usize,
- conversion_window_limit: u64, // In seconds
- breakdown_attribution_type: String,
- funnel_order_type: String,
- prop_vals: Vec<PropVal>,
- value: Vec<Event>,
-}
-
-#[derive(Serialize)]
-struct Result(i8, PropVal, Vec<f64>, Vec<Vec<Uuid>>);
-
-struct Vars {
- max_step: (usize, EnteredTimestamp),
- event_uuids: Vec<Vec<Uuid>>,
- entered_timestamp: Vec<EnteredTimestamp>
-}
-
-struct AggregateFunnelRow {
- breakdown_step: Option<usize>,
- results: Vec,
-}
-
-const MAX_REPLAY_EVENTS: usize = 10;
-
-const DEFAULT_ENTERED_TIMESTAMP: EnteredTimestamp = EnteredTimestamp {
- timestamp: 0.0,
- timings: vec![],
- uuids: vec![],
-};
-
-#[inline(always)]
-fn parse_args(line: &str) -> Args {
- serde_json::from_str(line).expect("Invalid JSON input")
-}
-
-impl AggregateFunnelRow {
- #[inline(always)]
- fn calculate_funnel_from_user_events(&mut self, args: &Args) -> &Vec<Result> {
- if args.breakdown_attribution_type.starts_with("step_") {
- self.breakdown_step = args.breakdown_attribution_type[5..].parse::<usize>().ok()
- }
-
- args.prop_vals.iter().for_each(|prop_val| self.loop_prop_val(args, prop_val));
-
- &self.results
- }
-
- #[inline(always)]
- fn loop_prop_val(&mut self, args: &Args, prop_val: &PropVal) {
- let mut vars = Vars {
- max_step: (0, DEFAULT_ENTERED_TIMESTAMP.clone()),
- event_uuids: repeat(Vec::new()).take(args.num_steps).collect(),
- entered_timestamp: vec![DEFAULT_ENTERED_TIMESTAMP.clone(); args.num_steps + 1]
- };
-
- let filtered_events = args.value.iter()
- .filter(|e| {
- if args.breakdown_attribution_type == "all_events" {
- e.breakdown == *prop_val
- } else {
- true
- }
- })
- .group_by(|e| e.timestamp);
-
- for (timestamp, events_with_same_timestamp) in &filtered_events {
- let events_with_same_timestamp: Vec<_> = events_with_same_timestamp.collect();
- vars.entered_timestamp[0] = EnteredTimestamp {
- timestamp,
- timings: vec![],
- uuids: vec![],
- };
-
- if events_with_same_timestamp.len() == 1 {
- if !self.process_event(
- args,
- &mut vars,
- &events_with_same_timestamp[0],
- prop_val,
- false
- ) {
- return;
- }
- } else if events_with_same_timestamp.iter().map(|x| &x.steps).all_equal() {
- // Deal with the most common case where they are all the same event (order doesn't matter)
- for event in events_with_same_timestamp {
- if !self.process_event(
- args,
- &mut vars,
- event,
- prop_val,
- false
- ) {
- return;
- }
- }
- } else {
- // Handle permutations for different events with the same timestamp
- // We ignore strict steps and exclusions in this case
- // The behavior here is mostly dictated by how it was handled in the old style
-
- let sorted_events = events_with_same_timestamp
- .iter()
- .flat_map(|&event| {
- event.steps
- .iter()
- .filter(|&&step| step > 0)
- .map(|&step| Event { steps: vec![step], ..event.clone() })
- }).sorted_by_key(|event| event.steps[0]);
-
- // Run exclusions, if they exist, then run matching events.
- for event in sorted_events {
- if !self.process_event(
- args,
- &mut vars,
- &event,
- &prop_val,
- true
- ) {
- return;
- }
- }
- }
-
- // If we hit the goal, we can terminate early
- if vars.entered_timestamp[args.num_steps].timestamp > 0.0 {
- break;
- }
- }
-
- // Find the furthest step we have made it to and print it
- let final_index = vars.max_step.0;
- let final_value = &vars.max_step.1;
-
- for i in 0..final_index {
- //if event_uuids[i].len() >= MAX_REPLAY_EVENTS && !event_uuids[i].contains(&final_value.uuids[i]) {
- // Always put the actual event uuids first, we use it to extract timestamps
- // This might create duplicates, but that's fine (we can remove it in clickhouse)
- vars.event_uuids[i].insert(0, final_value.uuids[i].clone());
- }
- self.results.push(Result(
- final_index as i8 - 1,
- prop_val.clone(),
- final_value.timings.windows(2).map(|w| w[1] - w[0]).collect(),
- vars.event_uuids,
- ))
- }
-
- #[inline(always)]
- fn process_event(
- &mut self,
- args: &Args,
- vars: &mut Vars,
- event: &Event,
- prop_val: &PropVal,
- processing_multiple_events: bool
- ) -> bool {
- for step in event.steps.iter().rev() {
- let mut exclusion = false;
- let step = (if *step < 0 {
- exclusion = true;
- -*step
- } else {
- *step
- }) as usize;
-
- let in_match_window = (event.timestamp - vars.entered_timestamp[step - 1].timestamp) <= args.conversion_window_limit as f64;
- let already_reached_this_step = vars.entered_timestamp[step].timestamp == vars.entered_timestamp[step - 1].timestamp
- && vars.entered_timestamp[step].timestamp != 0.0;
-
- if in_match_window && !already_reached_this_step {
- if exclusion {
- self.results.push(Result(-1, prop_val.clone(), vec![], vec![]));
- return false;
- }
- let is_unmatched_step_attribution = self.breakdown_step.map(|breakdown_step| step == breakdown_step - 1).unwrap_or(false) && *prop_val != event.breakdown;
- let already_used_event = processing_multiple_events && vars.entered_timestamp[step-1].uuids.contains(&event.uuid);
- if !is_unmatched_step_attribution && !already_used_event {
- vars.entered_timestamp[step] = EnteredTimestamp {
- timestamp: vars.entered_timestamp[step - 1].timestamp,
- timings: {
- let mut timings = vars.entered_timestamp[step - 1].timings.clone();
- timings.push(event.timestamp);
- timings
- },
- uuids: {
- let mut uuids = vars.entered_timestamp[step - 1].uuids.clone();
- uuids.push(event.uuid);
- uuids
- },
- };
- if vars.event_uuids[step - 1].len() < MAX_REPLAY_EVENTS - 1 {
- vars.event_uuids[step - 1].push(event.uuid);
- }
- if step > vars.max_step.0 {
- vars.max_step = (step, vars.entered_timestamp[step].clone());
- }
- }
- }
- }
-
- // If a strict funnel, clear all of the steps that we didn't match to
- // If we are processing multiple events, skip this step, because ordering makes it complicated
- if !processing_multiple_events && args.funnel_order_type == "strict" {
- for i in 1..vars.entered_timestamp.len() {
- if !event.steps.contains(&(i as i8)) {
- vars.entered_timestamp[i] = DEFAULT_ENTERED_TIMESTAMP;
- }
- }
- }
-
- true
- }
-}
-
fn main() {
+ let args: Vec<String> = env::args().collect();
+ let arg = args.get(1).map(|x| x.as_str());
+
let stdin = io::stdin();
let mut stdout = io::stdout();
for line in stdin.lock().lines() {
if let Ok(line) = line {
- let args = parse_args(&line);
- let mut aggregate_funnel_row = AggregateFunnelRow {
- results: Vec::with_capacity(args.prop_vals.len()),
- breakdown_step: Option::None,
+ let output = match arg {
+ Some("trends") => trends::process_line(&line),
+ _ => steps::process_line(&line),
};
- let result = aggregate_funnel_row.calculate_funnel_from_user_events(&args);
- let output = json!({ "result": result });
writeln!(stdout, "{}", output).unwrap();
stdout.flush().unwrap();
}
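The reworked `main` now only handles I/O and dispatch: each stdin line is a self-contained JSON payload, each stdout line a JSON `{"result": ...}` response, and the first CLI argument selects the algorithm (`trends`, otherwise steps). A rough TypeScript illustration of the same line-oriented protocol (handlers are placeholders; the real implementations are the Rust modules below):

```typescript
import * as readline from 'node:readline'

// Placeholder handlers standing in for steps::process_line / trends::process_line.
const handlers: Record<string, (line: string) => unknown> = {
    steps: (line) => ({ result: JSON.parse(line) }),
    trends: (line) => ({ result: JSON.parse(line) }),
}

const mode = process.argv[2] === 'trends' ? 'trends' : 'steps'
const rl = readline.createInterface({ input: process.stdin })
rl.on('line', (line) => {
    // One JSON document in, one JSON document out, flushed per line.
    process.stdout.write(JSON.stringify(handlers[mode](line)) + '\n')
})
```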
diff --git a/funnel-udf/src/steps.rs b/funnel-udf/src/steps.rs
new file mode 100644
index 0000000000000..a5075ecc4e864
--- /dev/null
+++ b/funnel-udf/src/steps.rs
@@ -0,0 +1,248 @@
+use std::iter::repeat;
+use itertools::Itertools;
+use serde::{Deserialize, Serialize};
+use serde_json::{json, Value};
+use uuid::Uuid;
+use crate::PropVal;
+
+#[derive(Clone, Deserialize)]
+struct EnteredTimestamp {
+ timestamp: f64,
+ timings: Vec<f64>,
+ uuids: Vec<Uuid>,
+}
+
+#[derive(Clone, Deserialize)]
+struct Event {
+ timestamp: f64,
+ uuid: Uuid,
+ breakdown: PropVal,
+ steps: Vec<i8>,
+}
+
+#[derive(Deserialize)]
+struct Args {
+ num_steps: usize,
+ conversion_window_limit: u64, // In seconds
+ breakdown_attribution_type: String,
+ funnel_order_type: String,
+ prop_vals: Vec<PropVal>,
+ value: Vec<Event>,
+}
+
+#[derive(Serialize)]
+struct Result(i8, PropVal, Vec<f64>, Vec<Vec<Uuid>>);
+
+struct Vars {
+ max_step: (usize, EnteredTimestamp),
+ event_uuids: Vec<Vec<Uuid>>,
+ entered_timestamp: Vec<EnteredTimestamp>
+}
+
+struct AggregateFunnelRow {
+ breakdown_step: Option<usize>,
+ results: Vec,
+}
+
+const MAX_REPLAY_EVENTS: usize = 10;
+
+const DEFAULT_ENTERED_TIMESTAMP: EnteredTimestamp = EnteredTimestamp {
+ timestamp: 0.0,
+ timings: vec![],
+ uuids: vec![],
+};
+
+pub fn process_line(line: &str) -> Value {
+ let args = parse_args(&line);
+ let mut aggregate_funnel_row = AggregateFunnelRow {
+ results: Vec::with_capacity(args.prop_vals.len()),
+ breakdown_step: Option::None,
+ };
+ let result = aggregate_funnel_row.calculate_funnel_from_user_events(&args);
+ json!({ "result": result })
+}
+
+#[inline(always)]
+fn parse_args(line: &str) -> Args {
+ serde_json::from_str(line).expect("Invalid JSON input")
+}
+
+impl AggregateFunnelRow {
+ #[inline(always)]
+ fn calculate_funnel_from_user_events(&mut self, args: &Args) -> &Vec<Result> {
+ if args.breakdown_attribution_type.starts_with("step_") {
+ self.breakdown_step = args.breakdown_attribution_type[5..].parse::<usize>().ok()
+ }
+
+ args.prop_vals.iter().for_each(|prop_val| self.loop_prop_val(args, prop_val));
+
+ &self.results
+ }
+
+ #[inline(always)]
+ fn loop_prop_val(&mut self, args: &Args, prop_val: &PropVal) {
+ let mut vars = Vars {
+ max_step: (0, DEFAULT_ENTERED_TIMESTAMP.clone()),
+ event_uuids: repeat(Vec::new()).take(args.num_steps).collect(),
+ entered_timestamp: vec![DEFAULT_ENTERED_TIMESTAMP.clone(); args.num_steps + 1]
+ };
+
+ let filtered_events = args.value.iter()
+ .filter(|e| {
+ if args.breakdown_attribution_type == "all_events" {
+ e.breakdown == *prop_val
+ } else {
+ true
+ }
+ })
+ .group_by(|e| e.timestamp);
+
+ for (timestamp, events_with_same_timestamp) in &filtered_events {
+ let events_with_same_timestamp: Vec<_> = events_with_same_timestamp.collect();
+ vars.entered_timestamp[0] = EnteredTimestamp {
+ timestamp,
+ timings: vec![],
+ uuids: vec![],
+ };
+
+ if events_with_same_timestamp.len() == 1 {
+ if !self.process_event(
+ args,
+ &mut vars,
+ &events_with_same_timestamp[0],
+ prop_val,
+ false
+ ) {
+ return;
+ }
+ } else if events_with_same_timestamp.iter().map(|x| &x.steps).all_equal() {
+ // Deal with the most common case where they are all the same event (order doesn't matter)
+ for event in events_with_same_timestamp {
+ if !self.process_event(
+ args,
+ &mut vars,
+ event,
+ prop_val,
+ false
+ ) {
+ return;
+ }
+ }
+ } else {
+ // Handle permutations for different events with the same timestamp
+ // We ignore strict steps and exclusions in this case
+ // The behavior here is mostly dictated by how it was handled in the old style
+
+ let sorted_events = events_with_same_timestamp
+ .iter()
+ .flat_map(|&event| {
+ event.steps
+ .iter()
+ .filter(|&&step| step > 0)
+ .map(|&step| Event { steps: vec![step], ..event.clone() })
+ }).sorted_by_key(|event| event.steps[0]);
+
+ // Run exclusions, if they exist, then run matching events.
+ for event in sorted_events {
+ if !self.process_event(
+ args,
+ &mut vars,
+ &event,
+ &prop_val,
+ true
+ ) {
+ return;
+ }
+ }
+ }
+
+ // If we hit the goal, we can terminate early
+ if vars.entered_timestamp[args.num_steps].timestamp > 0.0 {
+ break;
+ }
+ }
+
+ // Find the furthest step we have made it to and print it
+ let final_index = vars.max_step.0;
+ let final_value = &vars.max_step.1;
+
+ for i in 0..final_index {
+ //if event_uuids[i].len() >= MAX_REPLAY_EVENTS && !event_uuids[i].contains(&final_value.uuids[i]) {
+ // Always put the actual event uuids first, we use it to extract timestamps
+ // This might create duplicates, but that's fine (we can remove it in clickhouse)
+ vars.event_uuids[i].insert(0, final_value.uuids[i].clone());
+ }
+ self.results.push(Result(
+ final_index as i8 - 1,
+ prop_val.clone(),
+ final_value.timings.windows(2).map(|w| w[1] - w[0]).collect(),
+ vars.event_uuids,
+ ))
+ }
+
+ #[inline(always)]
+ fn process_event(
+ &mut self,
+ args: &Args,
+ vars: &mut Vars,
+ event: &Event,
+ prop_val: &PropVal,
+ processing_multiple_events: bool
+ ) -> bool {
+ for step in event.steps.iter().rev() {
+ let mut exclusion = false;
+ let step = (if *step < 0 {
+ exclusion = true;
+ -*step
+ } else {
+ *step
+ }) as usize;
+
+ let in_match_window = (event.timestamp - vars.entered_timestamp[step - 1].timestamp) <= args.conversion_window_limit as f64;
+ let already_reached_this_step = vars.entered_timestamp[step].timestamp == vars.entered_timestamp[step - 1].timestamp
+ && vars.entered_timestamp[step].timestamp != 0.0;
+
+ if in_match_window && !already_reached_this_step {
+ if exclusion {
+ self.results.push(Result(-1, prop_val.clone(), vec![], vec![]));
+ return false;
+ }
+ let is_unmatched_step_attribution = self.breakdown_step.map(|breakdown_step| step == breakdown_step - 1).unwrap_or(false) && *prop_val != event.breakdown;
+ let already_used_event = processing_multiple_events && vars.entered_timestamp[step-1].uuids.contains(&event.uuid);
+ if !is_unmatched_step_attribution && !already_used_event {
+ vars.entered_timestamp[step] = EnteredTimestamp {
+ timestamp: vars.entered_timestamp[step - 1].timestamp,
+ timings: {
+ let mut timings = vars.entered_timestamp[step - 1].timings.clone();
+ timings.push(event.timestamp);
+ timings
+ },
+ uuids: {
+ let mut uuids = vars.entered_timestamp[step - 1].uuids.clone();
+ uuids.push(event.uuid);
+ uuids
+ },
+ };
+ if vars.event_uuids[step - 1].len() < MAX_REPLAY_EVENTS - 1 {
+ vars.event_uuids[step - 1].push(event.uuid);
+ }
+ if step > vars.max_step.0 {
+ vars.max_step = (step, vars.entered_timestamp[step].clone());
+ }
+ }
+ }
+ }
+
+ // If a strict funnel, clear all of the steps that we didn't match to
+ // If we are processing multiple events, skip this step, because ordering makes it complicated
+ if !processing_multiple_events && args.funnel_order_type == "strict" {
+ for i in 1..vars.entered_timestamp.len() {
+ if !event.steps.contains(&(i as i8)) {
+ vars.entered_timestamp[i] = DEFAULT_ENTERED_TIMESTAMP;
+ }
+ }
+ }
+
+ true
+ }
+}
\ No newline at end of file
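`steps.rs` is the pre-existing steps algorithm extracted from `main.rs` unchanged: per breakdown value it walks timestamp-ordered events, advances an array of entered timestamps whenever a step lands inside the conversion window, and reports the furthest step reached (zero-based, -1 meaning none). A heavily simplified TypeScript model of that core loop, omitting exclusions, breakdown attribution, replay UUIDs, and same-timestamp permutation handling:

```typescript
// Simplified model of the furthest-step computation; events pre-sorted by timestamp, steps 1-based.
function furthestStep(
    events: { timestamp: number; step: number }[],
    numSteps: number,
    conversionWindowS: number
): number {
    // entered[i] = funnel start timestamp if step i has been reached, 0 otherwise
    const entered: number[] = new Array(numSteps + 1).fill(0)
    let maxStep = 0
    for (const e of events) {
        entered[0] = e.timestamp // step 1 can always (re)start a funnel attempt "now"
        const prev = entered[e.step - 1]
        if (prev !== 0 && e.timestamp - prev <= conversionWindowS && entered[e.step] === 0) {
            entered[e.step] = prev // propagate the original start time forward
            maxStep = Math.max(maxStep, e.step)
            if (maxStep === numSteps) {
                break // hit the goal, terminate early (as the Rust loop does)
            }
        }
    }
    return maxStep - 1 // matches the Result convention: -1 = no step completed
}
```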
diff --git a/funnel-udf/src/trends.rs b/funnel-udf/src/trends.rs
new file mode 100644
index 0000000000000..0b9cdd259e247
--- /dev/null
+++ b/funnel-udf/src/trends.rs
@@ -0,0 +1,193 @@
+use std::collections::HashMap;
+use std::str::FromStr;
+use itertools::Itertools;
+use serde::{Deserialize, Serialize};
+use serde_json::{json, Value};
+use crate::PropVal;
+
+fn deserialize_number_from_string<'de, D>(deserializer: D) -> Result<u64, D::Error>
+where
+ D: serde::Deserializer<'de>,
+{
+ let s = String::deserialize(deserializer)?;
+ u64::from_str(&s).map_err(serde::de::Error::custom)
+}
+
+#[derive(Clone, Deserialize)]
+struct EnteredTimestamp {
+ timestamp: f64,
+ timings: Vec<f64>,
+}
+
+#[derive(Clone, Deserialize)]
+struct Event {
+ timestamp: f64,
+ #[serde(deserialize_with = "deserialize_number_from_string")]
+ interval_start: u64,
+ breakdown: PropVal,
+ steps: Vec<i8>,
+}
+
+#[derive(Deserialize)]
+struct Args {
+ from_step: usize,
+ num_steps: usize,
+ conversion_window_limit: u64, // In seconds
+ breakdown_attribution_type: String,
+ funnel_order_type: String,
+ prop_vals: Vec<PropVal>,
+ value: Vec<Event>,
+}
+
+#[derive(Serialize)]
+struct ResultStruct(u64, i8, PropVal);
+
+struct Vars {
+ interval_start_to_entered_timestamps: HashMap<u64, Vec<EnteredTimestamp>>,
+}
+
+struct AggregateFunnelRow {
+ breakdown_step: Option<usize>,
+ results: HashMap<u64, ResultStruct>,
+}
+
+const DEFAULT_ENTERED_TIMESTAMP: EnteredTimestamp = EnteredTimestamp {
+ timestamp: 0.0,
+ timings: vec![],
+};
+
+pub fn process_line(line: &str) -> Value {
+ let args = parse_args(&line);
+ let mut aggregate_funnel_row = AggregateFunnelRow {
+ results: HashMap::new(),
+ breakdown_step: Option::None,
+ };
+ aggregate_funnel_row.calculate_funnel_from_user_events(&args);
+ let result: Vec<ResultStruct> = aggregate_funnel_row.results.into_values().collect();
+ json!({ "result": result })
+}
+
+#[inline(always)]
+fn parse_args(line: &str) -> Args {
+ serde_json::from_str(line).expect("Invalid JSON input")
+}
+
+impl AggregateFunnelRow {
+ #[inline(always)]
+ fn calculate_funnel_from_user_events(&mut self, args: &Args) {
+ if args.breakdown_attribution_type.starts_with("step_") {
+ self.breakdown_step = args.breakdown_attribution_type[5..].parse::<usize>().ok()
+ }
+
+ args.prop_vals.iter().for_each(|prop_val| self.loop_prop_val(args, prop_val));
+ }
+
+ #[inline(always)]
+ fn loop_prop_val(&mut self, args: &Args, prop_val: &PropVal) {
+ let mut vars = Vars {
+ interval_start_to_entered_timestamps: HashMap::new(),
+ };
+
+ let filtered_events = args.value.iter()
+ .filter(|e| {
+ if args.breakdown_attribution_type == "all_events" {
+ e.breakdown == *prop_val
+ } else {
+ true
+ }
+ })
+ .group_by(|e| e.timestamp);
+
+ for (_timestamp, events_with_same_timestamp) in &filtered_events {
+ let events_with_same_timestamp: Vec<_> = events_with_same_timestamp.collect();
+ for event in events_with_same_timestamp {
+ if !self.process_event(
+ args,
+ &mut vars,
+ &event,
+ prop_val,
+ ) {
+ return
+ }
+ }
+ }
+
+
+ // At this point, everything left in entered_timestamps is a failure, if it has made it to from_step
+ for entered_timestamp in vars.interval_start_to_entered_timestamps.values() {
+ if !self.results.contains_key(&(entered_timestamp[0].timestamp as u64)) && entered_timestamp[0].timings.len() > 0 {
+ self.results.insert(entered_timestamp[0].timestamp as u64, ResultStruct(entered_timestamp[0].timestamp as u64, -1, prop_val.clone() ));
+ }
+ }
+ }
+
+ #[inline(always)]
+ fn process_event(
+ &mut self,
+ args: &Args,
+ vars: &mut Vars,
+ event: &Event,
+ prop_val: &PropVal,
+ ) -> bool {
+ for step in event.steps.iter().rev() {
+ let mut exclusion = false;
+ let step = (if *step < 0 {
+ exclusion = true;
+ -*step
+ } else {
+ *step
+ }) as usize;
+
+ if step == 1 {
+ if !vars.interval_start_to_entered_timestamps.contains_key(&event.interval_start) && !self.results.contains_key(&event.interval_start) {
+ let mut entered_timestamp = vec![DEFAULT_ENTERED_TIMESTAMP.clone(); args.num_steps + 1];
+ entered_timestamp[0] = EnteredTimestamp { timestamp: event.interval_start as f64, timings: if args.from_step == 0 {vec![1.0]} else {vec![]} };
+ entered_timestamp[1] = EnteredTimestamp { timestamp: event.timestamp, timings: vec![event.timestamp] };
+ vars.interval_start_to_entered_timestamps.insert(event.interval_start, entered_timestamp);
+ }
+ } else {
+ for entered_timestamp in vars.interval_start_to_entered_timestamps.values_mut() {
+ let in_match_window = (event.timestamp - entered_timestamp[step - 1].timestamp) <= args.conversion_window_limit as f64;
+ let already_reached_this_step = entered_timestamp[step].timestamp == entered_timestamp[step - 1].timestamp;
+ if in_match_window && !already_reached_this_step {
+ if exclusion {
+ return false;
+ }
+ let is_unmatched_step_attribution = self.breakdown_step.map(|breakdown_step| step == breakdown_step - 1).unwrap_or(false) && *prop_val != event.breakdown;
+ if !is_unmatched_step_attribution {
+ entered_timestamp[step] = EnteredTimestamp {
+ timestamp: entered_timestamp[step - 1].timestamp,
+ timings: {
+ let mut timings = entered_timestamp[step - 1].timings.clone();
+ timings.push(event.timestamp);
+ timings
+ },
+ };
+ // check if we have hit the goal. if we have, remove it from the list and add it to the successful_timestamps
+ if entered_timestamp[args.num_steps].timestamp != 0.0 {
+ self.results.insert(
+ entered_timestamp[0].timestamp as u64,
+ ResultStruct(entered_timestamp[0].timestamp as u64, 1, prop_val.clone())
+ );
+ } else if step == args.from_step + 1 {
+ entered_timestamp[0].timings.push(1.0)
+ }
+ }
+ }
+ }
+ }
+ }
+ // If a strict funnel, clear all of the steps that we didn't match to
+ // If we are processing multiple events, skip this step, because ordering makes it complicated
+ if args.funnel_order_type == "strict" {
+ for entered_timestamp in vars.interval_start_to_entered_timestamps.values_mut() {
+ for i in 1..entered_timestamp.len() {
+ if !event.steps.contains(&(i as i8)) {
+ entered_timestamp[i] = DEFAULT_ENTERED_TIMESTAMP;
+ }
+ }
+ }
+ }
+ true
+ }
+}
\ No newline at end of file
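`trends.rs` applies the same step-advancing idea per time bucket: it keeps one entered-timestamps array per `interval_start`, records a 1 for an interval as soon as any path completes all `num_steps` inside the conversion window, and afterwards records a -1 for intervals that entered the funnel but never converted. A much-simplified TypeScript model of that bookkeeping, assuming `from_step` = 0 and ignoring exclusions and breakdown attribution:

```typescript
// Events pre-sorted by timestamp; steps are 1-based; returns 1 (converted) or -1 (dropped off) per interval.
function funnelTrend(
    events: { intervalStart: number; step: number; timestamp: number }[],
    numSteps: number,
    conversionWindowS: number
): Map<number, 1 | -1> {
    const results = new Map<number, 1 | -1>()
    const entered = new Map<number, number[]>() // intervalStart -> per-step entry timestamps

    for (const e of events) {
        if (e.step === 1) {
            // The first step opens tracking for its interval, once per interval
            if (!entered.has(e.intervalStart) && !results.has(e.intervalStart)) {
                const arr = new Array(numSteps + 1).fill(0)
                arr[1] = e.timestamp
                entered.set(e.intervalStart, arr)
            }
        } else {
            // Later steps can advance the funnel of any open interval
            for (const [intervalStart, arr] of entered) {
                const prev = arr[e.step - 1]
                if (prev !== 0 && e.timestamp - prev <= conversionWindowS && arr[e.step] === 0) {
                    arr[e.step] = e.timestamp
                    if (arr[numSteps] !== 0) {
                        results.set(intervalStart, 1) // converted within the window
                    }
                }
            }
        }
    }
    for (const intervalStart of entered.keys()) {
        if (!results.has(intervalStart)) {
            results.set(intervalStart, -1) // entered but never completed: a failure for this interval
        }
    }
    return results
}
```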
diff --git a/latest_migrations.manifest b/latest_migrations.manifest
index ed247df68feb6..bd99d5cde082a 100644
--- a/latest_migrations.manifest
+++ b/latest_migrations.manifest
@@ -5,7 +5,7 @@ contenttypes: 0002_remove_content_type_name
ee: 0016_rolemembership_organization_member
otp_static: 0002_throttling
otp_totp: 0002_auto_20190420_0723
-posthog: 0486_cohort_last_error_at
+posthog: 0487_team_survey_config
sessions: 0001_initial
social_django: 0010_uid_db_index
two_factor: 0007_auto_20201201_1019
diff --git a/package.json b/package.json
index 910ebd8c5c2e9..4c4d7065bd400 100644
--- a/package.json
+++ b/package.json
@@ -82,6 +82,8 @@
"@react-hook/size": "^2.1.2",
"@rrweb/types": "2.0.0-alpha.13",
"@sentry/react": "7.112.1",
+ "@stripe/react-stripe-js": "^2.8.0",
+ "@stripe/stripe-js": "^4.5.0",
"@tailwindcss/container-queries": "^0.1.1",
"@testing-library/dom": ">=7.21.4",
"@tiptap/core": "^2.1.16",
diff --git a/plugin-server/src/cdp/hog-executor.ts b/plugin-server/src/cdp/hog-executor.ts
index 1d46f21bf7fe6..079ecacfd538f 100644
--- a/plugin-server/src/cdp/hog-executor.ts
+++ b/plugin-server/src/cdp/hog-executor.ts
@@ -18,8 +18,8 @@ import {
} from './types'
import { convertToHogFunctionFilterGlobal } from './utils'
-const MAX_ASYNC_STEPS = 2
-const MAX_HOG_LOGS = 10
+const MAX_ASYNC_STEPS = 5
+const MAX_HOG_LOGS = 25
const MAX_LOG_LENGTH = 10000
export const DEFAULT_TIMEOUT_MS = 100
@@ -291,7 +291,7 @@ export class HogExecutor {
functions: {
print: (...args) => {
hogLogs++
- if (hogLogs == MAX_HOG_LOGS) {
+ if (hogLogs === MAX_HOG_LOGS) {
result.logs.push({
level: 'warn',
timestamp: DateTime.now(),
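For context, the `print` override this hunk touches caps per-invocation log output: the counter increments on every call, a single warning is appended when it reaches `MAX_HOG_LOGS` (now 25), and later prints are dropped. A minimal sketch of that capping pattern (names illustrative, not the executor's actual API):

```typescript
// Capped log collector: keeps the first maxEntries - 1 messages plus one warning.
function makeCappedLogger(maxEntries: number): { entries: string[]; log: (msg: string) => void } {
    const entries: string[] = []
    let count = 0
    return {
        entries,
        log: (msg: string) => {
            count++
            if (count < maxEntries) {
                entries.push(msg)
            } else if (count === maxEntries) {
                entries.push('Function exceeded maximum log entries. No more logs will be collected.')
            } // beyond the cap: silently dropped
        },
    }
}
```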
diff --git a/plugin-server/tests/cdp/hog-executor.test.ts b/plugin-server/tests/cdp/hog-executor.test.ts
index 0d58d565253b3..96b722fb9afdb 100644
--- a/plugin-server/tests/cdp/hog-executor.test.ts
+++ b/plugin-server/tests/cdp/hog-executor.test.ts
@@ -442,21 +442,24 @@ describe('Hog Executor', () => {
// Start the function
const result1 = executor.execute(invocation)
- // Run the response one time simulating a successful fetch
- setupFetchResponse(result1.invocation)
- const result2 = executor.execute(result1.invocation)
- expect(result2.finished).toBe(false)
- expect(result2.error).toBe(undefined)
- expect(result2.invocation.queue).toBe('fetch')
+
+ for (let i = 0; i < 4; i++) {
+ // Run the response one time simulating a successful fetch
+ setupFetchResponse(result1.invocation)
+ const result2 = executor.execute(result1.invocation)
+ expect(result2.finished).toBe(false)
+ expect(result2.error).toBe(undefined)
+ expect(result2.invocation.queue).toBe('fetch')
+ }
// This time we should see an error for hitting the loop limit
- setupFetchResponse(result2.invocation)
+ setupFetchResponse(result1.invocation)
const result3 = executor.execute(result1.invocation)
expect(result3.finished).toBe(true)
- expect(result3.error).toEqual('Exceeded maximum number of async steps: 2')
+ expect(result3.error).toEqual('Exceeded maximum number of async steps: 5')
expect(result3.logs.map((log) => log.message)).toEqual([
'Resuming function',
- 'Error executing function: HogVMException: Exceeded maximum number of async steps: 2',
+ 'Error executing function: HogVMException: Exceeded maximum number of async steps: 5',
])
})
})
@@ -489,6 +492,21 @@ describe('Hog Executor', () => {
'I AM FIBONACCI',
'I AM FIBONACCI',
'I AM FIBONACCI',
+ 'I AM FIBONACCI',
+ 'I AM FIBONACCI',
+ 'I AM FIBONACCI',
+ 'I AM FIBONACCI',
+ 'I AM FIBONACCI',
+ 'I AM FIBONACCI',
+ 'I AM FIBONACCI',
+ 'I AM FIBONACCI',
+ 'I AM FIBONACCI',
+ 'I AM FIBONACCI',
+ 'I AM FIBONACCI',
+ 'I AM FIBONACCI',
+ 'I AM FIBONACCI',
+ 'I AM FIBONACCI',
+ 'I AM FIBONACCI',
'Function exceeded maximum log entries. No more logs will be collected.',
expect.stringContaining(
'Error executing function: HogVMException: Execution timed out after 0.1 seconds. Performed'
@@ -519,7 +537,7 @@ describe('Hog Executor', () => {
])
})
- it('ignores events that have already used their posthogCapture', () => {
+ it('ignores events that have already used their postHogCapture', () => {
const fn = createHogFunction({
...HOG_EXAMPLES.posthog_capture,
...HOG_INPUTS_EXAMPLES.simple_fetch,
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 155c3a235f721..9db6b93b5bb8f 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -67,6 +67,12 @@ dependencies:
'@sentry/react':
specifier: 7.112.1
version: 7.112.1(react@18.2.0)
+ '@stripe/react-stripe-js':
+ specifier: ^2.8.0
+ version: 2.8.0(@stripe/stripe-js@4.5.0)(react-dom@18.2.0)(react@18.2.0)
+ '@stripe/stripe-js':
+ specifier: ^4.5.0
+ version: 4.5.0
'@tailwindcss/container-queries':
specifier: ^0.1.1
version: 0.1.1(tailwindcss@3.4.0)
@@ -7380,6 +7386,24 @@ packages:
file-system-cache: 2.3.0
dev: true
+ /@stripe/react-stripe-js@2.8.0(@stripe/stripe-js@4.5.0)(react-dom@18.2.0)(react@18.2.0):
+ resolution: {integrity: sha512-Vf1gNEuBxA9EtxiLghm2ZWmgbADNMJw4HW6eolUu0DON/6mZvWZgk0KHolN0sozNJwYp0i/8hBsDBcBUWcvnbw==}
+ peerDependencies:
+ '@stripe/stripe-js': ^1.44.1 || ^2.0.0 || ^3.0.0 || ^4.0.0
+ react: ^16.8.0 || ^17.0.0 || ^18.0.0
+ react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0
+ dependencies:
+ '@stripe/stripe-js': 4.5.0
+ prop-types: 15.8.1
+ react: 18.2.0
+ react-dom: 18.2.0(react@18.2.0)
+ dev: false
+
+ /@stripe/stripe-js@4.5.0:
+ resolution: {integrity: sha512-dMOzc58AOlsF20nYM/avzV8RFhO/vgYTY7ajLMH6mjlnZysnOHZxsECQvjEmL8Q/ukPwHkOnxSPW/QGCCnp7XA==}
+ engines: {node: '>=12.16'}
+ dev: false
+
/@sucrase/jest-plugin@3.0.0(jest@29.7.0)(sucrase@3.29.0):
resolution: {integrity: sha512-VRY6YKYImVWiRg1H3Yu24hwB1UPJDSDR62R/n+lOHR3+yDrfHEIAoddJivblMYN6U3vD+ndfTSrecZ9Jl+iGNw==}
peerDependencies:
@@ -18334,7 +18358,7 @@ packages:
react: '>=15'
dependencies:
react: 18.2.0
- unlayer-types: 1.111.0
+ unlayer-types: 1.103.0
dev: false
/react-error-boundary@3.1.4(react@18.2.0):
@@ -20881,8 +20905,8 @@ packages:
resolution: {integrity: sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==}
engines: {node: '>= 10.0.0'}
- /unlayer-types@1.111.0:
- resolution: {integrity: sha512-CjdOROIUrZXrtkLUrInMvTucEWRdWWEmPleCOhwsDTwUXg9LocjzI6drgdYoia/lyyoYPfHOXYw5SxdJk7hlvw==}
+ /unlayer-types@1.103.0:
+ resolution: {integrity: sha512-aVZS7g5F6dWEoxc0dhSDqYYncu+LIMB/SerJi6u5FKVSfTWnzA2MTpjFCbGkOOi8rUiIOabeuEOfyO/WDnarJg==}
dev: false
/unpipe@1.0.0:
diff --git a/posthog/api/project.py b/posthog/api/project.py
index e517d20a3c826..af933e440b5a3 100644
--- a/posthog/api/project.py
+++ b/posthog/api/project.py
@@ -98,6 +98,7 @@ class Meta:
"session_recording_linked_flag", # Compat with TeamSerializer
"session_recording_network_payload_capture_config", # Compat with TeamSerializer
"session_replay_config", # Compat with TeamSerializer
+ "survey_config",
"access_control", # Compat with TeamSerializer
"week_start_day", # Compat with TeamSerializer
"primary_dashboard", # Compat with TeamSerializer
@@ -159,6 +160,7 @@ class Meta:
"session_recording_linked_flag",
"session_recording_network_payload_capture_config",
"session_replay_config",
+ "survey_config",
"access_control",
"week_start_day",
"primary_dashboard",
@@ -269,6 +271,37 @@ def update(self, instance: Project, validated_data: dict[str, Any]) -> Project:
team_before_update = team.__dict__.copy()
project_before_update = instance.__dict__.copy()
+ if "survey_config" in validated_data:
+ if team.survey_config is not None and validated_data.get("survey_config") is not None:
+ validated_data["survey_config"] = {
+ **team.survey_config,
+ **validated_data["survey_config"],
+ }
+
+ if validated_data.get("survey_config") is None:
+ del team_before_update["survey_config"]
+
+ survey_config_changes_between = dict_changes_between(
+ "Survey",
+ team_before_update.get("survey_config", {}),
+ validated_data.get("survey_config", {}),
+ use_field_exclusions=True,
+ )
+ if survey_config_changes_between:
+ log_activity(
+ organization_id=cast(UUIDT, instance.organization_id),
+ team_id=instance.pk,
+ user=cast(User, self.context["request"].user),
+ was_impersonated=is_impersonated_session(request),
+ scope="Survey",
+ item_id="#",
+ activity="updated",
+ detail=Detail(
+ name="Survey Config",
+ changes=survey_config_changes_between,
+ ),
+ )
+
if (
"session_replay_config" in validated_data
and validated_data["session_replay_config"] is not None
diff --git a/posthog/api/survey.py b/posthog/api/survey.py
index 4e3bb5356ef1b..b2faf7a419186 100644
--- a/posthog/api/survey.py
+++ b/posthog/api/survey.py
@@ -646,6 +646,12 @@ def activity(self, request: request.Request, **kwargs):
return activity_page_response(activity_page, limit, page, request)
+class SurveyConfigSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = Team
+ fields = ["survey_config"]
+
+
class SurveyAPISerializer(serializers.ModelSerializer):
"""
Serializer for the exposed /api/surveys endpoint, to be used in posthog-js and for headless APIs.
@@ -732,7 +738,19 @@ def surveys(request: Request):
many=True,
).data
- return cors_response(request, JsonResponse({"surveys": surveys}))
+ serialized_survey_config: dict[str, Any] = {}
+ if team.survey_config is not None:
+ serialized_survey_config = SurveyConfigSerializer(team).data
+
+ return cors_response(
+ request,
+ JsonResponse(
+ {
+ "surveys": surveys,
+ "survey_config": serialized_survey_config.get("survey_config", None),
+ }
+ ),
+ )
@contextmanager
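With this change the public surveys endpoint returns `survey_config` alongside `surveys`. A hedged TypeScript sketch of a consumer; the token query parameter and the exact survey object shape are assumptions, not confirmed by this diff:

```typescript
interface SurveysResponse {
    surveys: unknown[] // as serialized by SurveyAPISerializer
    survey_config: Record<string, unknown> | null // team-level config, null when unset
}

async function fetchSurveys(host: string, projectApiKey: string): Promise<SurveysResponse> {
    const res = await fetch(`${host}/api/surveys/?token=${encodeURIComponent(projectApiKey)}`)
    if (!res.ok) {
        throw new Error(`surveys request failed: ${res.status}`)
    }
    return (await res.json()) as SurveysResponse
}
```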
diff --git a/posthog/api/team.py b/posthog/api/team.py
index 182eec7b1a59a..c37054c3bedc5 100644
--- a/posthog/api/team.py
+++ b/posthog/api/team.py
@@ -104,6 +104,7 @@ class Meta:
"session_recording_linked_flag",
"session_recording_network_payload_capture_config",
"session_replay_config",
+ "survey_config",
"recording_domains",
"inject_web_apps",
"surveys_opt_in",
@@ -156,6 +157,7 @@ class Meta:
"session_recording_linked_flag",
"session_recording_network_payload_capture_config",
"session_replay_config",
+ "survey_config",
"effective_membership_level",
"access_control",
"week_start_day",
@@ -328,6 +330,38 @@ def create(self, validated_data: dict[str, Any], **kwargs) -> Team:
def update(self, instance: Team, validated_data: dict[str, Any]) -> Team:
before_update = instance.__dict__.copy()
+ if "survey_config" in validated_data:
+ if instance.survey_config is not None and validated_data.get("survey_config") is not None:
+ validated_data["survey_config"] = {
+ **instance.survey_config,
+ **validated_data["survey_config"],
+ }
+
+ if validated_data.get("survey_config") is None:
+ del before_update["survey_config"]
+
+ survey_config_changes_between = dict_changes_between(
+ "Survey",
+ before_update.get("survey_config", {}),
+ validated_data.get("survey_config", {}),
+ use_field_exclusions=True,
+ )
+
+ if survey_config_changes_between:
+ log_activity(
+ organization_id=cast(UUIDT, instance.organization_id),
+ team_id=instance.pk,
+ user=cast(User, self.context["request"].user),
+ was_impersonated=is_impersonated_session(request),
+ scope="Survey",
+ item_id="",
+ activity="updated",
+ detail=Detail(
+ name="Team Survey Config",
+ changes=survey_config_changes_between,
+ ),
+ )
+
if (
"session_replay_config" in validated_data
and validated_data["session_replay_config"] is not None
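Worth noting the update semantics in both `TeamSerializer` and `ProjectSerializer` above: an incoming `survey_config` is shallow-merged over the existing one (incoming keys win), while an explicit null replaces it outright. In TypeScript terms:

```typescript
// Shallow-merge semantics for survey_config updates (sketch of the Python {**existing, **incoming}).
function mergeSurveyConfig(
    existing: Record<string, unknown> | null,
    incoming: Record<string, unknown> | null
): Record<string, unknown> | null {
    if (existing === null || incoming === null) {
        return incoming // nothing to merge, or an explicit null clears the config
    }
    return { ...existing, ...incoming } // top-level keys from the update win
}
```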
diff --git a/posthog/api/test/__snapshots__/test_action.ambr b/posthog/api/test/__snapshots__/test_action.ambr
index e31bb6984783f..22b2c810b3c44 100644
--- a/posthog/api/test/__snapshots__/test_action.ambr
+++ b/posthog/api/test/__snapshots__/test_action.ambr
@@ -58,6 +58,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -147,6 +148,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -468,6 +470,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
diff --git a/posthog/api/test/__snapshots__/test_annotation.ambr b/posthog/api/test/__snapshots__/test_annotation.ambr
index ebf0634a89a6e..4180dd11bd55c 100644
--- a/posthog/api/test/__snapshots__/test_annotation.ambr
+++ b/posthog/api/test/__snapshots__/test_annotation.ambr
@@ -58,6 +58,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -115,6 +116,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -369,6 +371,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
diff --git a/posthog/api/test/__snapshots__/test_decide.ambr b/posthog/api/test/__snapshots__/test_decide.ambr
index 7d14f67211293..6c941c07a93cb 100644
--- a/posthog/api/test/__snapshots__/test_decide.ambr
+++ b/posthog/api/test/__snapshots__/test_decide.ambr
@@ -108,6 +108,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -173,6 +174,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -239,6 +241,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -386,6 +389,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -461,6 +465,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -592,6 +597,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -668,6 +674,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -765,6 +772,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -920,6 +928,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1017,6 +1026,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1178,6 +1188,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1292,6 +1303,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
diff --git a/posthog/api/test/__snapshots__/test_early_access_feature.ambr b/posthog/api/test/__snapshots__/test_early_access_feature.ambr
index ca93bf4cc878d..d56fba9679c00 100644
--- a/posthog/api/test/__snapshots__/test_early_access_feature.ambr
+++ b/posthog/api/test/__snapshots__/test_early_access_feature.ambr
@@ -26,6 +26,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -168,6 +169,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
diff --git a/posthog/api/test/__snapshots__/test_element.ambr b/posthog/api/test/__snapshots__/test_element.ambr
index b270c6665a3ea..add01fdba35fa 100644
--- a/posthog/api/test/__snapshots__/test_element.ambr
+++ b/posthog/api/test/__snapshots__/test_element.ambr
@@ -58,6 +58,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
diff --git a/posthog/api/test/__snapshots__/test_feature_flag.ambr b/posthog/api/test/__snapshots__/test_feature_flag.ambr
index 7be6faab8299e..db27b28fd8ee2 100644
--- a/posthog/api/test/__snapshots__/test_feature_flag.ambr
+++ b/posthog/api/test/__snapshots__/test_feature_flag.ambr
@@ -467,6 +467,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -675,6 +676,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1043,6 +1045,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1184,6 +1187,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1484,6 +1488,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1601,6 +1606,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1672,6 +1678,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1736,6 +1743,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
diff --git a/posthog/api/test/__snapshots__/test_insight.ambr b/posthog/api/test/__snapshots__/test_insight.ambr
index ab131ee337104..b9f4801dcbfe0 100644
--- a/posthog/api/test/__snapshots__/test_insight.ambr
+++ b/posthog/api/test/__snapshots__/test_insight.ambr
@@ -700,6 +700,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -757,6 +758,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -890,6 +892,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1134,6 +1137,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1286,6 +1290,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1424,6 +1429,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1541,6 +1547,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1693,6 +1700,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1785,6 +1793,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1876,6 +1885,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1940,6 +1950,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
diff --git a/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr b/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr
index 91cb4c5b70902..cae9ed3e59c63 100644
--- a/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr
+++ b/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr
@@ -107,6 +107,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -219,6 +220,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -311,6 +313,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -375,6 +378,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -517,6 +521,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -581,6 +586,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -673,6 +679,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -737,6 +744,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -893,6 +901,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -980,6 +989,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1423,6 +1433,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -2116,6 +2127,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
diff --git a/posthog/api/test/__snapshots__/test_preflight.ambr b/posthog/api/test/__snapshots__/test_preflight.ambr
index 2e4c27a3fa1ba..e6642ac4aee62 100644
--- a/posthog/api/test/__snapshots__/test_preflight.ambr
+++ b/posthog/api/test/__snapshots__/test_preflight.ambr
@@ -58,6 +58,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
diff --git a/posthog/api/test/__snapshots__/test_survey.ambr b/posthog/api/test/__snapshots__/test_survey.ambr
index 10f642ef9affd..f20862f313aac 100644
--- a/posthog/api/test/__snapshots__/test_survey.ambr
+++ b/posthog/api/test/__snapshots__/test_survey.ambr
@@ -173,6 +173,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
diff --git a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr
index fbfa40cef3d19..911d8d728be53 100644
--- a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr
+++ b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr
@@ -58,6 +58,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -193,6 +194,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -392,6 +394,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -712,6 +715,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1096,6 +1100,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1160,6 +1165,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1327,6 +1333,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1384,6 +1391,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1517,6 +1525,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1633,6 +1642,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1837,6 +1847,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -2057,6 +2068,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -2149,6 +2161,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -2240,6 +2253,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -2304,6 +2318,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -2400,6 +2415,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -2496,6 +2512,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -2664,6 +2681,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -2777,6 +2795,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -2869,6 +2888,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -2960,6 +2980,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -3024,6 +3045,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -3096,6 +3118,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -3229,6 +3252,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -3345,6 +3369,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -3557,6 +3582,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -3756,6 +3782,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -3918,6 +3945,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -4101,6 +4129,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -4568,6 +4597,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -4685,6 +4715,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -4861,6 +4892,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -4998,6 +5030,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -5101,6 +5134,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -5192,6 +5226,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -5256,6 +5291,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -5320,6 +5356,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -5453,6 +5490,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -5543,6 +5581,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -5607,6 +5646,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -5740,6 +5780,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -5856,6 +5897,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -6048,6 +6090,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -6161,6 +6204,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -6267,6 +6311,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -6358,6 +6403,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -6422,6 +6468,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -6486,6 +6533,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -6619,6 +6667,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -6742,6 +6791,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -6922,6 +6972,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -7011,6 +7062,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -7099,6 +7151,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -7191,6 +7244,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -7282,6 +7336,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -7346,6 +7401,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -7410,6 +7466,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -7557,6 +7614,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -7673,6 +7731,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -7864,6 +7923,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -8063,6 +8123,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -8243,6 +8304,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -8451,6 +8513,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -8547,6 +8610,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -8723,6 +8787,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -8914,6 +8979,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -9031,6 +9097,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -9207,6 +9274,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -9495,6 +9563,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
diff --git a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr
index a2c541d95986d..d59c1815c0042 100644
--- a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr
+++ b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr
@@ -58,6 +58,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -147,6 +148,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -420,6 +422,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -521,6 +524,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
diff --git a/posthog/api/test/test_survey.py b/posthog/api/test/test_survey.py
index e77e41d2e489b..4f171d91b6c14 100644
--- a/posthog/api/test/test_survey.py
+++ b/posthog/api/test/test_survey.py
@@ -12,7 +12,7 @@
from posthog.api.survey import nh3_clean_with_allow_list
from posthog.constants import AvailableFeature
-from posthog.models import Action, FeatureFlag
+from posthog.models import Action, FeatureFlag, Team
from posthog.models.cohort.cohort import Cohort
from posthog.models.feedback.survey import Survey
from posthog.test.base import (
@@ -2573,6 +2573,25 @@ def _get_surveys(
REMOTE_ADDR=ip,
)
+ def test_can_get_survey_config(self):
+ survey_appearance = {
+ "thankYouMessageHeader": "Thanks for your feedback!",
+ "thankYouMessageDescription": "We'll use it to make notebooks better",
+ }
+ self.team.survey_config = {"appearance": survey_appearance}
+
+ self.team.save()
+
+ self.team = Team.objects.get(id=self.team.id)
+
+ self.client.logout()
+ response = self._get_surveys()
+ response_data = response.json()
+ assert response.status_code == status.HTTP_200_OK, response_data
+ assert response_data["survey_config"] is not None
+ assert response_data["survey_config"]["appearance"] == survey_appearance
+
def test_list_surveys_with_actions(self):
action = Action.objects.create(
team=self.team,
diff --git a/posthog/api/test/test_team.py b/posthog/api/test/test_team.py
index 405197ecc637d..74a5995d2ac5b 100644
--- a/posthog/api/test/test_team.py
+++ b/posthog/api/test/test_team.py
@@ -887,6 +887,27 @@ def test_can_set_and_unset_session_recording_network_payload_capture_config(self
second_get_response = self.client.get("/api/environments/@current/")
assert second_get_response.json()["session_recording_network_payload_capture_config"] is None
+ def test_can_set_and_unset_survey_settings(self):
+ survey_appearance = {
+ "thankYouMessageHeader": "Thanks for your feedback!",
+ "thankYouMessageDescription": "We'll use it to make notebooks better",
+ "backgroundColor": "#ffcc99",
+ }
+
+ self._patch_config("survey_config", {"appearance": survey_appearance})
+ self._assert_surveys_config_is({"appearance": survey_appearance})
+
+ survey_appearance["zIndex"] = "100001"
+ self._patch_config("survey_config", {"appearance": survey_appearance})
+ self._assert_surveys_config_is({"appearance": survey_appearance})
+
+ survey_appearance["thankYouMessageHeader"] = "Thanks!"
+ self._patch_config("survey_config", {"appearance": survey_appearance})
+ self._assert_surveys_config_is({"appearance": survey_appearance})
+
+ self._patch_config("survey_config", None)
+ self._assert_surveys_config_is(None)
+
def test_can_set_and_unset_session_replay_config(self) -> None:
# can set
self._patch_session_replay_config({"record_canvas": True})
@@ -1054,23 +1075,34 @@ def test_can_complete_product_onboarding(
)
def _assert_replay_config_is(self, expected: dict[str, Any] | None) -> HttpResponse:
+ return self._assert_config_is("session_replay_config", expected)
+
+ def _assert_surveys_config_is(self, expected: dict[str, Any] | None) -> HttpResponse:
+ return self._assert_config_is("survey_config", expected)
+
+ def _assert_config_is(self, config_name: str, expected: dict[str, Any] | None) -> HttpResponse:
get_response = self.client.get("/api/environments/@current/")
assert get_response.status_code == status.HTTP_200_OK, get_response.json()
- assert get_response.json()["session_replay_config"] == expected
+ assert get_response.json()[config_name] == expected
return get_response
- def _patch_session_replay_config(
- self, config: dict[str, Any] | None, expected_status: int = status.HTTP_200_OK
+ def _patch_config(
+ self, config_name: str, config: dict[str, Any] | None, expected_status: int = status.HTTP_200_OK
) -> HttpResponse:
patch_response = self.client.patch(
"/api/environments/@current/",
- {"session_replay_config": config},
+ {config_name: config},
)
assert patch_response.status_code == expected_status, patch_response.json()
return patch_response
+ def _patch_session_replay_config(
+ self, config: dict[str, Any] | None, expected_status: int = status.HTTP_200_OK
+ ) -> HttpResponse:
+ return self._patch_config("session_replay_config", config, expected_status)
+
def _assert_linked_flag_config(self, expected_config: dict | None) -> HttpResponse:
response = self.client.get("/api/environments/@current/")
assert response.status_code == status.HTTP_200_OK
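The `_patch_config` / `_assert_config_is` pair above generalizes the old replay-only helpers to any JSON config column on the team. A sketch of how a future test in the same class could reuse them (the `heatmaps_config` field name is purely illustrative, not an existing column):

    def test_can_set_and_unset_some_config(self):
        # set, update, then unset an arbitrary team config column via the generic helpers
        self._patch_config("heatmaps_config", {"enabled": True})  # hypothetical column
        self._assert_config_is("heatmaps_config", {"enabled": True})
        self._patch_config("heatmaps_config", None)
        self._assert_config_is("heatmaps_config", None)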
diff --git a/posthog/cdp/templates/helpers.py b/posthog/cdp/templates/helpers.py
index 093c6ecbdb4ca..726207d93ca6b 100644
--- a/posthog/cdp/templates/helpers.py
+++ b/posthog/cdp/templates/helpers.py
@@ -23,7 +23,7 @@ def setUp(self):
side_effect=lambda *args: print("[DEBUG HogFunctionFetch]", *args) or self.mock_fetch_response(*args) # noqa: T201
)
self.mock_posthog_capture = MagicMock(
- side_effect=lambda *args: print("[DEBUG HogFunctionPosthogCapture]", *args) # noqa: T201
+ side_effect=lambda *args: print("[DEBUG HogFunctionPostHogCapture]", *args) # noqa: T201
)
mock_fetch_response = lambda *args: {"status": 200, "body": {}}
diff --git a/posthog/cdp/templates/webhook/template_webhook.py b/posthog/cdp/templates/webhook/template_webhook.py
index cc3eb09746277..23ee39cb542cb 100644
--- a/posthog/cdp/templates/webhook/template_webhook.py
+++ b/posthog/cdp/templates/webhook/template_webhook.py
@@ -9,16 +9,21 @@
icon_url="/static/posthog-icon.svg",
category=["Custom"],
hog="""
-let res := fetch(inputs.url, {
+let payload := {
'headers': inputs.headers,
'body': inputs.body,
'method': inputs.method
-});
+};
if (inputs.debug) {
- print('Response', res.status, res.body);
+ print('Request', inputs.url, payload);
}
+let res := fetch(inputs.url, payload);
+
+if (inputs.debug) {
+ print('Response', res.status, res.body);
+}
""".strip(),
inputs_schema=[
{
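The template change above splits the single fetch call into a build-log-fetch-log sequence, so enabling `inputs.debug` records the outgoing request even when the fetch itself fails. A plain-Python sketch of the same flow (the `fetch` parameter stands in for the Hog built-in and is stubbed here):

    def send_webhook(url: str, payload: dict, debug: bool, fetch) -> None:
        if debug:
            print("Request", url, payload)  # logged before the call, like the Hog template
        res = fetch(url, payload)
        if debug:
            print("Response", res["status"], res["body"])

    send_webhook(
        "https://example.com/hook",
        {"headers": {}, "body": {"hello": "world"}, "method": "POST"},
        debug=True,
        fetch=lambda url, payload: {"status": 200, "body": {}},  # stubbed transport
    )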
diff --git a/posthog/cdp/templates/webhook/test_template_webhook.py b/posthog/cdp/templates/webhook/test_template_webhook.py
index 3f2b2c0d80c17..daf34e6877dd9 100644
--- a/posthog/cdp/templates/webhook/test_template_webhook.py
+++ b/posthog/cdp/templates/webhook/test_template_webhook.py
@@ -36,4 +36,9 @@ def test_prints_when_debugging(self):
assert self.get_mock_fetch_calls()[0] == snapshot(
("https://posthog.com", {"headers": {}, "body": {"hello": "world"}, "method": "GET"})
)
- assert self.get_mock_print_calls() == snapshot([("Response", 200, {})])
+ assert self.get_mock_print_calls() == snapshot(
+ [
+ ("Request", "https://posthog.com", {"headers": {}, "body": {"hello": "world"}, "method": "GET"}),
+ ("Response", 200, {}),
+ ]
+ )
diff --git a/posthog/hogql/functions/mapping.py b/posthog/hogql/functions/mapping.py
index 08bee305b933e..0b1830e21a264 100644
--- a/posthog/hogql/functions/mapping.py
+++ b/posthog/hogql/functions/mapping.py
@@ -388,7 +388,7 @@ def compare_types(arg_types: list[ConstantType], sig_arg_types: tuple[ConstantTy
"reinterpretAsUUID": HogQLFunctionMeta("reinterpretAsUUID", 1, 1),
"toInt": HogQLFunctionMeta("accurateCastOrNull", 1, 1, suffix_args=[ast.Constant(value="Int64")]),
"_toInt64": HogQLFunctionMeta("toInt64", 1, 1),
- "_toUInt64": HogQLFunctionMeta("toUInt64", 1, 1),
+ "_toUInt64": HogQLFunctionMeta("toUInt64", 1, 1, signatures=[((UnknownType(),), IntegerType())]),
"_toUInt128": HogQLFunctionMeta("toUInt128", 1, 1),
"toFloat": HogQLFunctionMeta("accurateCastOrNull", 1, 1, suffix_args=[ast.Constant(value="Float64")]),
"toDecimal": HogQLFunctionMeta("accurateCastOrNull", 1, 1, suffix_args=[ast.Constant(value="Decimal64")]),
diff --git a/posthog/hogql/metadata.py b/posthog/hogql/metadata.py
index 1568d0bf766f7..34730b7b1a19b 100644
--- a/posthog/hogql/metadata.py
+++ b/posthog/hogql/metadata.py
@@ -42,10 +42,10 @@ def get_hogql_metadata(
)
if query.language == HogLanguage.HOG:
program = parse_program(query.query)
- create_bytecode(program, supported_functions={"fetch", "posthogCapture"}, args=[], context=context)
+ create_bytecode(program, supported_functions={"fetch", "postHogCapture"}, args=[], context=context)
elif query.language == HogLanguage.HOG_TEMPLATE:
string = parse_string_template(query.query)
- create_bytecode(string, supported_functions={"fetch", "posthogCapture"}, args=[], context=context)
+ create_bytecode(string, supported_functions={"fetch", "postHogCapture"}, args=[], context=context)
elif query.language == HogLanguage.HOG_QL_EXPR:
node = parse_expr(query.query)
if query.sourceQuery is not None:
diff --git a/posthog/hogql_queries/insights/funnels/funnel_trends_udf.py b/posthog/hogql_queries/insights/funnels/funnel_trends_udf.py
index 2f928e014daf4..d171249520031 100644
--- a/posthog/hogql_queries/insights/funnels/funnel_trends_udf.py
+++ b/posthog/hogql_queries/insights/funnels/funnel_trends_udf.py
@@ -4,6 +4,7 @@
from posthog.hogql.constants import HogQLQuerySettings
from posthog.hogql.parser import parse_select
from posthog.hogql_queries.insights.funnels import FunnelTrends
+from posthog.hogql_queries.insights.funnels.funnel_udf import udf_event_array_filter
from posthog.hogql_queries.insights.utils.utils import get_start_of_interval_hogql_str
from posthog.schema import BreakdownType, BreakdownAttributionType
from posthog.utils import DATERANGE_MAP
@@ -78,6 +79,7 @@ def get_query(self) -> ast.SelectQuery:
parse_select(
f"""
SELECT
+ arraySort(t -> t.1, groupArray(tuple(toFloat(timestamp), _toUInt64(toDateTime({get_start_of_interval_hogql_str(self.context.interval.value, team=self.context.team, source='timestamp')})), {prop_selector}, arrayFilter((x) -> x != 0, [{steps}{exclusions}])))) as events_array,
arrayJoin({fn}(
{from_step},
{max_steps},
@@ -85,9 +87,9 @@ def get_query(self) -> ast.SelectQuery:
'{breakdown_attribution_string}',
'{self.context.funnelsFilter.funnelOrderType}',
{prop_vals},
- arraySort(t -> t.1, groupArray(tuple(toFloat(timestamp), {get_start_of_interval_hogql_str(self.context.interval.value, team=self.context.team, source='timestamp')}, {prop_selector}, arrayFilter((x) -> x != 0, [{steps}{exclusions}]))))
+ {udf_event_array_filter(self.context.funnelsFilter.funnelOrderType)}
)) as af_tuple,
- toTimeZone(af_tuple.1, '{self.context.team.timezone}') as entrance_period_start,
+ toTimeZone(toDateTime(_toUInt64(af_tuple.1)), '{self.context.team.timezone}') as entrance_period_start,
af_tuple.2 as success_bool,
af_tuple.3 as breakdown
FROM {{inner_event_query}}
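In this hunk the entrance period start is computed once per event, packed into the events tuple as `_toUInt64(toDateTime(...))`, carried through the UDF, and unpacked from `af_tuple.1` with `toDateTime(_toUInt64(...))`. A plain-Python sketch of that round trip, assuming the value travels as whole epoch seconds:

    from datetime import datetime, timezone

    def encode_interval_start(dt: datetime) -> int:
        # _toUInt64(toDateTime(...)): the interval start as unsigned epoch seconds
        return int(dt.timestamp())

    def decode_entrance_period_start(value: float) -> datetime:
        # toDateTime(_toUInt64(af_tuple.1)): truncate back to whole seconds
        return datetime.fromtimestamp(int(value), tz=timezone.utc)

    start = datetime(2024, 1, 1, tzinfo=timezone.utc)
    assert decode_entrance_period_start(float(encode_interval_start(start))) == start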
diff --git a/posthog/hogql_queries/insights/funnels/funnel_udf.py b/posthog/hogql_queries/insights/funnels/funnel_udf.py
index e867975d04f45..a9a3ef9e3f5c4 100644
--- a/posthog/hogql_queries/insights/funnels/funnel_udf.py
+++ b/posthog/hogql_queries/insights/funnels/funnel_udf.py
@@ -3,13 +3,28 @@
from posthog.hogql import ast
from posthog.hogql.parser import parse_select, parse_expr
from posthog.hogql_queries.insights.funnels.base import FunnelBase
-from posthog.schema import BreakdownType, BreakdownAttributionType
+from posthog.schema import BreakdownType, BreakdownAttributionType, StepOrderValue
from posthog.utils import DATERANGE_MAP
TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S"
HUMAN_READABLE_TIMESTAMP_FORMAT = "%-d-%b-%Y"
+# This is used to reduce the number of events we look at in strict funnels
+# We remove a non-matching event if the event just before it is also non-matching,
+# shares the same breakdown value, and has an earlier timestamp
+# arrayRotateRight turns [1,2,3] into [3,1,2], pairing each event with its predecessor
+# For some reason, this uses much less memory than using indexing in ClickHouse to check the previous element
+def udf_event_array_filter(funnelOrderType: StepOrderValue | None):
+ if funnelOrderType == "strict":
+ return f"""
+ arrayFilter(
+ (x, x2) -> not (empty(x.4) and empty(x2.4) and x.3 == x2.3 and x.1 > x2.1),
+ events_array,
+ arrayRotateRight(events_array, 1))
+ """
+ return "events_array"
+
+
class FunnelUDF(FunnelBase):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@@ -37,20 +52,6 @@ def conversion_window_limit(self) -> int:
self.context.funnelWindowInterval * DATERANGE_MAP[self.context.funnelWindowIntervalUnit].total_seconds()
)
- # This is used to reduce the number of events we look at in strict funnels
- # We remove a non-matching event if there was already one before it (that don't have the same timestamp)
- # arrayRotateRight turns [1,2,3] into [3,1,2]
- # For some reason, this uses much less memory than using indexing in clickhouse to check the previous element
- def _array_filter(self):
- if self.context.funnelsFilter.funnelOrderType == "strict":
- return f"""
- arrayFilter(
- (x, x2) -> not (empty(x.4) and empty(x2.4) and x.1 > x2.1),
- events_array,
- arrayRotateRight(events_array, 1))
- """
- return "events_array"
-
# This is the function that calls the UDF
# This is used by both the query itself and the actors query
def _inner_aggregation_query(self):
@@ -114,7 +115,7 @@ def matched_event_arrays_selects():
'{breakdown_attribution_string}',
'{self.context.funnelsFilter.funnelOrderType}',
{prop_vals},
- {self._array_filter()}
+ {udf_event_array_filter(self.context.funnelsFilter.funnelOrderType)}
)) as af_tuple,
af_tuple.1 as step_reached,
af_tuple.1 + 1 as steps, -- Backward compatibility
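A plain-Python model of `udf_event_array_filter`'s strict-mode behaviour (a sketch, not the ClickHouse expression): events are `(timestamp, uuid, prop, steps)` tuples, each paired with its predecessor via a right rotation; an event is dropped when both it and its predecessor match no step, they share a breakdown value, and the event is strictly later.

    def strict_prefilter(events_array: list[tuple]) -> list[tuple]:
        rotated = events_array[-1:] + events_array[:-1]  # arrayRotateRight(events_array, 1)
        return [
            x
            for x, x2 in zip(events_array, rotated)
            if not (not x[3] and not x2[3] and x[2] == x2[2] and x[0] > x2[0])
        ]

    events = [
        (1.0, "a", ["US"], [1]),  # matches step 1, always kept
        (2.0, "b", ["US"], []),   # first non-matching event for this breakdown, kept
        (3.0, "c", ["US"], []),   # redundant non-matching repeat, dropped
        (3.0, "d", ["DE"], []),   # different breakdown value, kept
    ]
    assert [e[1] for e in strict_prefilter(events)] == ["a", "b", "d"]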
diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors_udf.ambr
index 7432cde6693bb..2490d1b6aa554 100644
--- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors_udf.ambr
+++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors_udf.ambr
@@ -389,7 +389,8 @@
breakdown AS final_prop
FROM
(SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array,
- arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple,
+ arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3)
+ and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple,
af_tuple.1 AS step_reached,
plus(af_tuple.1, 1) AS steps,
af_tuple.2 AS breakdown,
@@ -446,7 +447,8 @@
first_timestamp
FROM
(SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array,
- arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple,
+ arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3)
+ and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple,
af_tuple.1 AS step_reached,
plus(af_tuple.1, 1) AS steps,
af_tuple.2 AS breakdown,
@@ -500,7 +502,8 @@
FROM
(SELECT aggregation_target AS actor_id, matched_events_array[plus(step_reached, 1)] AS matching_events, (matched_events_array[1][1]).1 AS timestamp, nullIf((matched_events_array[2][1]).1, 0) AS final_timestamp, (matched_events_array[1][1]).1 AS first_timestamp, steps AS steps, final_timestamp, first_timestamp
FROM
- (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target
+ (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3)
+ and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target
FROM
(SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, e.uuid AS uuid, e.`$session_id` AS `$session_id`, e.`$window_id` AS `$window_id`, if(equals(e.event, '$pageview'), 1, 0) AS step_0, if(equals(e.event, 'insight analyzed'), 1, 0) AS step_1
FROM events AS e
@@ -573,7 +576,8 @@
first_timestamp
FROM
(SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array,
- arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple,
+ arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3)
+ and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple,
af_tuple.1 AS step_reached,
plus(af_tuple.1, 1) AS steps,
af_tuple.2 AS breakdown,
@@ -627,7 +631,8 @@
FROM
(SELECT aggregation_target AS actor_id, matched_events_array[plus(step_reached, 1)] AS matching_events, (matched_events_array[1][1]).1 AS timestamp, nullIf((matched_events_array[2][1]).1, 0) AS final_timestamp, (matched_events_array[1][1]).1 AS first_timestamp, steps AS steps, final_timestamp, first_timestamp
FROM
- (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target
+ (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array, arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3)
+ and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple, af_tuple.1 AS step_reached, plus(af_tuple.1, 1) AS steps, af_tuple.2 AS breakdown, af_tuple.3 AS timings, af_tuple.4 AS matched_event_uuids_array_array, groupArray(tuple(timestamp, uuid, `$session_id`, `$window_id`)) AS user_events, mapFromArrays(arrayMap(x -> x.2, user_events), user_events) AS user_events_map, arrayMap(matched_event_uuids_array -> arrayMap(event_uuid -> user_events_map[event_uuid], arrayDistinct(matched_event_uuids_array)), matched_event_uuids_array_array) AS matched_events_array, aggregation_target AS aggregation_target
FROM
(SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp, if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id) AS aggregation_target, e.uuid AS uuid, e.`$session_id` AS `$session_id`, e.`$window_id` AS `$window_id`, if(equals(e.event, '$pageview'), 1, 0) AS step_0, if(equals(e.event, 'insight analyzed'), 1, 0) AS step_1
FROM events AS e
diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons_udf.ambr
index 031aa7363bed0..118e756719797 100644
--- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons_udf.ambr
+++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons_udf.ambr
@@ -9,7 +9,8 @@
matched_events_array[1] AS matching_events
FROM
(SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array,
- arrayJoin(aggregate_funnel_array_v0(3, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple,
+ arrayJoin(aggregate_funnel_array_v0(3, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3)
+ and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple,
af_tuple.1 AS step_reached,
plus(af_tuple.1, 1) AS steps,
af_tuple.2 AS breakdown,
@@ -83,7 +84,8 @@
matched_events_array[2] AS matching_events
FROM
(SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array,
- arrayJoin(aggregate_funnel_array_v0(3, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple,
+ arrayJoin(aggregate_funnel_array_v0(3, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3)
+ and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple,
af_tuple.1 AS step_reached,
plus(af_tuple.1, 1) AS steps,
af_tuple.2 AS breakdown,
@@ -157,7 +159,8 @@
matched_events_array[2] AS matching_events
FROM
(SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array,
- arrayJoin(aggregate_funnel_array_v0(3, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple,
+ arrayJoin(aggregate_funnel_array_v0(3, 1209600, 'first_touch', 'strict', [[]], arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3)
+ and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple,
af_tuple.1 AS step_reached,
plus(af_tuple.1, 1) AS steps,
af_tuple.2 AS breakdown,
diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr
index d12ffea2f6bdd..a57dc6d44d3ea 100644
--- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr
+++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr
@@ -15,7 +15,8 @@
if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop
FROM
(SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array,
- arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple,
+ arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3)
+ and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple,
af_tuple.1 AS step_reached,
plus(af_tuple.1, 1) AS steps,
af_tuple.2 AS breakdown,
@@ -84,7 +85,8 @@
if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop
FROM
(SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array,
- arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'step_1', 'strict', groupUniqArray(prop), arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple,
+ arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'step_1', 'strict', groupUniqArray(prop), arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3)
+ and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple,
af_tuple.1 AS step_reached,
plus(af_tuple.1, 1) AS steps,
af_tuple.2 AS breakdown,
@@ -160,7 +162,8 @@
if(ifNull(less(row_number, 25), 0), breakdown, ['Other']) AS final_prop
FROM
(SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1)])))) AS events_array,
- arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple,
+ arrayJoin(aggregate_funnel_array_v0(2, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3)
+ and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple,
af_tuple.1 AS step_reached,
plus(af_tuple.1, 1) AS steps,
af_tuple.2 AS breakdown,
@@ -234,7 +237,8 @@
if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop
FROM
(SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array,
- arrayJoin(aggregate_funnel_v0(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple,
+ arrayJoin(aggregate_funnel_v0(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3)
+ and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple,
af_tuple.1 AS step_reached,
plus(af_tuple.1, 1) AS steps,
af_tuple.2 AS breakdown,
@@ -311,7 +315,8 @@
if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop
FROM
(SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array,
- arrayJoin(aggregate_funnel_v0(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple,
+ arrayJoin(aggregate_funnel_v0(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3)
+ and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple,
af_tuple.1 AS step_reached,
plus(af_tuple.1, 1) AS steps,
af_tuple.2 AS breakdown,
@@ -388,7 +393,8 @@
if(ifNull(less(row_number, 25), 0), breakdown, 'Other') AS final_prop
FROM
(SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), uuid, prop, arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array,
- arrayJoin(aggregate_funnel_v0(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple,
+ arrayJoin(aggregate_funnel_v0(3, 1209600, 'first_touch', 'strict', groupUniqArray(prop), arrayFilter((x, x2) -> not(and(empty(x.4), empty(x2.4), ifNull(equals(x.3, x2.3), isNull(x.3)
+ and isNull(x2.3)), ifNull(greater(x.1, x2.1), 0))), events_array, arrayRotateRight(events_array, 1)))) AS af_tuple,
af_tuple.1 AS step_reached,
plus(af_tuple.1, 1) AS steps,
af_tuple.2 AS breakdown,
diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_udf.ambr
index bded4b095c3f5..043f02570c02f 100644
--- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_udf.ambr
+++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_udf.ambr
@@ -7,8 +7,9 @@
if(ifNull(greater(reached_from_step_count, 0), 0), round(multiply(divide(reached_to_step_count, reached_from_step_count), 100), 2), 0) AS conversion_rate,
data.breakdown AS prop
FROM
- (SELECT arrayJoin(aggregate_funnel_array_trends_v0(0, 3, 1209600, 'first_touch', 'ordered', [[]], arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toStartOfDay(timestamp), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple,
- toTimeZone(af_tuple.1, 'UTC') AS entrance_period_start,
+ (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toUInt64(toDateTime(toStartOfDay(timestamp), 'UTC')), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array,
+ arrayJoin(aggregate_funnel_array_trends_v0(0, 3, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple,
+ toTimeZone(toDateTime(toUInt64(af_tuple.1), 'UTC'), 'UTC') AS entrance_period_start,
af_tuple.2 AS success_bool,
af_tuple.3 AS breakdown
FROM
@@ -52,8 +53,9 @@
if(ifNull(greater(reached_from_step_count, 0), 0), round(multiply(divide(reached_to_step_count, reached_from_step_count), 100), 2), 0) AS conversion_rate,
data.breakdown AS prop
FROM
- (SELECT arrayJoin(aggregate_funnel_array_trends_v0(0, 3, 1209600, 'first_touch', 'ordered', [[]], arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toStartOfDay(timestamp), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple,
- toTimeZone(af_tuple.1, 'US/Pacific') AS entrance_period_start,
+ (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toUInt64(toDateTime(toStartOfDay(timestamp), 'US/Pacific')), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array,
+ arrayJoin(aggregate_funnel_array_trends_v0(0, 3, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple,
+ toTimeZone(toDateTime(toUInt64(af_tuple.1), 'US/Pacific'), 'US/Pacific') AS entrance_period_start,
af_tuple.2 AS success_bool,
af_tuple.3 AS breakdown
FROM
@@ -97,8 +99,9 @@
if(ifNull(greater(reached_from_step_count, 0), 0), round(multiply(divide(reached_to_step_count, reached_from_step_count), 100), 2), 0) AS conversion_rate,
data.breakdown AS prop
FROM
- (SELECT arrayJoin(aggregate_funnel_array_trends_v0(0, 3, 1209600, 'first_touch', 'ordered', [[]], arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toStartOfWeek(timestamp, 0), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))))) AS af_tuple,
- toTimeZone(af_tuple.1, 'UTC') AS entrance_period_start,
+ (SELECT arraySort(t -> t.1, groupArray(tuple(accurateCastOrNull(timestamp, 'Float64'), toUInt64(toDateTime(toStartOfWeek(timestamp, 0), 'UTC')), [], arrayFilter(x -> ifNull(notEquals(x, 0), 1), [multiply(1, step_0), multiply(2, step_1), multiply(3, step_2)])))) AS events_array,
+ arrayJoin(aggregate_funnel_array_trends_v0(0, 3, 1209600, 'first_touch', 'ordered', [[]], events_array)) AS af_tuple,
+ toTimeZone(toDateTime(toUInt64(af_tuple.1), 'UTC'), 'UTC') AS entrance_period_start,
af_tuple.2 AS success_bool,
af_tuple.3 AS breakdown
FROM
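
The funnel-trends snapshots change how the entrance period crosses the UDF boundary. Previously the bucket start was passed through as a DateTime and re-stamped with toTimeZone on the way out, which could shift buckets in non-UTC projects; now the bucket is encoded as Unix epoch seconds (toUInt64(toDateTime(...))) before the UDF call and decoded with toDateTime(toUInt64(af_tuple.1), tz) afterwards, so the round trip is lossless and the team timezone is applied exactly once. A small sketch of the round trip, assuming the UDF boundary only preserves plain numbers:

```python
from datetime import datetime
from zoneinfo import ZoneInfo

tz = ZoneInfo("US/Pacific")  # the team timezone from the snapshot

def bucket_to_epoch(bucket_start: datetime) -> int:
    # Before crossing the UDF boundary: collapse to Unix seconds.
    return int(bucket_start.timestamp())

def epoch_to_bucket(epoch: int) -> datetime:
    # After the UDF returns: reattach the team timezone explicitly.
    return datetime.fromtimestamp(epoch, tz)

day_start = datetime(2021, 6, 7, 0, 0, tzinfo=tz)
assert epoch_to_bucket(bucket_to_epoch(day_start)) == day_start
```
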
diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_strict_udf.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_strict_udf.py
index 934f55babcc0b..6b6eb7d0f06db 100644
--- a/posthog/hogql_queries/insights/funnels/test/test_funnel_strict_udf.py
+++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_strict_udf.py
@@ -4,9 +4,10 @@
from freezegun import freeze_time
from hogql_parser import parse_expr
-from posthog.constants import INSIGHT_FUNNELS
+from posthog.constants import INSIGHT_FUNNELS, FunnelOrderType
from posthog.hogql.constants import HogQLGlobalSettings, MAX_BYTES_BEFORE_EXTERNAL_GROUP_BY
from posthog.hogql.query import execute_hogql_query
+from posthog.hogql_queries.insights.funnels.funnel_udf import udf_event_array_filter
from posthog.hogql_queries.insights.funnels.funnels_query_runner import FunnelsQueryRunner
from posthog.hogql_queries.insights.funnels.test.test_funnel_strict import (
BaseTestFunnelStrictStepsBreakdown,
@@ -15,7 +16,14 @@
BaseTestFunnelStrictStepsConversionTime,
)
from posthog.hogql_queries.legacy_compatibility.filter_to_query import filter_to_query
-from posthog.schema import FunnelsQuery
+from posthog.schema import (
+ FunnelsQuery,
+ EventsNode,
+ BreakdownFilter,
+ FunnelsFilter,
+ BreakdownAttributionType,
+ StepOrderValue,
+)
from posthog.test.base import _create_person, _create_event
@@ -49,7 +57,9 @@ def test_redundant_event_filtering_strict_funnel(self):
query = cast(FunnelsQuery, filter_to_query(filters))
runner = FunnelsQueryRunner(query=query, team=self.team)
inner_aggregation_query = runner.funnel_class._inner_aggregation_query()
- inner_aggregation_query.select.append(parse_expr(f"{runner.funnel_class._array_filter()} AS filtered_array"))
+ inner_aggregation_query.select.append(
+ parse_expr(f"{udf_event_array_filter(StepOrderValue.STRICT)} AS filtered_array")
+ )
inner_aggregation_query.having = None
response = execute_hogql_query(
query_type="FunnelsQuery",
@@ -64,6 +74,43 @@ def test_redundant_event_filtering_strict_funnel(self):
# Make sure the events have been condensed down to one
self.assertEqual(1, len(response.results[0][-1]))
+ def test_different_prop_val_in_strict_filter(self):
+ funnels_query = FunnelsQuery(
+ series=[EventsNode(event="first"), EventsNode(event="second")],
+ breakdownFilter=BreakdownFilter(breakdown="bd"),
+ funnelsFilter=FunnelsFilter(funnelOrderType=FunnelOrderType.STRICT),
+ )
+
+ _create_person(
+ distinct_ids=["many_other_events"],
+ team_id=self.team.pk,
+ properties={"test": "okay"},
+ )
+ _create_event(team=self.team, event="first", distinct_id="many_other_events", properties={"bd": "one"})
+ _create_event(team=self.team, event="first", distinct_id="many_other_events", properties={"bd": "two"})
+ _create_event(team=self.team, event="unmatched", distinct_id="many_other_events", properties={"bd": "one"})
+ _create_event(team=self.team, event="unmatched", distinct_id="many_other_events", properties={"bd": "two"})
+ _create_event(team=self.team, event="second", distinct_id="many_other_events", properties={"bd": "one"})
+ _create_event(team=self.team, event="second", distinct_id="many_other_events", properties={"bd": "two"})
+
+ # First-touch attribution (just "one")
+ results = FunnelsQueryRunner(query=funnels_query, team=self.team).calculate().results
+
+ assert 2 == len(results[0])
+ assert results[0][-1]["count"] == 0
+ assert all(result["breakdown"] == ["one"] for result in results[0])
+
+ # All events attribution
+ assert funnels_query.funnelsFilter is not None
+ funnels_query.funnelsFilter.breakdownAttributionType = BreakdownAttributionType.ALL_EVENTS
+ results = FunnelsQueryRunner(query=funnels_query, team=self.team).calculate().results
+
+ assert 2 == len(results)
+ one = next(x for x in results if x[0]["breakdown"] == ["one"])
+ assert one[-1]["count"] == 0
+ two = next(x for x in results if x[0]["breakdown"] == ["two"])
+ assert two[-1]["count"] == 0
+
def test_multiple_events_same_timestamp_doesnt_blow_up(self):
_create_person(distinct_ids=["test"], team_id=self.team.pk)
with freeze_time("2024-01-10T12:01:00"):
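
One smaller refactor in this test file: the array filter expression is no longer fetched from a private method on the funnel class (runner.funnel_class._array_filter()) but from a standalone udf_event_array_filter helper keyed on the step order, which is why StepOrderValue is now imported. Roughly, the helper can be thought of as dispatching like this (a hypothetical sketch with abbreviated expression bodies; the real implementation lives in funnel_udf.py):

```python
from posthog.schema import StepOrderValue

def udf_event_array_filter_sketch(order: StepOrderValue) -> str:
    # Hypothetical sketch: strict funnels get the dedupe filter shown in
    # the snapshots above, other orderings pass events_array through.
    if order == StepOrderValue.STRICT:
        return ("arrayFilter((x, x2) -> not(...), "
                "events_array, arrayRotateRight(events_array, 1))")
    return "events_array"
```
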
diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py
index d7f8d326e2a7d..cb1ab3e0653fd 100644
--- a/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py
+++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends.py
@@ -2,7 +2,9 @@
from typing import cast
from zoneinfo import ZoneInfo
+
from freezegun.api import freeze_time
+from parameterized import parameterized
from posthog.constants import INSIGHT_FUNNELS, TRENDS_LINEAR, FunnelOrderType
from posthog.hogql_queries.insights.funnels.funnels_query_runner import FunnelsQueryRunner
@@ -101,7 +103,11 @@ def test_no_event_in_period(self):
self.assertEqual(len(results), 7)
self.assertEqual(formatted_results[0]["days"][0], "2021-06-07")
- def test_only_one_user_reached_one_step(self):
+ @parameterized.expand(["US/Pacific", "UTC"])
+ def test_only_one_user_reached_one_step(self, timezone):
+ self.team.timezone = timezone
+ self.team.save()
+
journeys_for(
{"user a": [{"event": "step one", "timestamp": datetime(2021, 6, 7, 19)}]},
self.team,
@@ -133,43 +139,43 @@ def test_only_one_user_reached_one_step(self):
"reached_to_step_count": 0,
"conversion_rate": 0.0,
"reached_from_step_count": 1,
- "timestamp": datetime(2021, 6, 7, 0, 0).replace(tzinfo=ZoneInfo("UTC")),
+ "timestamp": datetime(2021, 6, 7, 0, 0).replace(tzinfo=ZoneInfo(timezone)),
},
{
"reached_to_step_count": 0,
"conversion_rate": 0.0,
"reached_from_step_count": 0,
- "timestamp": datetime(2021, 6, 8, 0, 0).replace(tzinfo=ZoneInfo("UTC")),
+ "timestamp": datetime(2021, 6, 8, 0, 0).replace(tzinfo=ZoneInfo(timezone)),
},
{
"reached_to_step_count": 0,
"conversion_rate": 0.0,
"reached_from_step_count": 0,
- "timestamp": datetime(2021, 6, 9, 0, 0).replace(tzinfo=ZoneInfo("UTC")),
+ "timestamp": datetime(2021, 6, 9, 0, 0).replace(tzinfo=ZoneInfo(timezone)),
},
{
"reached_to_step_count": 0,
"conversion_rate": 0.0,
"reached_from_step_count": 0,
- "timestamp": datetime(2021, 6, 10, 0, 0).replace(tzinfo=ZoneInfo("UTC")),
+ "timestamp": datetime(2021, 6, 10, 0, 0).replace(tzinfo=ZoneInfo(timezone)),
},
{
"reached_to_step_count": 0,
"conversion_rate": 0.0,
"reached_from_step_count": 0,
- "timestamp": datetime(2021, 6, 11, 0, 0).replace(tzinfo=ZoneInfo("UTC")),
+ "timestamp": datetime(2021, 6, 11, 0, 0).replace(tzinfo=ZoneInfo(timezone)),
},
{
"reached_to_step_count": 0,
"conversion_rate": 0.0,
"reached_from_step_count": 0,
- "timestamp": datetime(2021, 6, 12, 0, 0).replace(tzinfo=ZoneInfo("UTC")),
+ "timestamp": datetime(2021, 6, 12, 0, 0).replace(tzinfo=ZoneInfo(timezone)),
},
{
"reached_to_step_count": 0,
"conversion_rate": 0.0,
"reached_from_step_count": 0,
- "timestamp": datetime(2021, 6, 13, 0, 0).replace(tzinfo=ZoneInfo("UTC")),
+ "timestamp": datetime(2021, 6, 13, 0, 0).replace(tzinfo=ZoneInfo(timezone)),
},
],
)
@@ -292,7 +298,11 @@ def test_week_interval(self):
self.assertEqual(2, len(results))
self.assertEqual([person["distinct_ids"] for person in persons], [["user_one"]])
- def test_month_interval(self):
+ @parameterized.expand(["US/Pacific", "UTC"])
+ def test_month_interval(self, timezone):
+ self.team.timezone = timezone
+ self.team.save()
+
filters = {
"insight": INSIGHT_FUNNELS,
"funnel_viz_type": "trends",
@@ -344,15 +354,15 @@ def test_month_interval(self):
"timestamp": date(2020, 3, 1),
},
{
- "conversion_rate": 0.0,
- "reached_from_step_count": 0,
- "reached_to_step_count": 0,
+ "conversion_rate": 100.0 if timezone == "US/Pacific" else 0.0,
+ "reached_from_step_count": 1 if timezone == "US/Pacific" else 0,
+ "reached_to_step_count": 1 if timezone == "US/Pacific" else 0,
"timestamp": date(2020, 4, 1),
},
{
- "conversion_rate": 100.0,
- "reached_from_step_count": 1,
- "reached_to_step_count": 1,
+ "conversion_rate": 100.0 if timezone == "UTC" else 0.0,
+ "reached_from_step_count": 1 if timezone == "UTC" else 0,
+ "reached_to_step_count": 1 if timezone == "UTC" else 0,
"timestamp": date(2020, 5, 1),
},
{
@@ -369,8 +379,8 @@ def test_month_interval(self):
},
],
)
-
- persons = self._get_actors_at_step(filters, "2020-05-01 00:00:00", False)
+ entrance_period_start = "2020-05-01 00:00:00" if timezone == "UTC" else "2020-04-01 00:00:00"
+ persons = self._get_actors_at_step(filters, entrance_period_start, False)
self.assertEqual([person["distinct_ids"] for person in persons], [["user_one"]])
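
The new parameterization makes the timezone dependence explicit: an event whose UTC timestamp sits just after a month boundary localizes into the previous month in US/Pacific, so the same journey lands in the 2020-04 bucket there but in 2020-05 under UTC, and the actors lookup has to use the matching entrance period. A short illustration (the exact event time is illustrative; only the month flip matters):

```python
from datetime import datetime, timezone
from zoneinfo import ZoneInfo

event_utc = datetime(2020, 5, 1, 1, 0, tzinfo=timezone.utc)
as_pacific = event_utc.astimezone(ZoneInfo("US/Pacific"))

assert event_utc.strftime("%Y-%m") == "2020-05"   # UTC bucket: May
assert as_pacific.strftime("%Y-%m") == "2020-04"  # US/Pacific bucket: April
```
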
diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_udf.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_udf.py
index 2829bb93ce7dc..d341f7c272be3 100644
--- a/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_udf.py
+++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_trends_udf.py
@@ -1,11 +1,23 @@
+import datetime
from typing import cast
from unittest.mock import patch, Mock
-from posthog.constants import INSIGHT_FUNNELS, TRENDS_LINEAR
+from posthog.constants import INSIGHT_FUNNELS, TRENDS_LINEAR, FunnelOrderType
from posthog.hogql_queries.insights.funnels.funnels_query_runner import FunnelsQueryRunner
from posthog.hogql_queries.insights.funnels.test.test_funnel_trends import BaseTestFunnelTrends
from posthog.hogql_queries.legacy_compatibility.filter_to_query import filter_to_query
-from posthog.schema import FunnelsQuery, FunnelsQueryResponse
+from posthog.schema import (
+ FunnelsQuery,
+ FunnelsQueryResponse,
+ EventsNode,
+ BreakdownFilter,
+ FunnelsFilter,
+ FunnelVizType,
+ BreakdownAttributionType,
+ InsightDateRange,
+ IntervalType,
+)
+from posthog.test.base import _create_person, _create_event
@patch(
@@ -53,3 +65,111 @@ def test_assert_steps_flag_is_off(self):
results = cast(FunnelsQueryResponse, FunnelsQueryRunner(query=query, team=self.team).calculate())
self.assertFalse(results.isUdf)
+
+ def test_different_prop_val_in_strict_filter(self):
+ funnels_query = FunnelsQuery(
+ series=[EventsNode(event="first"), EventsNode(event="second")],
+ breakdownFilter=BreakdownFilter(breakdown="bd"),
+ dateRange=InsightDateRange(date_from="2024-01-01", date_to="2024-01-08"),
+ interval=IntervalType.DAY,
+ funnelsFilter=FunnelsFilter(funnelOrderType=FunnelOrderType.STRICT, funnelVizType=FunnelVizType.TRENDS),
+ )
+
+ _create_person(
+ distinct_ids=["many_other_events"],
+ team_id=self.team.pk,
+ properties={"test": "okay"},
+ )
+ _create_event(
+ team=self.team,
+ event="first",
+ distinct_id="many_other_events",
+ properties={"bd": "one"},
+ timestamp=datetime.datetime(2024, 1, 2),
+ )
+ _create_event(
+ team=self.team,
+ event="first",
+ distinct_id="many_other_events",
+ properties={"bd": "two"},
+ timestamp=datetime.datetime(2024, 1, 3),
+ )
+ _create_event(
+ team=self.team,
+ event="unmatched",
+ distinct_id="many_other_events",
+ properties={"bd": "one"},
+ timestamp=datetime.datetime(2024, 1, 4),
+ )
+ _create_event(
+ team=self.team,
+ event="unmatched",
+ distinct_id="many_other_events",
+ properties={"bd": "two"},
+ timestamp=datetime.datetime(2024, 1, 5),
+ )
+ _create_event(
+ team=self.team,
+ event="second",
+ distinct_id="many_other_events",
+ properties={"bd": "one"},
+ timestamp=datetime.datetime(2024, 1, 6),
+ )
+ _create_event(
+ team=self.team,
+ event="second",
+ distinct_id="many_other_events",
+ properties={"bd": "two"},
+ timestamp=datetime.datetime(2024, 1, 7),
+ )
+
+ # First-touch attribution (just "one")
+ results = FunnelsQueryRunner(query=funnels_query, team=self.team).calculate().results
+
+ self.assertEqual(
+ [
+ {
+ "breakdown_value": ["one"],
+ "count": 8,
+ "data": [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ ],
+ "days": [
+ "2024-01-01",
+ "2024-01-02",
+ "2024-01-03",
+ "2024-01-04",
+ "2024-01-05",
+ "2024-01-06",
+ "2024-01-07",
+ "2024-01-08",
+ ],
+ "labels": [
+ "1-Jan-2024",
+ "2-Jan-2024",
+ "3-Jan-2024",
+ "4-Jan-2024",
+ "5-Jan-2024",
+ "6-Jan-2024",
+ "7-Jan-2024",
+ "8-Jan-2024",
+ ],
+ }
+ ],
+ results,
+ )
+
+ # All events attribution
+ assert funnels_query.funnelsFilter is not None
+ funnels_query.funnelsFilter.breakdownAttributionType = BreakdownAttributionType.ALL_EVENTS
+ results = FunnelsQueryRunner(query=funnels_query, team=self.team).calculate().results
+
+ assert len(results) == 2
+ assert all(data == 0 for result in results for data in result["data"])
diff --git a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr
index ed0ddee0e0f34..14201b4e6b41e 100644
--- a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr
+++ b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr
@@ -52,8 +52,8 @@
# name: TestTrends.test_action_filtering_with_cohort.5
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-07 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
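
This and the remaining trends-snapshot hunks all make the same change: per-day count/total values are wrapped in ifNull(..., 0) before being collected with groupArray, so any Nullable values entering the array arithmetic (arraySum, arrayFold) are normalized to zero rather than propagating as NULL. The Python analogue of that coercion:

```python
# Treat missing days as zero instead of letting None leak into the total.
daily_counts = [3, None, 5, None]

total = sum(c if c is not None else 0 for c in daily_counts)
assert total == 8
```
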
@@ -148,8 +148,8 @@
# name: TestTrends.test_action_filtering_with_cohort_poe_v2.5
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-07 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -184,12 +184,12 @@
# name: TestTrends.test_breakdown_by_group_props_person_on_events
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -304,12 +304,12 @@
# name: TestTrends.test_breakdown_by_group_props_with_person_filter_person_on_events
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -352,12 +352,12 @@
# name: TestTrends.test_breakdown_filtering_with_properties_in_new_format
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-22 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-22 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -392,12 +392,12 @@
# name: TestTrends.test_breakdown_filtering_with_properties_in_new_format.1
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-22 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-22 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -432,12 +432,12 @@
# name: TestTrends.test_breakdown_filtering_with_properties_in_new_format.2
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
arrayMap(i -> if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', i), breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-22 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-22 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -472,12 +472,12 @@
# name: TestTrends.test_breakdown_filtering_with_properties_in_new_format.3
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
arrayMap(i -> if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', i), breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-22 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-22 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -652,12 +652,12 @@
# name: TestTrends.test_breakdown_weekly_active_users_daily_based_on_action.2
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -728,12 +728,12 @@
# name: TestTrends.test_breakdown_with_filter_groups_person_on_events
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -785,12 +785,12 @@
# name: TestTrends.test_breakdown_with_filter_groups_person_on_events_v2.1
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -852,12 +852,12 @@
# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.1
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -1111,12 +1111,12 @@
# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.2
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -1158,12 +1158,12 @@
# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.3
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
arrayMap(i -> if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', i), breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -1205,12 +1205,12 @@
# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.4
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
arrayMap(i -> if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', i), breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -1352,8 +1352,8 @@
# name: TestTrends.test_filter_events_by_precalculated_cohort.4
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-26 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-26 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-02 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -1435,8 +1435,8 @@
# name: TestTrends.test_filter_events_by_precalculated_cohort_poe_v2.4
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-26 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-26 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-02 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -1461,8 +1461,8 @@
# name: TestTrends.test_filtering_by_multiple_groups_person_on_events
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -1588,8 +1588,8 @@
# name: TestTrends.test_filtering_with_group_props_person_on_events
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -1622,12 +1622,12 @@
# name: TestTrends.test_mau_with_breakdown_filtering_and_prop_filter
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -1696,12 +1696,12 @@
# name: TestTrends.test_mau_with_breakdown_filtering_and_prop_filter.1
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
arrayMap(i -> if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', i), breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -1770,12 +1770,12 @@
# name: TestTrends.test_mau_with_breakdown_filtering_and_prop_filter_poe_v2
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -1833,12 +1833,12 @@
# name: TestTrends.test_mau_with_breakdown_filtering_and_prop_filter_poe_v2.1
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
arrayMap(i -> if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', i), breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -1896,8 +1896,8 @@
# name: TestTrends.test_non_deterministic_timezones
'''
SELECT arrayMap(number -> plus(toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2022-10-31 00:00:00', 6, 'US/Pacific')), 0), toIntervalWeek(number)), range(0, plus(coalesce(dateDiff('week', toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2022-10-31 00:00:00', 6, 'US/Pacific')), 0), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2022-11-30 23:59:59', 6, 'US/Pacific')), 0))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -1954,12 +1954,12 @@
# name: TestTrends.test_person_filtering_in_cohort_in_action.3
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -2036,12 +2036,12 @@
# name: TestTrends.test_person_filtering_in_cohort_in_action_poe_v2.3
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -2086,8 +2086,8 @@
# name: TestTrends.test_person_property_filtering
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -2129,8 +2129,8 @@
# name: TestTrends.test_person_property_filtering_clashing_with_event_property
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -2172,8 +2172,8 @@
# name: TestTrends.test_person_property_filtering_clashing_with_event_property.1
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -2198,8 +2198,8 @@
# name: TestTrends.test_person_property_filtering_clashing_with_event_property_materialized
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -2241,8 +2241,8 @@
# name: TestTrends.test_person_property_filtering_clashing_with_event_property_materialized.1
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -2267,8 +2267,8 @@
# name: TestTrends.test_person_property_filtering_materialized
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -2319,8 +2319,8 @@
# name: TestTrends.test_same_day_with_person_on_events_v2.1
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-03 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-03 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-03 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -2345,8 +2345,8 @@
# name: TestTrends.test_same_day_with_person_on_events_v2.2
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-03 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-03 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-03 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -2387,8 +2387,8 @@
# name: TestTrends.test_same_day_with_person_on_events_v2_latest_override.1
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-03 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-03 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-03 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -2429,8 +2429,8 @@
# name: TestTrends.test_same_day_with_person_on_events_v2_latest_override.3
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-03 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-03 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-03 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -2471,8 +2471,8 @@
# name: TestTrends.test_same_day_with_person_on_events_v2_latest_override.5
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-03 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-03 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-03 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -2504,8 +2504,8 @@
# name: TestTrends.test_timezones_daily
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -2530,8 +2530,8 @@
# name: TestTrends.test_timezones_daily.1
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-22 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-22 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -2563,8 +2563,8 @@
# name: TestTrends.test_timezones_daily.2
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -2609,8 +2609,8 @@
# name: TestTrends.test_timezones_daily.3
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -2635,12 +2635,12 @@
# name: TestTrends.test_timezones_daily.4
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -2682,8 +2682,8 @@
# name: TestTrends.test_timezones_daily_minus_utc
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'America/Phoenix'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'America/Phoenix'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'America/Phoenix'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -2708,8 +2708,8 @@
# name: TestTrends.test_timezones_daily_minus_utc.1
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-22 00:00:00', 6, 'America/Phoenix'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-22 00:00:00', 6, 'America/Phoenix'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'America/Phoenix'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -2741,8 +2741,8 @@
# name: TestTrends.test_timezones_daily_minus_utc.2
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'America/Phoenix'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'America/Phoenix'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'America/Phoenix'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -2787,8 +2787,8 @@
# name: TestTrends.test_timezones_daily_minus_utc.3
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'America/Phoenix'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'America/Phoenix'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'America/Phoenix'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -2813,12 +2813,12 @@
# name: TestTrends.test_timezones_daily_minus_utc.4
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'America/Phoenix'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'America/Phoenix'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'America/Phoenix'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -2860,8 +2860,8 @@
# name: TestTrends.test_timezones_daily_plus_utc
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'Asia/Tokyo'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'Asia/Tokyo'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'Asia/Tokyo'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -2886,8 +2886,8 @@
# name: TestTrends.test_timezones_daily_plus_utc.1
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-22 00:00:00', 6, 'Asia/Tokyo'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-22 00:00:00', 6, 'Asia/Tokyo'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'Asia/Tokyo'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -2919,8 +2919,8 @@
# name: TestTrends.test_timezones_daily_plus_utc.2
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'Asia/Tokyo'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'Asia/Tokyo'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'Asia/Tokyo'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -2965,8 +2965,8 @@
# name: TestTrends.test_timezones_daily_plus_utc.3
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'Asia/Tokyo'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'Asia/Tokyo'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'Asia/Tokyo'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -2991,12 +2991,12 @@
# name: TestTrends.test_timezones_daily_plus_utc.4
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'Asia/Tokyo'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'Asia/Tokyo'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 23:59:59', 6, 'Asia/Tokyo'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -3038,8 +3038,8 @@
# name: TestTrends.test_timezones_hourly_relative_from
'''
SELECT arrayMap(number -> plus(toStartOfHour(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 00:00:00', 6, 'UTC'))), toIntervalHour(number)), range(0, plus(coalesce(dateDiff('hour', toStartOfHour(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 00:00:00', 6, 'UTC'))), toStartOfHour(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 10:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -3071,8 +3071,8 @@
# name: TestTrends.test_timezones_hourly_relative_from.1
'''
SELECT arrayMap(number -> plus(toStartOfHour(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 00:00:00', 6, 'UTC'))), toIntervalHour(number)), range(0, plus(coalesce(dateDiff('hour', toStartOfHour(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 00:00:00', 6, 'UTC'))), toStartOfHour(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 10:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -3097,8 +3097,8 @@
# name: TestTrends.test_timezones_hourly_relative_from_minus_utc
'''
SELECT arrayMap(number -> plus(toStartOfHour(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 00:00:00', 6, 'America/Phoenix'))), toIntervalHour(number)), range(0, plus(coalesce(dateDiff('hour', toStartOfHour(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 00:00:00', 6, 'America/Phoenix'))), toStartOfHour(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 10:59:59', 6, 'America/Phoenix'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -3130,8 +3130,8 @@
# name: TestTrends.test_timezones_hourly_relative_from_minus_utc.1
'''
SELECT arrayMap(number -> plus(toStartOfHour(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 00:00:00', 6, 'America/Phoenix'))), toIntervalHour(number)), range(0, plus(coalesce(dateDiff('hour', toStartOfHour(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 00:00:00', 6, 'America/Phoenix'))), toStartOfHour(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 10:59:59', 6, 'America/Phoenix'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -3156,8 +3156,8 @@
# name: TestTrends.test_timezones_hourly_relative_from_plus_utc
'''
SELECT arrayMap(number -> plus(toStartOfHour(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 00:00:00', 6, 'Asia/Tokyo'))), toIntervalHour(number)), range(0, plus(coalesce(dateDiff('hour', toStartOfHour(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 00:00:00', 6, 'Asia/Tokyo'))), toStartOfHour(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 10:59:59', 6, 'Asia/Tokyo'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -3189,8 +3189,8 @@
# name: TestTrends.test_timezones_hourly_relative_from_plus_utc.1
'''
SELECT arrayMap(number -> plus(toStartOfHour(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 00:00:00', 6, 'Asia/Tokyo'))), toIntervalHour(number)), range(0, plus(coalesce(dateDiff('hour', toStartOfHour(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 00:00:00', 6, 'Asia/Tokyo'))), toStartOfHour(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-05 10:59:59', 6, 'Asia/Tokyo'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -3215,8 +3215,8 @@
# name: TestTrends.test_timezones_weekly
'''
SELECT arrayMap(number -> plus(toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 00:00:00', 6, 'UTC')), 0), toIntervalWeek(number)), range(0, plus(coalesce(dateDiff('week', toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 00:00:00', 6, 'UTC')), 0), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-26 23:59:59', 6, 'UTC')), 0))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -3241,8 +3241,8 @@
# name: TestTrends.test_timezones_weekly.1
'''
SELECT arrayMap(number -> plus(toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 00:00:00', 6, 'UTC')), 3), toIntervalWeek(number)), range(0, plus(coalesce(dateDiff('week', toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 00:00:00', 6, 'UTC')), 3), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-26 23:59:59', 6, 'UTC')), 3))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -3267,8 +3267,8 @@
# name: TestTrends.test_timezones_weekly_minus_utc
'''
SELECT arrayMap(number -> plus(toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 00:00:00', 6, 'America/Phoenix')), 0), toIntervalWeek(number)), range(0, plus(coalesce(dateDiff('week', toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 00:00:00', 6, 'America/Phoenix')), 0), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-26 23:59:59', 6, 'America/Phoenix')), 0))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -3293,8 +3293,8 @@
# name: TestTrends.test_timezones_weekly_minus_utc.1
'''
SELECT arrayMap(number -> plus(toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 00:00:00', 6, 'America/Phoenix')), 3), toIntervalWeek(number)), range(0, plus(coalesce(dateDiff('week', toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 00:00:00', 6, 'America/Phoenix')), 3), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-26 23:59:59', 6, 'America/Phoenix')), 3))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -3319,8 +3319,8 @@
# name: TestTrends.test_timezones_weekly_plus_utc
'''
SELECT arrayMap(number -> plus(toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 00:00:00', 6, 'Asia/Tokyo')), 0), toIntervalWeek(number)), range(0, plus(coalesce(dateDiff('week', toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 00:00:00', 6, 'Asia/Tokyo')), 0), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-26 23:59:59', 6, 'Asia/Tokyo')), 0))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -3345,8 +3345,8 @@
# name: TestTrends.test_timezones_weekly_plus_utc.1
'''
SELECT arrayMap(number -> plus(toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 00:00:00', 6, 'Asia/Tokyo')), 3), toIntervalWeek(number)), range(0, plus(coalesce(dateDiff('week', toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 00:00:00', 6, 'Asia/Tokyo')), 3), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-26 23:59:59', 6, 'Asia/Tokyo')), 3))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -3371,12 +3371,12 @@
# name: TestTrends.test_trend_breakdown_user_props_with_filter_with_partial_property_pushdowns
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-07-01 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -3430,12 +3430,12 @@
# name: TestTrends.test_trend_breakdown_user_props_with_filter_with_partial_property_pushdowns.1
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-07-01 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -3489,8 +3489,8 @@
# name: TestTrends.test_trends_aggregate_by_distinct_id
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-24 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-24 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-31 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -3515,8 +3515,8 @@
# name: TestTrends.test_trends_aggregate_by_distinct_id.1
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-24 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-24 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-31 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -3558,12 +3558,12 @@
# name: TestTrends.test_trends_aggregate_by_distinct_id.2
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-24 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-24 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-31 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -3615,12 +3615,12 @@
# name: TestTrends.test_trends_aggregate_by_distinct_id.3
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
arrayMap(i -> if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', i), breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-24 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-24 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-31 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -3672,8 +3672,8 @@
# name: TestTrends.test_trends_aggregate_by_distinct_id.4
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-24 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-24 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-31 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -3711,8 +3711,8 @@
# name: TestTrends.test_trends_aggregate_by_distinct_id.5
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-24 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-24 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-31 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -3750,12 +3750,12 @@
# name: TestTrends.test_trends_aggregate_by_distinct_id.6
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-24 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-24 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-31 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -3802,8 +3802,8 @@
# name: TestTrends.test_trends_any_event_total_count.1
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -3828,8 +3828,8 @@
# name: TestTrends.test_trends_any_event_total_count.2
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -3866,12 +3866,12 @@
# name: TestTrends.test_trends_breakdown_cumulative.1
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayFill(x -> ifNull(greater(x, 0), 0), arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date)) AS total,
+ arrayFill(x -> ifNull(greater(x, 0), 0), arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date)) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -3931,12 +3931,12 @@
# name: TestTrends.test_trends_breakdown_cumulative_poe_v2.1
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayFill(x -> ifNull(greater(x, 0), 0), arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date)) AS total,
+ arrayFill(x -> ifNull(greater(x, 0), 0), arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date)) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -3984,12 +3984,12 @@
# name: TestTrends.test_trends_breakdown_normalize_url
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayFill(x -> ifNull(greater(x, 0), 0), arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date)) AS total,
+ arrayFill(x -> ifNull(greater(x, 0), 0), arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date)) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -4235,8 +4235,8 @@
# name: TestTrends.test_trends_compare_day_interval_relative_range.1
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -4261,8 +4261,8 @@
# name: TestTrends.test_trends_compare_day_interval_relative_range.2
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-21 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-21 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -4287,8 +4287,8 @@
# name: TestTrends.test_trends_compare_day_interval_relative_range.3
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -4414,8 +4414,8 @@
# name: TestTrends.test_trends_count_per_user_average_daily
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-07 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -4455,8 +4455,8 @@
# name: TestTrends.test_trends_count_per_user_average_daily_poe_v2
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-07 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -4496,8 +4496,8 @@
# name: TestTrends.test_trends_groups_per_day
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-30 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-30 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-06 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -4522,8 +4522,8 @@
# name: TestTrends.test_trends_groups_per_day_cumulative
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-30 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-30 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-06 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayFill(x -> ifNull(greater(x, 0), 0), arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date)) AS total
+ arrayFill(x -> ifNull(greater(x, 0), 0), arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date)) AS total
FROM
(SELECT day_start AS day_start,
sum(count) OVER (
@@ -4565,8 +4565,8 @@
# name: TestTrends.test_trends_per_day_cumulative.1
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayFill(x -> ifNull(greater(x, 0), 0), arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date)) AS total
+ arrayFill(x -> ifNull(greater(x, 0), 0), arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date)) AS total
FROM
(SELECT day_start AS day_start,
sum(count) OVER (
@@ -4608,8 +4608,8 @@
# name: TestTrends.test_trends_per_day_dau_cumulative.1
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayFill(x -> ifNull(greater(x, 0), 0), arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date)) AS total
+ arrayFill(x -> ifNull(greater(x, 0), 0), arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date)) AS total
FROM
(SELECT day_start AS day_start,
sum(count) OVER (
@@ -4772,8 +4772,8 @@
# name: TestTrends.test_trends_with_hogql_math
'''
SELECT arrayMap(number -> plus(toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0), toIntervalWeek(number)), range(0, plus(coalesce(dateDiff('week', toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC')), 0))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -4848,8 +4848,8 @@
# name: TestTrends.test_trends_with_session_property_total_volume_math
'''
SELECT arrayMap(number -> plus(toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0), toIntervalWeek(number)), range(0, plus(coalesce(dateDiff('week', toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC')), 0))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -4887,8 +4887,8 @@
# name: TestTrends.test_trends_with_session_property_total_volume_math.1
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -4926,12 +4926,12 @@
# name: TestTrends.test_trends_with_session_property_total_volume_math_with_breakdowns
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0), toIntervalWeek(number)), range(0, plus(coalesce(dateDiff('week', toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC')), 0))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -4981,12 +4981,12 @@
# name: TestTrends.test_trends_with_session_property_total_volume_math_with_breakdowns.1
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -5036,12 +5036,12 @@
# name: TestTrends.test_trends_with_session_property_total_volume_math_with_breakdowns.2
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
arrayMap(i -> if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', i), breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0), toIntervalWeek(number)), range(0, plus(coalesce(dateDiff('week', toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC')), 0))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -5091,12 +5091,12 @@
# name: TestTrends.test_trends_with_session_property_total_volume_math_with_breakdowns.3
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
arrayMap(i -> if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', i), breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -5254,8 +5254,8 @@
# name: TestTrends.test_weekly_active_users_daily
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-08 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-08 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-19 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -5300,8 +5300,8 @@
# name: TestTrends.test_weekly_active_users_daily_minus_utc
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-08 00:00:00', 6, 'America/Phoenix'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-08 00:00:00', 6, 'America/Phoenix'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-19 23:59:59', 6, 'America/Phoenix'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -5346,8 +5346,8 @@
# name: TestTrends.test_weekly_active_users_daily_plus_utc
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-08 00:00:00', 6, 'Asia/Tokyo'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-08 00:00:00', 6, 'Asia/Tokyo'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-19 23:59:59', 6, 'Asia/Tokyo'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -5392,8 +5392,8 @@
# name: TestTrends.test_weekly_active_users_filtering
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -5448,8 +5448,8 @@
# name: TestTrends.test_weekly_active_users_filtering_materialized
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -5504,8 +5504,8 @@
# name: TestTrends.test_weekly_active_users_hourly
'''
SELECT arrayMap(number -> plus(toStartOfHour(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-09 06:00:00', 6, 'UTC'))), toIntervalHour(number)), range(0, plus(coalesce(dateDiff('hour', toStartOfHour(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-09 06:00:00', 6, 'UTC'))), toStartOfHour(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-09 17:00:00', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -5550,8 +5550,8 @@
# name: TestTrends.test_weekly_active_users_weekly
'''
SELECT arrayMap(number -> plus(toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'UTC')), 0), toIntervalWeek(number)), range(0, plus(coalesce(dateDiff('week', toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'UTC')), 0), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-18 23:59:59', 6, 'UTC')), 0))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -5596,8 +5596,8 @@
# name: TestTrends.test_weekly_active_users_weekly_minus_utc
'''
SELECT arrayMap(number -> plus(toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'America/Phoenix')), 0), toIntervalWeek(number)), range(0, plus(coalesce(dateDiff('week', toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'America/Phoenix')), 0), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-18 23:59:59', 6, 'America/Phoenix')), 0))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -5642,8 +5642,8 @@
# name: TestTrends.test_weekly_active_users_weekly_plus_utc
'''
SELECT arrayMap(number -> plus(toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'Asia/Tokyo')), 0), toIntervalWeek(number)), range(0, plus(coalesce(dateDiff('week', toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-29 00:00:00', 6, 'Asia/Tokyo')), 0), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-18 23:59:59', 6, 'Asia/Tokyo')), 0))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
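Note on the snapshot churn above: every aggregation that previously read `groupArray(count)` or `groupArray(total)` now coalesces first, as `groupArray(ifNull(count, 0))` and `groupArray(ifNull(total, 0))`, because a single NULL element makes ClickHouse's `arraySum` and `arrayFold` return NULL for the whole series. A minimal Python sketch of the behavior being fixed (a loose analogy, not exact ClickHouse semantics):

    from typing import Optional

    def total_for_bucket(counts: list[Optional[float]]) -> float:
        # Mirrors arraySum over groupArray(ifNull(count, 0)): coalesce each
        # NULL bucket to 0 instead of letting one NULL poison the whole sum.
        return sum(0.0 if c is None else c for c in counts)

    assert total_for_bucket([1.5, None, 2.0]) == 3.5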
diff --git a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends_data_warehouse_query.ambr b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends_data_warehouse_query.ambr
index c97d2f4ac5467..f89c1599e5140 100644
--- a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends_data_warehouse_query.ambr
+++ b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends_data_warehouse_query.ambr
@@ -2,12 +2,12 @@
# name: TestTrendsDataWarehouseQuery.test_trends_breakdown
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-07 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -59,12 +59,12 @@
# name: TestTrendsDataWarehouseQuery.test_trends_breakdown_on_view.1
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-07 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -104,12 +104,12 @@
# name: TestTrendsDataWarehouseQuery.test_trends_breakdown_with_property
'''
SELECT groupArray(1)(date)[1] AS date,
- arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(total), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
+ arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total,
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value
FROM
(SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-07 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total,
breakdown_value AS breakdown_value,
rowNumberInAllBlocks() AS row_number
FROM
@@ -144,8 +144,8 @@
# name: TestTrendsDataWarehouseQuery.test_trends_data_warehouse
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-07 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -170,8 +170,8 @@
# name: TestTrendsDataWarehouseQuery.test_trends_entity_property
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-07 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
@@ -196,8 +196,8 @@
# name: TestTrendsDataWarehouseQuery.test_trends_query_properties
'''
SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-01 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2023-01-07 23:59:59', 6, 'UTC'))))), 1))) AS date,
- arrayMap(_match_date -> arraySum(arraySlice(groupArray(count), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
- and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
+ arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x)
+ and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total
FROM
(SELECT sum(total) AS count,
day_start AS day_start
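The regression test below exercises this NULL path end to end: a property forced to Numeric that fails the float cast yields NULL bucket sums, which previously nulled out the aggregated series.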
diff --git a/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py b/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py
index a7de68d01ef9b..e15175a7ffd7f 100644
--- a/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py
+++ b/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py
@@ -4832,3 +4832,89 @@ def test_trends_with_formula_and_multiple_breakdowns_hide_other_breakdowns(self)
)
breakdowns = [b for result in response.results for b in result["breakdown_value"]]
self.assertNotIn(BREAKDOWN_OTHER_STRING_LABEL, breakdowns)
+
+ def test_trends_aggregation_total_with_null(self):
+ self._create_events(
+ [
+ SeriesTestData(
+ distinct_id="p1",
+ events=[
+ Series(event="$pageview", timestamps=["2020-01-08T12:00:00Z"]),
+ ],
+ properties={
+ "$browser": "Chrome",
+ "prop": 30,
+ "bool_field": True,
+ "nullable_prop": "1.1",
+ },
+ ),
+ SeriesTestData(
+ distinct_id="p7",
+ events=[
+ Series(event="$pageview", timestamps=["2020-01-15T12:00:00Z"]),
+ ],
+ properties={
+ "$browser": "Chrome",
+ "prop": 30,
+ "bool_field": True,
+ "nullable_prop": "1.1",
+ },
+ ),
+ SeriesTestData(
+ distinct_id="p3",
+ events=[
+ Series(event="$pageview", timestamps=["2020-01-12T12:00:00Z"]),
+ ],
+ properties={
+ "$browser": "Chrome",
+ "prop": 30,
+ "bool_field": True,
+ "nullable_prop": "garbage",
+ },
+ ),
+ SeriesTestData(
+ distinct_id="p4",
+ events=[
+ Series(event="$pageview", timestamps=["2020-01-15T12:00:00Z"]),
+ ],
+ properties={
+ "$browser": "Chrome",
+ "prop": 40,
+ "bool_field": False,
+ "nullable_prop": "garbage",
+ },
+ ),
+ SeriesTestData(
+ distinct_id="p5",
+ events=[
+ Series(event="$pageview", timestamps=["2020-01-09T12:00:00Z"]),
+ ],
+ properties={
+ "$browser": "Chrome",
+ "prop": 40,
+ "bool_field": False,
+ "nullable_prop": "garbage",
+ },
+ ),
+ ]
+ )
+
+ # let the property be inferred as a different type first, then override it
+ # so that the query applies the `toFloat` cast
+ nullable_prop = PropertyDefinition.objects.get(name="nullable_prop")
+ nullable_prop.property_type = "Numeric"
+ nullable_prop.save()
+
+ nullable_prop = PropertyDefinition.objects.get(name="nullable_prop")
+
+ response = self._run_trends_query(
+ "2020-01-08",
+ "2020-01-15",
+ IntervalType.DAY,
+ [EventsNode(event="$pageview", math=PropertyMathType.SUM, math_property="nullable_prop")],
+ None,
+ BreakdownFilter(breakdown="$browser", breakdown_type=BreakdownType.EVENT),
+ )
+
+ assert len(response.results) == 1
+ assert response.results[0]["data"] == [1.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.1]
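A hedged walk-through of the expected series under the test's event data; `to_float` below stands in for the `toFloat64OrNull`-style cast the query applies, and the per-value coalescing is a simplification of the per-bucket `ifNull(count, 0)`:

    def to_float(v):
        try:
            return float(v)
        except ValueError:
            return None

    # day offset within 2020-01-08..2020-01-15 -> nullable_prop values seen that day
    events = {
        0: ["1.1"],             # p1 on 2020-01-08
        1: ["garbage"],         # p5 on 2020-01-09
        4: ["garbage"],         # p3 on 2020-01-12
        7: ["1.1", "garbage"],  # p7 and p4 on 2020-01-15
    }

    series = []
    for day in range(8):
        parsed = [to_float(v) for v in events.get(day, [])]
        series.append(sum(0.0 if p is None else p for p in parsed))

    assert series == [1.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.1]

Only the parseable `1.1` values at the range edges survive; the `garbage` rows parse to NULL and now contribute 0 instead of nulling the series.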
diff --git a/posthog/hogql_queries/insights/trends/trends_query_builder.py b/posthog/hogql_queries/insights/trends/trends_query_builder.py
index 393109d7fce90..7a93be2d548b0 100644
--- a/posthog/hogql_queries/insights/trends/trends_query_builder.py
+++ b/posthog/hogql_queries/insights/trends/trends_query_builder.py
@@ -319,7 +319,7 @@ def _outer_select_query(
_match_date ->
arraySum(
arraySlice(
- groupArray(count),
+ groupArray(ifNull(count, 0)),
indexOf(groupArray(day_start) as _days_for_count, _match_date) as _index,
arrayLastIndex(x -> x = _match_date, _days_for_count) - _index + 1
)
@@ -418,7 +418,7 @@ def _outer_select_query(
i -> acc[i] + x[i],
range(1, length(date) + 1)
),
- groupArray(total),
+ groupArray(ifNull(total, 0)),
arrayWithConstant(length(date), reinterpretAsFloat64(0))
) as total,
{breakdown_select}
@@ -629,9 +629,11 @@ def _inner_breakdown_subquery(self, query: ast.SelectQuery, breakdown: Breakdown
query.select.append(
ast.Alias(
alias="breakdown_value",
- expr=breakdown_array
- if breakdown.is_multiple_breakdown
- else parse_expr("{arr}[1]", placeholders={"arr": breakdown_array}),
+ expr=(
+ breakdown_array
+ if breakdown.is_multiple_breakdown
+ else parse_expr("{arr}[1]", placeholders={"arr": breakdown_array})
+ ),
)
)
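The fix itself lands in `trends_query_builder.py` above: both the per-day `count` inside the sliced `arraySum` and the per-breakdown `total` fed into `arrayFold` are wrapped in `ifNull(..., 0)` before aggregation (the final hunk is a formatting-only reflow of the breakdown alias). A loose Python analogy for the fold, assuming lists in place of ClickHouse arrays:

    def fold_totals(per_breakdown_totals, n_days):
        acc = [0.0] * n_days  # arrayWithConstant(length(date), reinterpretAsFloat64(0))
        for totals in per_breakdown_totals:  # groupArray(ifNull(total, 0))
            acc = [a + (0.0 if t is None else t) for a, t in zip(acc, totals)]
        return acc

    assert fold_totals([[1.5, None], [2.0, 3.0]], 2) == [3.5, 3.0]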
diff --git a/posthog/migrations/0487_team_survey_config.py b/posthog/migrations/0487_team_survey_config.py
new file mode 100644
index 0000000000000..5fed324fc21ac
--- /dev/null
+++ b/posthog/migrations/0487_team_survey_config.py
@@ -0,0 +1,17 @@
+# Generated by Django 4.2.15 on 2024-10-10 17:48
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+ dependencies = [
+ ("posthog", "0486_cohort_last_error_at"),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name="team",
+ name="survey_config",
+ field=models.JSONField(blank=True, null=True),
+ ),
+ ]
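The migration and the `Team.survey_config` field below are a small, self-contained addition riding along in this diff; the many `.ambr` snapshot updates that follow are mechanical, since every cached `SELECT ... FROM posthog_team` now lists the new column. A hypothetical usage sketch (the config keys are illustrative assumptions, not a documented schema):

    from posthog.models.team.team import Team

    def set_survey_appearance(team: Team) -> None:
        # survey_config is a nullable free-form JSONField, so any
        # JSON-serializable dict is accepted here.
        team.survey_config = {"appearance": {"backgroundColor": "#eeeded"}}
        team.save(update_fields=["survey_config"])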
diff --git a/posthog/models/filters/test/__snapshots__/test_filter.ambr b/posthog/models/filters/test/__snapshots__/test_filter.ambr
index 1361c0f5facfc..cd2bf85143d05 100644
--- a/posthog/models/filters/test/__snapshots__/test_filter.ambr
+++ b/posthog/models/filters/test/__snapshots__/test_filter.ambr
@@ -26,6 +26,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -90,6 +91,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -154,6 +156,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -218,6 +221,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -282,6 +286,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
diff --git a/posthog/models/team/team.py b/posthog/models/team/team.py
index 89b119a0bfe98..30d2cb546892c 100644
--- a/posthog/models/team/team.py
+++ b/posthog/models/team/team.py
@@ -247,6 +247,7 @@ class Meta:
session_recording_linked_flag = models.JSONField(null=True, blank=True)
session_recording_network_payload_capture_config = models.JSONField(null=True, blank=True)
session_replay_config = models.JSONField(null=True, blank=True)
+ survey_config = models.JSONField(null=True, blank=True)
capture_console_log_opt_in = models.BooleanField(null=True, blank=True, default=True)
capture_performance_opt_in = models.BooleanField(null=True, blank=True, default=True)
surveys_opt_in = models.BooleanField(null=True, blank=True)
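The new field is a free-form JSONField like its `session_replay_config` neighbor, so per-team survey defaults could be stored directly on it. A hypothetical usage sketch (the `appearance` key and its contents are illustrative assumptions; this diff defines no schema for the field):

```python
# Hypothetical example: the keys below are illustrative assumptions only,
# since the diff adds the column without constraining its contents.
from posthog.models import Team

team = Team.objects.get(pk=1)
team.survey_config = {"appearance": {"backgroundColor": "#eeeded", "whiteLabel": False}}
team.save(update_fields=["survey_config"])
```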
diff --git a/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr b/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr
index a7a08b736a83e..cf5c8dd3c7dde 100644
--- a/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr
+++ b/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr
@@ -26,6 +26,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -90,6 +91,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -154,6 +156,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -218,6 +221,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -282,6 +286,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -454,6 +459,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -543,6 +549,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -911,6 +918,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1007,6 +1015,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1071,6 +1080,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1135,6 +1145,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1199,6 +1210,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1263,6 +1275,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1327,6 +1340,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1423,6 +1437,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1619,6 +1634,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -1753,6 +1769,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -2213,6 +2230,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -2347,6 +2365,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -2495,6 +2514,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -2846,6 +2866,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -2999,6 +3020,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -3455,6 +3477,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -3621,6 +3644,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -3894,6 +3918,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -4117,6 +4142,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -4251,6 +4277,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -5116,6 +5143,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -5250,6 +5278,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -5687,6 +5716,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -5842,6 +5872,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -6342,6 +6373,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -6495,6 +6527,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -6936,6 +6969,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -7070,6 +7104,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
diff --git a/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr b/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr
index a15676702daaa..bae606bc1022e 100644
--- a/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr
+++ b/posthog/tasks/test/__snapshots__/test_process_scheduled_changes.ambr
@@ -97,6 +97,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
@@ -359,6 +360,7 @@
"posthog_team"."session_recording_linked_flag",
"posthog_team"."session_recording_network_payload_capture_config",
"posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
"posthog_team"."capture_console_log_opt_in",
"posthog_team"."capture_performance_opt_in",
"posthog_team"."surveys_opt_in",
diff --git a/posthog/templates/head.html b/posthog/templates/head.html
index a4aaf1f620c94..472d901fb218c 100644
--- a/posthog/templates/head.html
+++ b/posthog/templates/head.html
@@ -39,6 +39,11 @@
window.SENTRY_ENVIRONMENT = '{{ sentry_environment | escapejs }}';
{% endif %}
+{% if stripe_public_key %}
+    <script>
+        window.STRIPE_PUBLIC_KEY = '{{ stripe_public_key | escapejs }}';
+    </script>
+{% endif %}
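For the conditional to render anything, the view serving this template must place `stripe_public_key` into the context. A hedged sketch of what that wiring could look like (the settings attribute name `STRIPE_PUBLIC_KEY` is an assumption for illustration; the actual source of the key is not shown in this diff):

```python
# Sketch only: where stripe_public_key might come from. The settings attribute
# name is an assumption; this diff does not include the context-building code.
from django.conf import settings

def base_template_context() -> dict:
    return {"stripe_public_key": getattr(settings, "STRIPE_PUBLIC_KEY", None)}
```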