diff --git a/.github/actions/run-backend-tests/action.yml b/.github/actions/run-backend-tests/action.yml
index 3f13a10f1e6dc..a45caaf8aef18 100644
--- a/.github/actions/run-backend-tests/action.yml
+++ b/.github/actions/run-backend-tests/action.yml
@@ -145,7 +145,6 @@ runs:
run: echo "PYTEST_ARGS=--snapshot-update" >> $GITHUB_ENV # We can only update snapshots within the PostHog org
# Tests
-
- name: Run FOSS tests
if: ${{ inputs.segment == 'FOSS' }}
env:
diff --git a/.github/workflows/container-images-cd.yml b/.github/workflows/container-images-cd.yml
index ada203c979ec4..8b1312bf948a0 100644
--- a/.github/workflows/container-images-cd.yml
+++ b/.github/workflows/container-images-cd.yml
@@ -107,7 +107,7 @@ jobs:
message: |
{
"image_tag": "${{ steps.build.outputs.digest }}"
- }
+ }
- name: Check for changes in plugins directory
id: check_changes_plugins
diff --git a/.run/Celery.run.xml b/.run/Celery.run.xml
index 40853a2208f03..a3d60853762bd 100644
--- a/.run/Celery.run.xml
+++ b/.run/Celery.run.xml
@@ -5,12 +5,13 @@
-
-
-
+
+
+
+
diff --git a/bin/celery-queues.env b/bin/celery-queues.env
index ba4376ab93ca1..191de05c0a576 100644
--- a/bin/celery-queues.env
+++ b/bin/celery-queues.env
@@ -2,4 +2,4 @@
# Important: Add new queues to make Celery consume tasks from them.
# NOTE: Keep in sync with posthog/tasks/utils.py
-CELERY_WORKER_QUEUES=celery,stats,email,insight_export,insight_refresh,analytics_queries,exports,subscription_delivery,usage_reports
\ No newline at end of file
+CELERY_WORKER_QUEUES=celery,stats,email,insight_export,insight_refresh,analytics_queries,exports,subscription_delivery,usage_reports,session_replay_embeddings
diff --git a/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiment_secondary_results.ambr b/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiment_secondary_results.ambr
index ce4c16033902c..8fc6f83c16cdd 100644
--- a/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiment_secondary_results.ambr
+++ b/ee/clickhouse/views/test/__snapshots__/test_clickhouse_experiment_secondary_results.ambr
@@ -1,7 +1,7 @@
# serializer version: 1
# name: ClickhouseTestExperimentSecondaryResults.test_basic_secondary_metric_results
'''
- /* user_id:127 celery:posthog.tasks.tasks.sync_insight_caching_state */
+ /* user_id:124 celery:posthog.tasks.tasks.sync_insight_caching_state */
SELECT team_id,
date_diff('second', max(timestamp), now()) AS age
FROM events
diff --git a/ee/session_recordings/ai/__init__.py b/ee/session_recordings/ai/__init__.py
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/ee/session_recordings/ai/generate_embeddings.py b/ee/session_recordings/ai/generate_embeddings.py
new file mode 100644
index 0000000000000..5837b9d5f7cc3
--- /dev/null
+++ b/ee/session_recordings/ai/generate_embeddings.py
@@ -0,0 +1,192 @@
+from openai import OpenAI
+
+from typing import Dict, Any, List
+
+from prometheus_client import Histogram, Counter
+
+from posthog.models import Team
+
+from posthog.session_recordings.queries.session_replay_events import SessionReplayEvents
+from ee.session_recordings.ai.utils import (
+ SessionSummaryPromptData,
+ reduce_elements_chain,
+ simplify_window_id,
+ format_dates,
+ collapse_sequence_of_events,
+)
+from structlog import get_logger
+from posthog.clickhouse.client import sync_execute
+import datetime
+import pytz
+
+GENERATE_RECORDING_EMBEDDING_TIMING = Histogram(
+ "posthog_session_recordings_generate_recording_embedding",
+ "Time spent generating recording embeddings for a single session",
+)
+SESSION_SKIPPED_WHEN_GENERATING_EMBEDDINGS = Counter(
+ "posthog_session_recordings_skipped_when_generating_embeddings",
+ "Number of sessions skipped when generating embeddings",
+)
+SESSION_EMBEDDINGS_GENERATED = Counter(
+ "posthog_session_recordings_embeddings_generated",
+ "Number of session embeddings generated",
+)
+SESSION_EMBEDDINGS_WRITTEN_TO_CLICKHOUSE = Counter(
+ "posthog_session_recordings_embeddings_written_to_clickhouse",
+ "Number of session embeddings written to Clickhouse",
+)
+
+logger = get_logger(__name__)
+
+# TODO move these to settings
+BATCH_FLUSH_SIZE = 10
+MIN_DURATION_INCLUDE_SECONDS = 120
+
+
+def fetch_recordings_without_embeddings(team: Team | int, offset=0) -> List[str]:
+ if isinstance(team, int):
+ team = Team.objects.get(id=team)
+
+ query = """
+ WITH embedding_ids AS
+ (
+ SELECT
+ session_id
+ from
+ session_replay_embeddings
+ where
+ team_id = %(team_id)s
+ -- don't load all data for all time
+ and generation_timestamp > now() - INTERVAL 7 DAY
+ )
+ SELECT session_id
+ FROM
+ session_replay_events
+ WHERE
+ session_id NOT IN embedding_ids
+ AND team_id = %(team_id)s
+ -- must be a completed session
+ and min_first_timestamp < now() - INTERVAL 1 DAY
+ -- let's not load all data for all time
+ -- will definitely need to do something about this length of time
+ and min_first_timestamp > now() - INTERVAL 7 DAY
+ GROUP BY session_id
+ HAVING dateDiff('second', min(min_first_timestamp), max(max_last_timestamp)) > %(min_duration_include_seconds)s
+ LIMIT %(batch_flush_size)s
+ -- when running locally the offset is used for paging
+ -- when running in celery the offset is not used
+ OFFSET %(offset)s
+ """
+
+ return [
+ x[0]
+ for x in sync_execute(
+ query,
+ {
+ "team_id": team.pk,
+ "batch_flush_size": BATCH_FLUSH_SIZE,
+ "offset": offset,
+ "min_duration_include_seconds": MIN_DURATION_INCLUDE_SECONDS,
+ },
+ )
+ ]
+
+
+def embed_batch_of_recordings(recordings: List[str], team: Team | int) -> None:
+ if isinstance(team, int):
+ team = Team.objects.get(id=team)
+
+ logger.info(f"processing {len(recordings)} recordings to embed for team {team.pk}")
+
+    if len(recordings) > 0:
+ batched_embeddings = []
+ for session_id in recordings:
+ with GENERATE_RECORDING_EMBEDDING_TIMING.time():
+ embeddings = generate_recording_embeddings(session_id=session_id, team=team)
+
+ if embeddings:
+ SESSION_EMBEDDINGS_GENERATED.inc()
+ batched_embeddings.append(
+ {
+ "session_id": session_id,
+ "team_id": team.pk,
+ "embeddings": embeddings,
+ }
+ )
+
+ if len(batched_embeddings) > 0:
+ flush_embeddings_to_clickhouse(embeddings=batched_embeddings)
+
+
+def flush_embeddings_to_clickhouse(embeddings: List[Dict[str, Any]]) -> None:
+ sync_execute("INSERT INTO session_replay_embeddings (session_id, team_id, embeddings) VALUES", embeddings)
+ SESSION_EMBEDDINGS_WRITTEN_TO_CLICKHOUSE.inc(len(embeddings))
+
+
+def generate_recording_embeddings(session_id: str, team: Team | int) -> List[float] | None:
+ if isinstance(team, int):
+ team = Team.objects.get(id=team)
+
+ client = OpenAI()
+
+ session_metadata = SessionReplayEvents().get_metadata(session_id=str(session_id), team=team)
+ if not session_metadata:
+ logger.error(f"no session metadata found for session_id {session_id}")
+ SESSION_SKIPPED_WHEN_GENERATING_EMBEDDINGS.inc()
+ return None
+
+ session_events = SessionReplayEvents().get_events(
+ session_id=str(session_id),
+ team=team,
+ metadata=session_metadata,
+ events_to_ignore=[
+ "$feature_flag_called",
+ ],
+ )
+
+ if not session_events or not session_events[0] or not session_events[1]:
+ logger.error(f"no events found for session_id {session_id}")
+ SESSION_SKIPPED_WHEN_GENERATING_EMBEDDINGS.inc()
+ return None
+
+ processed_sessions = collapse_sequence_of_events(
+ format_dates(
+ reduce_elements_chain(
+ simplify_window_id(SessionSummaryPromptData(columns=session_events[0], results=session_events[1]))
+ ),
+ start=datetime.datetime(1970, 1, 1, tzinfo=pytz.UTC), # epoch timestamp
+ )
+ )
+
+ processed_sessions_index = processed_sessions.column_index("event")
+ current_url_index = processed_sessions.column_index("$current_url")
+ elements_chain_index = processed_sessions.column_index("elements_chain")
+
+ input = (
+ str(session_metadata)
+ + "\n"
+ + "\n".join(
+ compact_result(
+ event_name=result[processed_sessions_index] if processed_sessions_index is not None else "",
+ current_url=result[current_url_index] if current_url_index is not None else "",
+ elements_chain=result[elements_chain_index] if elements_chain_index is not None else "",
+ )
+ for result in processed_sessions.results
+ )
+ )
+
+ embeddings = (
+ client.embeddings.create(
+ input=input,
+ model="text-embedding-3-small",
+ )
+ .data[0]
+ .embedding
+ )
+
+ return embeddings
+
+
+def compact_result(event_name: str, current_url: str, elements_chain: Dict[str, str] | str) -> str:
+ elements_string = elements_chain if isinstance(elements_chain, str) else ", ".join(str(e) for e in elements_chain)
+ return f"{event_name} {current_url} {elements_string}"
diff --git a/posthog/session_recordings/session_summary/summarize_session.py b/ee/session_recordings/ai/utils.py
similarity index 60%
rename from posthog/session_recordings/session_summary/summarize_session.py
rename to ee/session_recordings/ai/utils.py
index 85e0891ccde52..cbdab8eeb97f7 100644
--- a/posthog/session_recordings/session_summary/summarize_session.py
+++ b/ee/session_recordings/ai/utils.py
@@ -3,46 +3,7 @@
from typing import List, Dict, Any
-import openai
-
-from prometheus_client import Histogram
-
-from posthog.api.activity_log import ServerTimingsGathered
-from posthog.models import User, Team
from posthog.models.element import chain_to_elements
-from posthog.session_recordings.models.session_recording import SessionRecording
-
-from posthog.session_recordings.queries.session_replay_events import SessionReplayEvents
-
-from posthog.utils import get_instance_region
-
-TOKENS_IN_PROMPT_HISTOGRAM = Histogram(
- "posthog_session_summary_tokens_in_prompt_histogram",
- "histogram of the number of tokens in the prompt used to generate a session summary",
- buckets=[
- 0,
- 10,
- 50,
- 100,
- 500,
- 1000,
- 2000,
- 3000,
- 4000,
- 5000,
- 6000,
- 7000,
- 8000,
- 10000,
- 20000,
- 30000,
- 40000,
- 50000,
- 100000,
- 128000,
- float("inf"),
- ],
-)
@dataclasses.dataclass
@@ -243,102 +204,3 @@ def collapse_sequence_of_events(session_events: SessionSummaryPromptData) -> Ses
collapsed_results.append(result)
return dataclasses.replace(session_events, results=collapsed_results)
-
-
-def summarize_recording(recording: SessionRecording, user: User, team: Team):
- timer = ServerTimingsGathered()
-
- with timer("get_metadata"):
- session_metadata = SessionReplayEvents().get_metadata(session_id=str(recording.session_id), team=team)
- if not session_metadata:
- raise ValueError(f"no session metadata found for session_id {recording.session_id}")
-
- with timer("get_events"):
- session_events = SessionReplayEvents().get_events(
- session_id=str(recording.session_id),
- team=team,
- metadata=session_metadata,
- events_to_ignore=[
- "$feature_flag_called",
- ],
- )
- if not session_events or not session_events[0] or not session_events[1]:
- raise ValueError(f"no events found for session_id {recording.session_id}")
-
- # convert session_metadata to a Dict from a TypedDict
- # so that we can amend its values freely
- session_metadata_dict = dict(session_metadata)
-
- del session_metadata_dict["distinct_id"]
- start_time = session_metadata["start_time"]
- session_metadata_dict["start_time"] = start_time.isoformat()
- session_metadata_dict["end_time"] = session_metadata["end_time"].isoformat()
-
- with timer("generate_prompt"):
- prompt_data = deduplicate_urls(
- collapse_sequence_of_events(
- format_dates(
- reduce_elements_chain(
- simplify_window_id(
- SessionSummaryPromptData(columns=session_events[0], results=session_events[1])
- )
- ),
- start=start_time,
- )
- )
- )
-
- instance_region = get_instance_region() or "HOBBY"
-
- with timer("openai_completion"):
- result = openai.chat.completions.create(
- # model="gpt-4-1106-preview", # allows 128k tokens
- model="gpt-4", # allows 8k tokens
- temperature=0.7,
- messages=[
- {
- "role": "system",
- "content": """
- Session Replay is PostHog's tool to record visits to web sites and apps.
- We also gather events that occur like mouse clicks and key presses.
- You write two or three sentence concise and simple summaries of those sessions based on a prompt.
- You are more likely to mention errors or things that look like business success such as checkout events.
- You don't help with other knowledge.""",
- },
- {
- "role": "user",
- "content": f"""the session metadata I have is {session_metadata_dict}.
- it gives an overview of activity and duration""",
- },
- {
- "role": "user",
- "content": f"""
- URLs associated with the events can be found in this mapping {prompt_data.url_mapping}.
- """,
- },
- {
- "role": "user",
- "content": f"""the session events I have are {prompt_data.results}.
- with columns {prompt_data.columns}.
- they give an idea of what happened and when,
- if present the elements_chain extracted from the html can aid in understanding
- but should not be directly used in your response""",
- },
- {
- "role": "user",
- "content": """
- generate a two or three sentence summary of the session.
- use as concise and simple language as is possible.
- assume a reading age of around 12 years old.
- generate no text other than the summary.""",
- },
- ],
- user=f"{instance_region}/{user.pk}", # allows 8k tokens
- )
-
- usage = result.usage.prompt_tokens if result.usage else None
- if usage:
- TOKENS_IN_PROMPT_HISTOGRAM.observe(usage)
-
- content: str = result.choices[0].message.content or ""
- return {"content": content, "timings": timer.get_all_timings()}
diff --git a/ee/session_recordings/session_summary/__init__.py b/ee/session_recordings/session_summary/__init__.py
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/ee/session_recordings/session_summary/summarize_session.py b/ee/session_recordings/session_summary/summarize_session.py
new file mode 100644
index 0000000000000..26541f0a4fd09
--- /dev/null
+++ b/ee/session_recordings/session_summary/summarize_session.py
@@ -0,0 +1,147 @@
+import openai
+
+from prometheus_client import Histogram
+
+from posthog.api.activity_log import ServerTimingsGathered
+from posthog.models import User, Team
+from posthog.session_recordings.models.session_recording import SessionRecording
+
+from posthog.session_recordings.queries.session_replay_events import SessionReplayEvents
+
+from posthog.utils import get_instance_region
+
+from ee.session_recordings.ai.utils import (
+ SessionSummaryPromptData,
+ reduce_elements_chain,
+ simplify_window_id,
+ deduplicate_urls,
+ format_dates,
+ collapse_sequence_of_events,
+)
+
+TOKENS_IN_PROMPT_HISTOGRAM = Histogram(
+ "posthog_session_summary_tokens_in_prompt_histogram",
+ "histogram of the number of tokens in the prompt used to generate a session summary",
+ buckets=[
+ 0,
+ 10,
+ 50,
+ 100,
+ 500,
+ 1000,
+ 2000,
+ 3000,
+ 4000,
+ 5000,
+ 6000,
+ 7000,
+ 8000,
+ 10000,
+ 20000,
+ 30000,
+ 40000,
+ 50000,
+ 100000,
+ 128000,
+ float("inf"),
+ ],
+)
+
+
+def summarize_recording(recording: SessionRecording, user: User, team: Team):
+ timer = ServerTimingsGathered()
+
+ with timer("get_metadata"):
+ session_metadata = SessionReplayEvents().get_metadata(session_id=str(recording.session_id), team=team)
+ if not session_metadata:
+ raise ValueError(f"no session metadata found for session_id {recording.session_id}")
+
+ with timer("get_events"):
+ session_events = SessionReplayEvents().get_events(
+ session_id=str(recording.session_id),
+ team=team,
+ metadata=session_metadata,
+ events_to_ignore=[
+ "$feature_flag_called",
+ ],
+ )
+ if not session_events or not session_events[0] or not session_events[1]:
+ raise ValueError(f"no events found for session_id {recording.session_id}")
+
+ # convert session_metadata to a Dict from a TypedDict
+ # so that we can amend its values freely
+ session_metadata_dict = dict(session_metadata)
+
+ del session_metadata_dict["distinct_id"]
+ start_time = session_metadata["start_time"]
+ session_metadata_dict["start_time"] = start_time.isoformat()
+ session_metadata_dict["end_time"] = session_metadata["end_time"].isoformat()
+
+ with timer("generate_prompt"):
+ prompt_data = deduplicate_urls(
+ collapse_sequence_of_events(
+ format_dates(
+ reduce_elements_chain(
+ simplify_window_id(
+ SessionSummaryPromptData(columns=session_events[0], results=session_events[1])
+ )
+ ),
+ start=start_time,
+ )
+ )
+ )
+
+ instance_region = get_instance_region() or "HOBBY"
+
+ with timer("openai_completion"):
+ result = openai.chat.completions.create(
+ # model="gpt-4-1106-preview", # allows 128k tokens
+ model="gpt-4", # allows 8k tokens
+ temperature=0.7,
+ messages=[
+ {
+ "role": "system",
+ "content": """
+ Session Replay is PostHog's tool to record visits to web sites and apps.
+ We also gather events that occur like mouse clicks and key presses.
+ You write two or three sentence concise and simple summaries of those sessions based on a prompt.
+ You are more likely to mention errors or things that look like business success such as checkout events.
+ You don't help with other knowledge.""",
+ },
+ {
+ "role": "user",
+ "content": f"""the session metadata I have is {session_metadata_dict}.
+ it gives an overview of activity and duration""",
+ },
+ {
+ "role": "user",
+ "content": f"""
+ URLs associated with the events can be found in this mapping {prompt_data.url_mapping}.
+ """,
+ },
+ {
+ "role": "user",
+ "content": f"""the session events I have are {prompt_data.results}.
+ with columns {prompt_data.columns}.
+ they give an idea of what happened and when,
+ if present the elements_chain extracted from the html can aid in understanding
+ but should not be directly used in your response""",
+ },
+ {
+ "role": "user",
+ "content": """
+ generate a two or three sentence summary of the session.
+ use as concise and simple language as is possible.
+ assume a reading age of around 12 years old.
+ generate no text other than the summary.""",
+ },
+ ],
+ user=f"{instance_region}/{user.pk}", # allows 8k tokens
+ )
+
+ usage = result.usage.prompt_tokens if result.usage else None
+ if usage:
+ TOKENS_IN_PROMPT_HISTOGRAM.observe(usage)
+
+ content: str = result.choices[0].message.content or ""
+ return {"content": content, "timings": timer.get_all_timings()}
diff --git a/ee/session_recordings/session_summary/test/__init__.py b/ee/session_recordings/session_summary/test/__init__.py
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/posthog/session_recordings/session_summary/test/test_summarize_session.py b/ee/session_recordings/session_summary/test/test_summarize_session.py
similarity index 98%
rename from posthog/session_recordings/session_summary/test/test_summarize_session.py
rename to ee/session_recordings/session_summary/test/test_summarize_session.py
index 266e19ef19c19..69412608dd3b9 100644
--- a/posthog/session_recordings/session_summary/test/test_summarize_session.py
+++ b/ee/session_recordings/session_summary/test/test_summarize_session.py
@@ -2,7 +2,7 @@
from dateutil.parser import isoparse
-from posthog.session_recordings.session_summary.summarize_session import (
+from ee.session_recordings.session_summary.summarize_session import (
format_dates,
simplify_window_id,
deduplicate_urls,
diff --git a/ee/tasks/__init__.py b/ee/tasks/__init__.py
index 4bc793399424b..3eb410b6c8e67 100644
--- a/ee/tasks/__init__.py
+++ b/ee/tasks/__init__.py
@@ -7,6 +7,7 @@
handle_subscription_value_change,
schedule_all_subscriptions,
)
+from .replay import embed_batch_of_recordings_task, generate_recordings_embeddings_batch
# As our EE tasks are not included at startup for Celery, we need to ensure they are declared here so that they are imported by posthog/settings/celery.py
@@ -16,4 +17,6 @@
"schedule_all_subscriptions",
"deliver_subscription_report",
"handle_subscription_value_change",
+ "embed_batch_of_recordings_task",
+ "generate_recordings_embeddings_batch",
]
diff --git a/ee/tasks/replay.py b/ee/tasks/replay.py
new file mode 100644
index 0000000000000..4554850d01801
--- /dev/null
+++ b/ee/tasks/replay.py
@@ -0,0 +1,48 @@
+from typing import Any, List
+
+import structlog
+from celery import shared_task
+
+from ee.session_recordings.ai.generate_embeddings import (
+ fetch_recordings_without_embeddings,
+ embed_batch_of_recordings,
+)
+from posthog import settings
+from posthog.models import Team
+from posthog.tasks.utils import CeleryQueue
+
+logger = structlog.get_logger(__name__)
+
+
+@shared_task(ignore_result=False, queue=CeleryQueue.SESSION_REPLAY_EMBEDDINGS.value)
+def embed_batch_of_recordings_task(recordings: List[str], team_id: int) -> None:
+ embed_batch_of_recordings(recordings, team_id)
+
+
+@shared_task(ignore_result=True)
+def generate_recordings_embeddings_batch() -> None:
+ # see https://docs.celeryq.dev/en/stable/userguide/canvas.html
+ # we have three jobs to do here
+ # 1. get a batch of recordings
+ # 2. for each recording - ideally in parallel - generate an embedding
+ # 3. update CH with the embeddings in one update operation
+ # in Celery that's a chain of tasks
+ # with step 2 being a group of tasks
+ # chord(
+ # embed_single_recording.si(recording.session_id, recording.team_id)
+ # for recording in fetch_recordings_without_embeddings(int(team))
+ # )(generate_recordings_embeddings_batch_on_complete.si())
+ # but even the docs call out performance impact of synchronising tasks
+ #
+ # so, for now, we'll do that naively
+
+ for team in settings.REPLAY_EMBEDDINGS_ALLOWED_TEAMS:
+ try:
+ recordings = fetch_recordings_without_embeddings(int(team))
+ embed_batch_of_recordings_task.si(recordings, int(team)).apply_async()
+ except Team.DoesNotExist:
+ logger.info(f"[generate_recordings_embeddings_batch] Team {team} does not exist. Skipping.")
+ pass
+ except Exception as e:
+ logger.error(f"[generate_recordings_embeddings_batch] Error: {e}.", exc_info=True, error=e)
+ pass
diff --git a/frontend/__snapshots__/components-command-bar--actions--dark.png b/frontend/__snapshots__/components-command-bar--actions--dark.png
index 55ff12cdf7a64..e15ffa5164b1b 100644
Binary files a/frontend/__snapshots__/components-command-bar--actions--dark.png and b/frontend/__snapshots__/components-command-bar--actions--dark.png differ
diff --git a/frontend/__snapshots__/components-command-bar--actions--light.png b/frontend/__snapshots__/components-command-bar--actions--light.png
index 3b8e1b2efd516..a886435fba6b1 100644
Binary files a/frontend/__snapshots__/components-command-bar--actions--light.png and b/frontend/__snapshots__/components-command-bar--actions--light.png differ
diff --git a/frontend/__snapshots__/components-command-bar--search--dark.png b/frontend/__snapshots__/components-command-bar--search--dark.png
index c5d76efb38fcd..4546e3221a8b2 100644
Binary files a/frontend/__snapshots__/components-command-bar--search--dark.png and b/frontend/__snapshots__/components-command-bar--search--dark.png differ
diff --git a/frontend/__snapshots__/components-command-bar--search--light.png b/frontend/__snapshots__/components-command-bar--search--light.png
index b8791bfc9ce45..fbcf9433f8f5f 100644
Binary files a/frontend/__snapshots__/components-command-bar--search--light.png and b/frontend/__snapshots__/components-command-bar--search--light.png differ
diff --git a/frontend/__snapshots__/components-itemperformanceevent--initial-body-display--dark.png b/frontend/__snapshots__/components-itemperformanceevent--initial-body-display--dark.png
new file mode 100644
index 0000000000000..a1aada324a692
Binary files /dev/null and b/frontend/__snapshots__/components-itemperformanceevent--initial-body-display--dark.png differ
diff --git a/frontend/__snapshots__/components-itemperformanceevent--initial-body-display--light.png b/frontend/__snapshots__/components-itemperformanceevent--initial-body-display--light.png
new file mode 100644
index 0000000000000..409ddc95e93d1
Binary files /dev/null and b/frontend/__snapshots__/components-itemperformanceevent--initial-body-display--light.png differ
diff --git a/frontend/__snapshots__/components-itemperformanceevent--initial-headers-display--dark.png b/frontend/__snapshots__/components-itemperformanceevent--initial-headers-display--dark.png
new file mode 100644
index 0000000000000..c6e46bbfdedf9
Binary files /dev/null and b/frontend/__snapshots__/components-itemperformanceevent--initial-headers-display--dark.png differ
diff --git a/frontend/__snapshots__/components-itemperformanceevent--initial-headers-display--light.png b/frontend/__snapshots__/components-itemperformanceevent--initial-headers-display--light.png
new file mode 100644
index 0000000000000..bc602073b3194
Binary files /dev/null and b/frontend/__snapshots__/components-itemperformanceevent--initial-headers-display--light.png differ
diff --git a/frontend/__snapshots__/components-not-found--not-found--dark.png b/frontend/__snapshots__/components-not-found--not-found--dark.png
index 0490729ac8368..30c06028ae035 100644
Binary files a/frontend/__snapshots__/components-not-found--not-found--dark.png and b/frontend/__snapshots__/components-not-found--not-found--dark.png differ
diff --git a/frontend/__snapshots__/components-not-found--not-found--light.png b/frontend/__snapshots__/components-not-found--not-found--light.png
index 5886f7561c08d..2824a06203bbd 100644
Binary files a/frontend/__snapshots__/components-not-found--not-found--light.png and b/frontend/__snapshots__/components-not-found--not-found--light.png differ
diff --git a/frontend/__snapshots__/layout-feature-previews-modal--basic--dark.png b/frontend/__snapshots__/layout-feature-previews-modal--basic--dark.png
deleted file mode 100644
index 134aaab94cf59..0000000000000
Binary files a/frontend/__snapshots__/layout-feature-previews-modal--basic--dark.png and /dev/null differ
diff --git a/frontend/__snapshots__/layout-feature-previews-modal--basic--light.png b/frontend/__snapshots__/layout-feature-previews-modal--basic--light.png
deleted file mode 100644
index 134aaab94cf59..0000000000000
Binary files a/frontend/__snapshots__/layout-feature-previews-modal--basic--light.png and /dev/null differ
diff --git a/frontend/__snapshots__/layout-feature-previews-modal--empty--dark.png b/frontend/__snapshots__/layout-feature-previews-modal--empty--dark.png
deleted file mode 100644
index 439e583937f2f..0000000000000
Binary files a/frontend/__snapshots__/layout-feature-previews-modal--empty--dark.png and /dev/null differ
diff --git a/frontend/__snapshots__/layout-feature-previews-modal--empty--light.png b/frontend/__snapshots__/layout-feature-previews-modal--empty--light.png
deleted file mode 100644
index 19aebcf456dc1..0000000000000
Binary files a/frontend/__snapshots__/layout-feature-previews-modal--empty--light.png and /dev/null differ
diff --git a/frontend/__snapshots__/layout-feature-previews-modal--with-constrained-feature--dark.png b/frontend/__snapshots__/layout-feature-previews-modal--with-constrained-feature--dark.png
deleted file mode 100644
index 134aaab94cf59..0000000000000
Binary files a/frontend/__snapshots__/layout-feature-previews-modal--with-constrained-feature--dark.png and /dev/null differ
diff --git a/frontend/__snapshots__/layout-feature-previews-modal--with-constrained-feature--light.png b/frontend/__snapshots__/layout-feature-previews-modal--with-constrained-feature--light.png
deleted file mode 100644
index 134aaab94cf59..0000000000000
Binary files a/frontend/__snapshots__/layout-feature-previews-modal--with-constrained-feature--light.png and /dev/null differ
diff --git a/frontend/__snapshots__/lemon-ui-lemon-checkbox--disabled--light.png b/frontend/__snapshots__/lemon-ui-lemon-checkbox--disabled--light.png
index 4d1e6546cad32..b953663cb43ea 100644
Binary files a/frontend/__snapshots__/lemon-ui-lemon-checkbox--disabled--light.png and b/frontend/__snapshots__/lemon-ui-lemon-checkbox--disabled--light.png differ
diff --git a/frontend/__snapshots__/lemon-ui-lemon-checkbox--disabled-with-reason--light.png b/frontend/__snapshots__/lemon-ui-lemon-checkbox--disabled-with-reason--light.png
index 4d1e6546cad32..b953663cb43ea 100644
Binary files a/frontend/__snapshots__/lemon-ui-lemon-checkbox--disabled-with-reason--light.png and b/frontend/__snapshots__/lemon-ui-lemon-checkbox--disabled-with-reason--light.png differ
diff --git a/frontend/__snapshots__/lemon-ui-lemon-checkbox--no-label--dark.png b/frontend/__snapshots__/lemon-ui-lemon-checkbox--no-label--dark.png
index 6672835b2b74f..d56bb3b8b5951 100644
Binary files a/frontend/__snapshots__/lemon-ui-lemon-checkbox--no-label--dark.png and b/frontend/__snapshots__/lemon-ui-lemon-checkbox--no-label--dark.png differ
diff --git a/frontend/__snapshots__/lemon-ui-lemon-checkbox--no-label--light.png b/frontend/__snapshots__/lemon-ui-lemon-checkbox--no-label--light.png
index cff1760f7f6d3..e4ff4460dcf2e 100644
Binary files a/frontend/__snapshots__/lemon-ui-lemon-checkbox--no-label--light.png and b/frontend/__snapshots__/lemon-ui-lemon-checkbox--no-label--light.png differ
diff --git a/frontend/__snapshots__/replay-player-failure--recent-recordings-404--dark.png b/frontend/__snapshots__/replay-player-failure--recent-recordings-404--dark.png
index 176e35273415b..62eccad63930c 100644
Binary files a/frontend/__snapshots__/replay-player-failure--recent-recordings-404--dark.png and b/frontend/__snapshots__/replay-player-failure--recent-recordings-404--dark.png differ
diff --git a/frontend/__snapshots__/replay-player-failure--recent-recordings-404--light.png b/frontend/__snapshots__/replay-player-failure--recent-recordings-404--light.png
index 82b82d8fc6329..afefe8eb9e546 100644
Binary files a/frontend/__snapshots__/replay-player-failure--recent-recordings-404--light.png and b/frontend/__snapshots__/replay-player-failure--recent-recordings-404--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-experiments--experiment-not-found--dark.png b/frontend/__snapshots__/scenes-app-experiments--experiment-not-found--dark.png
index 5e7de45b99fe9..8492cc3e6e6b8 100644
Binary files a/frontend/__snapshots__/scenes-app-experiments--experiment-not-found--dark.png and b/frontend/__snapshots__/scenes-app-experiments--experiment-not-found--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-experiments--experiment-not-found--light.png b/frontend/__snapshots__/scenes-app-experiments--experiment-not-found--light.png
index a22ddbb2215c5..f71bab38378ed 100644
Binary files a/frontend/__snapshots__/scenes-app-experiments--experiment-not-found--light.png and b/frontend/__snapshots__/scenes-app-experiments--experiment-not-found--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-feature-flags--feature-flag-not-found--dark.png b/frontend/__snapshots__/scenes-app-feature-flags--feature-flag-not-found--dark.png
index a898fec7fd1aa..b4993bddf4163 100644
Binary files a/frontend/__snapshots__/scenes-app-feature-flags--feature-flag-not-found--dark.png and b/frontend/__snapshots__/scenes-app-feature-flags--feature-flag-not-found--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-feature-flags--feature-flag-not-found--light.png b/frontend/__snapshots__/scenes-app-feature-flags--feature-flag-not-found--light.png
index 3ab2a31a26205..f48ee685ab294 100644
Binary files a/frontend/__snapshots__/scenes-app-feature-flags--feature-flag-not-found--light.png and b/frontend/__snapshots__/scenes-app-feature-flags--feature-flag-not-found--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--dark.png b/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--dark.png
index 87a76f1653c40..5ba95095f4a18 100644
Binary files a/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--dark.png and b/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--light.png b/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--light.png
index ea13f3250e585..43981441292e7 100644
Binary files a/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--light.png and b/frontend/__snapshots__/scenes-app-feature-flags--new-feature-flag--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-features--not-found-early-access--dark.png b/frontend/__snapshots__/scenes-app-features--not-found-early-access--dark.png
index cd9f9b82f551f..398b149c24685 100644
Binary files a/frontend/__snapshots__/scenes-app-features--not-found-early-access--dark.png and b/frontend/__snapshots__/scenes-app-features--not-found-early-access--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-features--not-found-early-access--light.png b/frontend/__snapshots__/scenes-app-features--not-found-early-access--light.png
index 3327b43d90d26..f69936ed7ad9b 100644
Binary files a/frontend/__snapshots__/scenes-app-features--not-found-early-access--light.png and b/frontend/__snapshots__/scenes-app-features--not-found-early-access--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights--trends-line--light--webkit.png b/frontend/__snapshots__/scenes-app-insights--trends-line--light--webkit.png
index d2643fc390633..d60cbfa5e7556 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--trends-line--light--webkit.png and b/frontend/__snapshots__/scenes-app-insights--trends-line--light--webkit.png differ
diff --git a/frontend/__snapshots__/scenes-app-notebooks--notebook-not-found--dark.png b/frontend/__snapshots__/scenes-app-notebooks--notebook-not-found--dark.png
index a3f6c4961ab2e..97ed4ba13cded 100644
Binary files a/frontend/__snapshots__/scenes-app-notebooks--notebook-not-found--dark.png and b/frontend/__snapshots__/scenes-app-notebooks--notebook-not-found--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-notebooks--notebook-not-found--light.png b/frontend/__snapshots__/scenes-app-notebooks--notebook-not-found--light.png
index 50c5bdf99404f..d25c27a388daa 100644
Binary files a/frontend/__snapshots__/scenes-app-notebooks--notebook-not-found--light.png and b/frontend/__snapshots__/scenes-app-notebooks--notebook-not-found--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-destinations-page--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-destinations-page--dark.png
index 36a0466e6c1f7..76d4b7bc85b67 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-destinations-page--dark.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-destinations-page--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-destinations-page--light.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-destinations-page--light.png
index 897331d9aaf6f..f173b278ad65f 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-destinations-page--light.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-destinations-page--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-configuration-404--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-configuration-404--dark.png
index d636ef141dcfb..b8ea18548b8a5 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-configuration-404--dark.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-configuration-404--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-configuration-404--light.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-configuration-404--light.png
index 90025aff9294f..33892e82d03b8 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-configuration-404--light.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-configuration-404--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-logs-batch-export--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-logs-batch-export--dark.png
index d636ef141dcfb..b8ea18548b8a5 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-logs-batch-export--dark.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-logs-batch-export--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-logs-batch-export--light.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-logs-batch-export--light.png
index 90025aff9294f..33892e82d03b8 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-logs-batch-export--light.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-logs-batch-export--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page--dark.png
index cfa99376fa2e2..9afb43cee1fdb 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page--dark.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page--light.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page--light.png
index 31cb858d40508..b21fdc8a2fe88 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page--light.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page-empty--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page-empty--dark.png
index cfa99376fa2e2..9afb43cee1fdb 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page-empty--dark.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page-empty--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page-empty--light.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page-empty--light.png
index 78bf1462dc8b8..e6dd3b53ae662 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page-empty--light.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page-empty--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-surveys--survey-not-found--dark.png b/frontend/__snapshots__/scenes-app-surveys--survey-not-found--dark.png
index 0d7aa6975e8db..36361aaa2ed0c 100644
Binary files a/frontend/__snapshots__/scenes-app-surveys--survey-not-found--dark.png and b/frontend/__snapshots__/scenes-app-surveys--survey-not-found--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-surveys--survey-not-found--light.png b/frontend/__snapshots__/scenes-app-surveys--survey-not-found--light.png
index 149f444c28a92..c11e131e006e3 100644
Binary files a/frontend/__snapshots__/scenes-app-surveys--survey-not-found--light.png and b/frontend/__snapshots__/scenes-app-surveys--survey-not-found--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-billing-v2--billing-unsubscribe-modal-data-pipelines--dark.png b/frontend/__snapshots__/scenes-other-billing-v2--billing-unsubscribe-modal-data-pipelines--dark.png
index 8af57a632597d..441e34109f7a1 100644
Binary files a/frontend/__snapshots__/scenes-other-billing-v2--billing-unsubscribe-modal-data-pipelines--dark.png and b/frontend/__snapshots__/scenes-other-billing-v2--billing-unsubscribe-modal-data-pipelines--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-billing-v2--billing-unsubscribe-modal-data-pipelines--light.png b/frontend/__snapshots__/scenes-other-billing-v2--billing-unsubscribe-modal-data-pipelines--light.png
index 57711a72c77c7..709cceb9886a7 100644
Binary files a/frontend/__snapshots__/scenes-other-billing-v2--billing-unsubscribe-modal-data-pipelines--light.png and b/frontend/__snapshots__/scenes-other-billing-v2--billing-unsubscribe-modal-data-pipelines--light.png differ
diff --git a/frontend/src/exporter/Exporter.scss b/frontend/src/exporter/Exporter.scss
index df0d456df55d7..76f92199f5689 100644
--- a/frontend/src/exporter/Exporter.scss
+++ b/frontend/src/exporter/Exporter.scss
@@ -1,9 +1,7 @@
@import '../styles/mixins';
body.ExporterBody {
- &.posthog-3000 {
- overflow: initial;
- }
+ overflow: initial;
}
.Exporter {
diff --git a/frontend/src/layout/navigation-3000/components/KeyboardShortcut.scss b/frontend/src/layout/navigation-3000/components/KeyboardShortcut.scss
index f2796b41f109e..74965e73091cc 100644
--- a/frontend/src/layout/navigation-3000/components/KeyboardShortcut.scss
+++ b/frontend/src/layout/navigation-3000/components/KeyboardShortcut.scss
@@ -8,21 +8,17 @@
justify-content: center;
min-width: 1.25rem;
height: 1.25rem;
- padding: 0 0.1875rem;
+ padding: 0.125rem 0.25rem;
+ font-size: 0.75rem;
color: var(--default);
text-transform: capitalize;
user-select: none;
background: var(--accent-3000);
+ border-color: var(--secondary-3000-button-border-hover);
border-width: 1px;
+ border-bottom-width: 2px;
border-radius: 0.25rem;
- .posthog-3000 & {
- padding: 0.125rem 0.25rem;
- font-size: 0.75rem;
- border-color: var(--secondary-3000-button-border-hover);
- border-bottom-width: 2px;
- }
-
.KeyboardShortcut--muted > & {
color: var(--muted);
background: none;
diff --git a/frontend/src/layout/navigation-3000/sidepanel/SidePanel.tsx b/frontend/src/layout/navigation-3000/sidepanel/SidePanel.tsx
index e1ccd80784230..46164e0270db1 100644
--- a/frontend/src/layout/navigation-3000/sidepanel/SidePanel.tsx
+++ b/frontend/src/layout/navigation-3000/sidepanel/SidePanel.tsx
@@ -177,6 +177,7 @@ export function SidePanel(): JSX.Element | null {
activeTab === tab ? closeSidePanel() : openSidePanel(tab as SidePanelTab)
}
data-attr={`sidepanel-tab-${tab}`}
+ data-ph-capture-attribute-state-before-click={activeTab === tab ? 'open' : 'closed'}
active={activeTab === tab}
type="secondary"
status="alt"
diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSupport.tsx b/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSupport.tsx
index f2a372f176643..381cd59181267 100644
--- a/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSupport.tsx
+++ b/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSupport.tsx
@@ -19,7 +19,7 @@ export const SidePanelSupport = (): JSX.Element => {
<>
-
+
diff --git a/frontend/src/lib/components/CommandBar/index.scss b/frontend/src/lib/components/CommandBar/index.scss
index c42b01543f804..85e6f6422f177 100644
--- a/frontend/src/lib/components/CommandBar/index.scss
+++ b/frontend/src/lib/components/CommandBar/index.scss
@@ -1,4 +1,4 @@
-.CommandBar__input {
+.LemonInput.CommandBar__input {
height: 2.75rem;
padding-right: 0.375rem;
padding-left: 0.75rem;
diff --git a/frontend/src/lib/components/CommandPalette/DebugCHQueries.tsx b/frontend/src/lib/components/CommandPalette/DebugCHQueries.tsx
index 71cac99c617fc..6a56553aae0d1 100644
--- a/frontend/src/lib/components/CommandPalette/DebugCHQueries.tsx
+++ b/frontend/src/lib/components/CommandPalette/DebugCHQueries.tsx
@@ -157,7 +157,6 @@ function DebugCHQueries(): JSX.Element {
dataSource={filteredQueries}
loading={queriesLoading}
loadingSkeletonRows={5}
- size="small"
pagination={undefined}
/>
>
diff --git a/frontend/src/lib/components/DateFilter/RollingDateRangeFilter.scss b/frontend/src/lib/components/DateFilter/RollingDateRangeFilter.scss
index 244d3ccc5c711..6ccb132efdc5f 100644
--- a/frontend/src/lib/components/DateFilter/RollingDateRangeFilter.scss
+++ b/frontend/src/lib/components/DateFilter/RollingDateRangeFilter.scss
@@ -1,7 +1,7 @@
.RollingDateRangeFilter {
display: flex;
align-items: center;
- height: 2rem;
+ height: 1.6875rem;
min-height: 2rem;
padding: 1.25rem 0.5rem;
font-size: 0.875rem;
@@ -11,10 +11,6 @@
cursor: pointer;
transition: background 0.3s ease;
- .posthog-3000 & {
- height: 1.6875rem;
- }
-
&:hover {
background-color: var(--mid);
}
@@ -40,69 +36,47 @@
box-sizing: border-box;
display: flex;
align-items: center;
- height: 2rem;
+ height: 1.6875rem;
margin: 0;
margin-right: 0.25rem;
margin-left: 0.25rem;
- line-height: 1.25rem;
+ line-height: 1.5rem;
background-color: var(--bg-light);
border: 1px solid var(--border);
border-radius: var(--radius);
- .posthog-3000 & {
- height: 1.6875rem;
- line-height: 1.5rem;
- }
-
.LemonInput {
width: 3rem;
+ height: unset;
min-height: 0;
padding: 0;
border: none;
- .posthog-3000 & {
- height: unset;
- }
-
input {
text-align: center;
}
}
.RollingDateRangeFilter__counter__step {
- padding: 0.25rem;
- margin: 0 0.25rem;
- border-radius: var(--radius);
+ width: 1.25rem;
+ height: 100%;
+ padding: 0;
+ margin: 0;
+ text-align: center;
+ border-radius: calc(var(--radius) - 1px);
&:first-child {
- .posthog-3000 & {
- border-top-right-radius: 0;
- border-bottom-right-radius: 0;
- }
+ border-top-right-radius: 0;
+ border-bottom-right-radius: 0;
}
&:last-child {
- .posthog-3000 & {
- border-top-left-radius: 0;
- border-bottom-left-radius: 0;
- }
- }
-
- .posthog-3000 & {
- width: 1.25rem;
- height: 100%;
- padding: 0;
- margin: 0;
- text-align: center;
- border-radius: calc(var(--radius) - 1px);
+ border-top-left-radius: 0;
+ border-bottom-left-radius: 0;
}
&:hover {
- background-color: var(--primary-highlight);
-
- .posthog-3000 & {
- background-color: var(--accent-3000);
- }
+ background-color: var(--accent-3000);
}
}
}
diff --git a/frontend/src/lib/components/DatePicker.scss b/frontend/src/lib/components/DatePicker.scss
index 083127c9dff86..43ecd38624633 100644
--- a/frontend/src/lib/components/DatePicker.scss
+++ b/frontend/src/lib/components/DatePicker.scss
@@ -1,130 +1,101 @@
.ant-picker {
+ color: var(--default);
+ background: var(--lemon-button-bg-color);
+ border-color: var(--secondary-3000-button-border);
box-shadow: none !important;
-}
-.posthog-3000 {
- .ant-picker {
+ .ant-picker-suffix {
color: var(--default);
- background: var(--lemon-button-bg-color);
- border-color: var(--secondary-3000-button-border);
-
- .ant-picker-suffix {
- color: var(--default);
- }
}
- .ant-picker:hover {
+ &:hover {
border-color: var(--secondary-3000-button-border-hover);
}
+}
- .ant-picker-panel-container {
- color: var(--default);
- background: var(--bg-3000);
- border: 1px solid var(--border);
+.ant-picker-panel-container {
+ color: var(--default);
+ background: var(--bg-3000);
+ border: 1px solid var(--border);
- * {
- border-color: var(--border);
- }
+ * {
+ border-color: var(--border);
}
+}
- .ant-picker-time-panel-column > li.ant-picker-time-panel-cell-selected .ant-picker-time-panel-cell-inner {
- background: var(--primary-highlight);
- }
+.ant-picker-time-panel-column > li.ant-picker-time-panel-cell-selected .ant-picker-time-panel-cell-inner {
+ background: var(--primary-highlight);
- .ant-picker-time-panel .ant-picker-time-panel-column:nth-child(3)::after {
- // :HACKY: fix to keep the whole am/pm section in view
- display: none;
+ [theme='dark'] & {
+ background: rgba(#f7a503, 0.4);
}
+}
- .ant-picker-cell .ant-picker-cell-inner {
- border-radius: var(--radius);
- }
+.ant-picker-time-panel .ant-picker-time-panel-column:nth-child(3)::after {
+ // :HACKY: fix to keep the whole am/pm section in view
+ display: none;
+}
- .ant-picker-cell.ant-picker-cell-selected .ant-picker-cell-inner {
- color: var(--default);
- background: var(--primary-highlight);
- }
+.ant-picker-cell .ant-picker-cell-inner {
+ border-radius: var(--radius);
+}
- .ant-picker-cell.ant-picker-cell-today .ant-picker-cell-inner::before {
- background: none;
- border-color: var(--text-secondary-3000);
- }
+.ant-picker-cell.ant-picker-cell-selected .ant-picker-cell-inner {
+ color: var(--default);
+ background: var(--primary-highlight);
- .ant-picker-cell:hover:not(
- .ant-picker-cell-selected,
- .ant-picker-cell-range-start,
- .ant-picker-cell-range-end,
- .ant-picker-cell-range-hover-start,
- .ant-picker-cell-range-hover-end
- )
- .ant-picker-cell-inner {
- background: var(--secondary-3000);
+ [theme='dark'] & {
+ color: var(--default);
+ background: rgba(#f7a503, 0.4);
}
+}
- .ant-picker-cell:hover:not(.ant-picker-cell-in-view) .ant-picker-cell-inner,
- .ant-picker-cell:hover:not(
- .ant-picker-cell-today,
- .ant-picker-cell-selected,
- .ant-picker-cell-range-start,
- .ant-picker-cell-range-end,
- .ant-picker-cell-range-hover-start,
- .ant-picker-cell-range-hover-end
- )
- .ant-picker-cell-inner,
- .ant-picker-time-panel-column > li.ant-picker-time-panel-cell .ant-picker-time-panel-cell-inner:hover {
- background: var(--secondary-3000);
- }
+.ant-picker-cell.ant-picker-cell-today .ant-picker-cell-inner::before {
+ background: none;
+ border-color: var(--text-secondary-3000);
- .ant-picker-footer .ant-btn-primary {
- color: var(--primary);
- text-shadow: none;
+ [theme='dark'] & {
background: none;
- border-color: var(--primary);
- border-radius: 0.25rem;
- box-shadow: none;
- }
-
- .ant-picker-footer .ant-btn-primary:not(:disabled):hover {
- color: #fff;
- background: var(--primary);
- }
-
- .ant-picker-footer .ant-picker-now-btn:hover {
- color: var(--primary);
- }
-
- .ant-picker-ok .ant-btn-primary span {
- text-transform: uppercase;
+ border-color: var(--text-secondary-3000);
}
}
-.posthog-3000[theme='dark'] {
- .ant-picker-time-panel-column > li.ant-picker-time-panel-cell-selected .ant-picker-time-panel-cell-inner {
- background: rgba(#f7a503, 0.4);
- }
-
- .ant-picker-cell:hover:not(
- .ant-picker-cell-selected,
- .ant-picker-cell-range-start,
- .ant-picker-cell-range-end,
- .ant-picker-cell-range-hover-start,
- .ant-picker-cell-range-hover-end
- )
- .ant-picker-cell-inner {
+.ant-picker-cell:hover:not(.ant-picker-cell-in-view) .ant-picker-cell-inner,
+.ant-picker-cell:hover:not(
+ .ant-picker-cell-today,
+ .ant-picker-cell-selected,
+ .ant-picker-cell-range-start,
+ .ant-picker-cell-range-end,
+ .ant-picker-cell-range-hover-start,
+ .ant-picker-cell-range-hover-end
+ )
+ .ant-picker-cell-inner,
+.ant-picker-time-panel-column > li.ant-picker-time-panel-cell .ant-picker-time-panel-cell-inner:hover {
+ background: var(--secondary-3000);
+
+ [theme='dark'] & {
background: var(--muted-3000-dark);
}
+}
- .ant-picker-cell.ant-picker-cell-selected .ant-picker-cell-inner {
- color: var(--default);
- background: rgba(#f7a503, 0.4);
- }
+.ant-picker-footer .ant-btn-primary {
+ color: var(--primary);
+ text-shadow: none;
+ background: none;
+ border-color: var(--primary);
+ border-radius: 0.25rem;
+ box-shadow: none;
+}
- .ant-picker-time-panel-column > li.ant-picker-time-panel-cell .ant-picker-time-panel-cell-inner:hover {
- background: var(--muted-3000-dark);
- }
+.ant-picker-footer .ant-btn-primary:not(:disabled):hover {
+ color: #fff;
+ background: var(--primary);
+}
- .ant-picker-cell.ant-picker-cell-today .ant-picker-cell-inner::before {
- background: none;
- border-color: var(--text-secondary-3000);
- }
+.ant-picker-footer .ant-picker-now-btn:hover {
+ color: var(--primary);
+}
+
+.ant-picker-ok .ant-btn-primary span {
+ text-transform: uppercase;
}
diff --git a/frontend/src/lib/components/NotFound/index.tsx b/frontend/src/lib/components/NotFound/index.tsx
index f1360ca38c88d..f1bc3cc9a3c2c 100644
--- a/frontend/src/lib/components/NotFound/index.tsx
+++ b/frontend/src/lib/components/NotFound/index.tsx
@@ -21,20 +21,16 @@ export function NotFound({ object, caption }: NotFoundProps): JSX.Element {
const nodeLogic = useNotebookNode()
return (
-
+
{!nodeLogic ?
: null}
-
{capitalizeFirstLetter(object)} not found
- {!nodeLogic ? (
-
- It seems this {object} may have been lost in space.
-
- ) : null}
-
-
+
{capitalizeFirstLetter(object)} not found
+ {!nodeLogic ?
It might be lost in space.
: null}
+
{caption || (
<>
- It's possible this {object} may have been deleted or its sharing settings changed. Please check
- with the person who sent you here
+ It's possible this {object} has been deleted or its sharing settings have changed.
+
+ Please check with the person who sent you here
{preflight?.cloud ? (
<>
, or openSupportForm({ kind: 'support' })}>contact support{' '}
@@ -45,13 +41,13 @@ export function NotFound({ object, caption }: NotFoundProps): JSX.Element {
>
)}
-
- {nodeLogic && (
+ {nodeLogic && (
+
Remove from Notebook
- )}
-
+
+ )}
)
}
diff --git a/frontend/src/lib/components/PropertiesTable/PropertiesTable.scss b/frontend/src/lib/components/PropertiesTable/PropertiesTable.scss
index d219c76b01c8f..d1f0ae7c48181 100644
--- a/frontend/src/lib/components/PropertiesTable/PropertiesTable.scss
+++ b/frontend/src/lib/components/PropertiesTable/PropertiesTable.scss
@@ -22,21 +22,15 @@
}
.editable {
- text-decoration: underline dotted;
- text-decoration-color: var(--primary-3000);
+ padding: 0.125rem 0.25rem;
+ margin-left: -0.25rem;
cursor: pointer;
+ border: 1px solid transparent;
+ border-radius: calc(var(--radius) * 0.75);
- .posthog-3000 & {
- padding: 0.125rem 0.25rem;
- margin-left: -0.25rem;
- text-decoration: none;
- border: 1px solid transparent;
- border-radius: calc(var(--radius) * 0.75);
-
- &:hover {
- background: var(--bg-light);
- border: 1px solid var(--border-light);
- }
+ &:hover {
+ background: var(--bg-light);
+ border: 1px solid var(--border-light);
}
}
}
diff --git a/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx b/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx
index 021a9eca39145..563ab6b8f370a 100644
--- a/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx
+++ b/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx
@@ -402,7 +402,6 @@ export function PropertiesTable({
({
label: value,
value: key,
+ 'data-attr': `support-form-target-area-${key}`,
}))}
/>
+ {posthog.getFeatureFlag('show-troubleshooting-docs-in-support-form') === 'test-replay-banner' &&
+ sendSupportRequest.target_area === 'session_replay' && (
+
+ <>
+ We're pretty proud of our docs. Check out these helpful links:
+
+
+
+ Session replay troubleshooting
+
+
+
+
+ How to control which sessions you record
+
+
+
+ >
+
+ )}
+ {posthog.getFeatureFlag('show-troubleshooting-docs-in-support-form') === 'test-replay-banner' &&
+ sendSupportRequest.target_area === 'toolbar' && (
+
+ <>
+ We're pretty proud of our docs.{' '}
+
+ Check out this troubleshooting guide
+
+ >
+
+ )}
.LemonCheckbox__box {
flex-shrink: 0;
width: 1rem;
height: 1rem;
background: var(--bg-light);
border: 1.5px solid var(--border-bold);
- border-radius: 0.1875rem; // Intentionally a bit smaller than --radius
+ border-radius: 0.25rem; // Intentionally a bit smaller than --radius
transition: border 200ms ease, background 200ms ease;
- &.posthog-3000 {
- border-radius: 0.25rem; // Intentionally a bit smaller than --radius
- }
-
path {
stroke: var(--bg-light);
stroke-dasharray: var(--tick-length);
diff --git a/frontend/src/lib/lemon-ui/LemonCollapse/LemonCollapse.scss b/frontend/src/lib/lemon-ui/LemonCollapse/LemonCollapse.scss
index 72c00bb67513a..f5b6972d48465 100644
--- a/frontend/src/lib/lemon-ui/LemonCollapse/LemonCollapse.scss
+++ b/frontend/src/lib/lemon-ui/LemonCollapse/LemonCollapse.scss
@@ -24,7 +24,7 @@
font-weight: 500 !important; // Override status="stealth"'s font-weight
border-radius: 0 !important;
- .posthog-3000 &.LemonButton:active {
+ &.LemonButton:active {
transform: inherit;
}
}
@@ -33,12 +33,9 @@
box-sizing: content-box;
height: 0;
overflow: hidden;
+ background: var(--bg-light);
border-top-width: 1px;
transition: height 200ms ease;
-
- .posthog-3000 & {
- background: var(--bg-light);
- }
}
.LemonCollapsePanel__content {
diff --git a/frontend/src/lib/lemon-ui/LemonInput/LemonInput.scss b/frontend/src/lib/lemon-ui/LemonInput/LemonInput.scss
index 6018b10f41bb7..28a84357dadeb 100644
--- a/frontend/src/lib/lemon-ui/LemonInput/LemonInput.scss
+++ b/frontend/src/lib/lemon-ui/LemonInput/LemonInput.scss
@@ -3,7 +3,7 @@
gap: 0.25rem;
align-items: center;
justify-content: center;
- height: 2.5rem;
+ height: calc(2.125rem + 3px); // Medium size button height + button shadow height
padding: 0.25rem 0.5rem;
font-size: 0.875rem;
line-height: 1.25rem;
@@ -15,24 +15,12 @@
border: 1px solid var(--border);
border-radius: var(--radius);
- .posthog-3000 & {
- height: calc(2.125rem + 3px); // Medium size button height + button shadow height
- }
-
&:hover:not([aria-disabled='true']) {
- border-color: var(--primary-3000-hover);
-
- .posthog-3000 & {
- border-color: var(--border-bold);
- }
+ border-color: var(--border-bold);
}
&.LemonInput--focused:not([aria-disabled='true']) {
- border-color: var(--primary-3000);
-
- .posthog-3000 & {
- border-color: var(--border-active);
- }
+ border-color: var(--border-active);
}
&.LemonInput--transparent-background {
diff --git a/frontend/src/lib/lemon-ui/LemonModal/LemonModal.scss b/frontend/src/lib/lemon-ui/LemonModal/LemonModal.scss
index d7c3b21b6ba00..a10cc87433af0 100644
--- a/frontend/src/lib/lemon-ui/LemonModal/LemonModal.scss
+++ b/frontend/src/lib/lemon-ui/LemonModal/LemonModal.scss
@@ -33,7 +33,7 @@
max-height: 90%;
margin: 1rem auto;
background-color: var(--bg-light);
- border: 1px solid var(--border-3000);
+ border: 1px solid var(--secondary-3000-button-border);
border-radius: var(--radius);
box-shadow: var(--modal-shadow-elevation);
opacity: 0;
@@ -91,10 +91,6 @@
height: 100%;
overflow: hidden;
}
-
- .posthog-3000 & {
- border-color: var(--secondary-3000-button-border);
- }
}
.LemonModal__header {
diff --git a/frontend/src/lib/lemon-ui/LemonSelectMultiple/LemonSelectMultiple.scss b/frontend/src/lib/lemon-ui/LemonSelectMultiple/LemonSelectMultiple.scss
index 087aa0f39c5d8..67200ac17bdf3 100644
--- a/frontend/src/lib/lemon-ui/LemonSelectMultiple/LemonSelectMultiple.scss
+++ b/frontend/src/lib/lemon-ui/LemonSelectMultiple/LemonSelectMultiple.scss
@@ -4,7 +4,7 @@
.ant-select-selector,
&.ant-select-single .ant-select-selector {
- min-height: 2.5rem;
+ min-height: 2.125rem;
padding: 0.25rem;
font-size: 0.875rem;
line-height: 1.25rem;
@@ -13,10 +13,6 @@
border: 1px solid var(--border);
border-radius: var(--radius);
- .posthog-3000 & {
- min-height: 2.125rem;
- }
-
.ant-select-selection-overflow {
gap: 0.25rem;
}
@@ -71,13 +67,9 @@
padding: 0.5rem;
margin: -4px 0; // Counteract antd wrapper
background: var(--bg-light);
- border: 1px solid var(--primary);
+ border: 1px solid var(--primary-3000);
border-radius: var(--radius);
- .posthog-3000 & {
- border: 1px solid var(--primary-3000);
- }
-
.ant-select-item {
padding: 0;
padding-bottom: 0.2rem;
diff --git a/frontend/src/lib/lemon-ui/LemonSwitch/LemonSwitch.scss b/frontend/src/lib/lemon-ui/LemonSwitch/LemonSwitch.scss
index eccd270e72e1f..a63c3e5fdc63c 100644
--- a/frontend/src/lib/lemon-ui/LemonSwitch/LemonSwitch.scss
+++ b/frontend/src/lib/lemon-ui/LemonSwitch/LemonSwitch.scss
@@ -1,6 +1,6 @@
.LemonSwitch {
- --lemon-switch-height: 1.25rem;
- --lemon-switch-width: 2.25rem;
+ --lemon-switch-height: 1.125rem;
+ --lemon-switch-width: calc(11 / 6 * var(--lemon-switch-height)); // Same proportion as in IconToggle
display: flex;
gap: 0.5rem;
@@ -23,17 +23,13 @@
}
&.LemonSwitch--bordered {
- min-height: 2.5rem;
+ min-height: calc(2.125rem + 3px); // Medium size button height + button shadow height
padding: 0 0.75rem;
line-height: 1.4;
background: var(--bg-light);
border: 1px solid var(--border);
border-radius: var(--radius);
- .posthog-3000 & {
- min-height: calc(2.125rem + 3px); // Medium size button height + button shadow height
- }
-
&.LemonSwitch--small {
gap: 0.5rem;
min-height: 2rem;
@@ -54,11 +50,6 @@
cursor: not-allowed; // A label with for=* also toggles the switch, so it shouldn't have the text select cursor
}
}
-
- .posthog-3000 & {
- --lemon-switch-height: 1.125rem;
- --lemon-switch-width: calc(11 / 6 * var(--lemon-switch-height)); // Same proportion as in IconToggle
- }
}
.LemonSwitch__button {
@@ -79,91 +70,57 @@
.LemonSwitch__slider {
position: absolute;
- top: 5px;
+ top: 0;
left: 0;
display: inline-block;
- width: 2.25rem;
- height: 0.625rem;
- background-color: var(--border);
- border-radius: 0.625rem;
+ width: 100%;
+ height: 100%;
+ pointer-events: none;
+ background-color: var(--border-bold);
+ border-radius: var(--lemon-switch-height);
transition: background-color 100ms ease;
- .posthog-3000 & {
- top: 0;
- width: 100%;
- height: 100%;
- pointer-events: none;
- background-color: var(--border-bold);
- border-radius: var(--lemon-switch-height);
- }
-
.LemonSwitch--checked & {
- background-color: var(--primary-highlight);
-
- .posthog-3000 & {
- background-color: var(--primary-3000);
- }
+ background-color: var(--primary-3000);
}
}
.LemonSwitch__handle {
+ --lemon-switch-handle-ratio: calc(3 / 4); // Same proportion as in IconToggle
+ --lemon-switch-handle-gutter: calc(var(--lemon-switch-height) * calc(1 - var(--lemon-switch-handle-ratio)) / 2);
+ --lemon-switch-handle-width: calc(var(--lemon-switch-height) * var(--lemon-switch-handle-ratio));
+ --lemon-switch-active-translate: translateX(
+ calc(var(--lemon-switch-width) - var(--lemon-switch-handle-width) - var(--lemon-switch-handle-gutter) * 2)
+ );
+
position: absolute;
- top: 0;
- left: 0;
+ top: var(--lemon-switch-handle-gutter);
+ left: var(--lemon-switch-handle-gutter);
display: flex;
align-items: center;
justify-content: center;
- width: 1.25rem;
- height: 1.25rem;
+ width: var(--lemon-switch-handle-width);
+ height: calc(var(--lemon-switch-height) * var(--lemon-switch-handle-ratio));
+ pointer-events: none;
cursor: inherit;
background-color: #fff;
- border: 2px solid var(--border);
+ border: none;
border-radius: 0.625rem;
transition: background-color 100ms ease, transform 100ms ease, width 100ms ease, border-color 100ms ease;
- .posthog-3000 & {
- --lemon-switch-handle-ratio: calc(3 / 4); // Same proportion as in IconToggle
- --lemon-switch-handle-gutter: calc(var(--lemon-switch-height) * calc(1 - var(--lemon-switch-handle-ratio)) / 2);
- --lemon-switch-handle-width: calc(var(--lemon-switch-height) * var(--lemon-switch-handle-ratio));
- --lemon-switch-active-translate: translateX(
- calc(var(--lemon-switch-width) - var(--lemon-switch-handle-width) - var(--lemon-switch-handle-gutter) * 2)
- );
-
- top: var(--lemon-switch-handle-gutter);
- left: var(--lemon-switch-handle-gutter);
- width: var(--lemon-switch-handle-width);
- height: calc(var(--lemon-switch-height) * var(--lemon-switch-handle-ratio));
- pointer-events: none;
- background-color: #fff;
- border: none;
- }
-
.LemonSwitch--checked & {
- background-color: var(--primary-3000);
+ background-color: #fff;
border-color: var(--primary-3000);
- transform: translateX(1rem);
-
- .posthog-3000 & {
- background-color: #fff;
- transform: var(--lemon-switch-active-translate);
- }
+ transform: var(--lemon-switch-active-translate);
}
.LemonSwitch--active & {
- transform: scale(1.1);
-
- .posthog-3000 & {
- --lemon-switch-handle-width: calc(var(--lemon-switch-height) * var(--lemon-switch-handle-ratio) * 1.2);
+ --lemon-switch-handle-width: calc(var(--lemon-switch-height) * var(--lemon-switch-handle-ratio) * 1.2);
- transform: none;
- }
+ transform: none;
}
.LemonSwitch--active.LemonSwitch--checked & {
- transform: translateX(1rem) scale(1.1);
-
- .posthog-3000 & {
- transform: var(--lemon-switch-active-translate);
- }
+ transform: var(--lemon-switch-active-translate);
}
}
diff --git a/frontend/src/lib/lemon-ui/LemonTable/LemonTable.scss b/frontend/src/lib/lemon-ui/LemonTable/LemonTable.scss
index 2bf5449f0b4aa..b944d2f17635e 100644
--- a/frontend/src/lib/lemon-ui/LemonTable/LemonTable.scss
+++ b/frontend/src/lib/lemon-ui/LemonTable/LemonTable.scss
@@ -1,5 +1,5 @@
.LemonTable {
- --row-base-height: 3rem;
+ --row-base-height: auto;
--row-horizontal-padding: 1rem;
--lemon-table-background-color: var(--bg-table);
@@ -7,6 +7,7 @@
flex: 1;
width: 100%;
overflow: hidden;
+ font-size: 13px;
background: var(--lemon-table-background-color);
border: 1px solid var(--border);
border-radius: var(--radius);
@@ -24,12 +25,6 @@
border: none;
}
- .posthog-3000 & {
- --row-base-height: auto;
-
- font-size: 13px;
- }
-
&.LemonTable--with-ribbon {
--row-ribbon-width: 0.25rem;
@@ -49,18 +44,12 @@
}
&--xs {
- --row-base-height: 2rem;
-
.LemonTable__content > table > tbody > tr > td {
padding-top: 0.25rem;
padding-bottom: 0.25rem;
}
}
- &--small {
- --row-base-height: 2.5rem;
- }
-
&--embedded {
background: none;
border: none;
@@ -72,11 +61,8 @@
.LemonTable__content > table {
> thead {
+ background: none;
border-bottom: none;
-
- .posthog-3000 & {
- background: none;
- }
}
> thead,
@@ -127,12 +113,10 @@
}
a.Link {
- .posthog-3000 & {
- color: var(--default);
+ color: var(--default);
- &:not(:disabled):hover {
- color: var(--primary-3000-hover);
- }
+ &:not(:disabled):hover {
+ color: var(--primary-3000-hover);
}
}
}
@@ -147,14 +131,12 @@
font-size: 0.75rem;
text-transform: uppercase;
letter-spacing: 0.03125rem;
- background: var(--mid);
-
- .posthog-3000 & {
- background: var(--lemon-table-background-color);
- }
+ background: var(--lemon-table-background-color);
> tr {
> th {
+ padding-top: 0.5rem;
+ padding-bottom: 0.5rem;
font-weight: 700;
text-align: left;
@@ -162,15 +144,8 @@
// Also it needs to be on the th - any higher and safari will not render the shadow
box-shadow: inset 0 -1px var(--border);
- .posthog-3000 & {
- padding-top: 0.5rem;
- padding-bottom: 0.5rem;
- }
-
.LemonButton {
- .posthog-3000 & {
- margin: -0.5rem 0;
- }
+ margin: -0.5rem 0;
}
}
@@ -293,30 +268,26 @@
.LemonTable__header {
cursor: default;
- .posthog-3000 & {
- .LemonTable__header-content {
- color: var(--text-secondary);
- }
+ .LemonTable__header-content {
+ color: var(--text-secondary);
}
&.LemonTable__header--actionable {
cursor: pointer;
- .posthog-3000 & {
- &:hover {
- &:not(:has(.LemonTable__header--no-hover:hover)) {
- .LemonTable__header-content {
- color: var(--default);
- }
- }
- }
-
- &:active {
+ &:hover {
+ &:not(:has(.LemonTable__header--no-hover:hover)) {
.LemonTable__header-content {
color: var(--default);
}
}
}
+
+ &:active {
+ .LemonTable__header-content {
+ color: var(--default);
+ }
+ }
}
}
@@ -346,11 +317,7 @@
}
.LemonTable__header--sticky::before {
- background: var(--mid);
-
- .posthog-3000 & {
- background: var(--lemon-table-background-color);
- }
+ background: var(--lemon-table-background-color);
}
// Stickiness is disabled in snapshots due to flakiness
diff --git a/frontend/src/lib/lemon-ui/LemonTable/LemonTable.stories.tsx b/frontend/src/lib/lemon-ui/LemonTable/LemonTable.stories.tsx
index b0c604b5e2cf7..a9c360b431cfb 100644
--- a/frontend/src/lib/lemon-ui/LemonTable/LemonTable.stories.tsx
+++ b/frontend/src/lib/lemon-ui/LemonTable/LemonTable.stories.tsx
@@ -176,9 +176,6 @@ WithExpandableRows.args = {
export const Small: Story = BasicTemplate.bind({})
Small.args = { size: 'small' }
-export const XSmall: Story = BasicTemplate.bind({})
-XSmall.args = { size: 'xs' }
-
export const Embedded: Story = BasicTemplate.bind({})
Embedded.args = { embedded: true }
diff --git a/frontend/src/lib/lemon-ui/LemonTable/LemonTable.tsx b/frontend/src/lib/lemon-ui/LemonTable/LemonTable.tsx
index 7cba42509ac08..8fb3ee58a65ae 100644
--- a/frontend/src/lib/lemon-ui/LemonTable/LemonTable.tsx
+++ b/frontend/src/lib/lemon-ui/LemonTable/LemonTable.tsx
@@ -47,7 +47,7 @@ export interface LemonTableProps> {
/** Function that for each row determines what props should its `tr` element have based on the row's record. */
onRow?: (record: T) => Omit, 'key'>
/** How tall should rows be. The default value is `"middle"`. */
- size?: 'xs' | 'small' | 'middle'
+ size?: 'small' | 'middle'
/** Whether this table already is inset, meaning it needs reduced horizontal padding (0.5rem instead of 1rem). */
inset?: boolean
/** An embedded table has no border around it and no background. This way it blends better into other components. */
diff --git a/frontend/src/lib/lemon-ui/LemonTable/LemonTableLoader.scss b/frontend/src/lib/lemon-ui/LemonTable/LemonTableLoader.scss
index c75c739b37653..49a382f7bd061 100644
--- a/frontend/src/lib/lemon-ui/LemonTable/LemonTableLoader.scss
+++ b/frontend/src/lib/lemon-ui/LemonTable/LemonTableLoader.scss
@@ -7,14 +7,10 @@
height: 0;
padding: 0.05rem !important;
overflow: hidden;
- background: var(--primary-bg-active);
+ background: var(--primary-3000-highlight);
border: none !important;
transition: height 200ms ease, top 200ms ease;
- .posthog-3000 & {
- background: var(--primary-3000-highlight);
- }
-
&::after {
position: absolute;
top: 0;
@@ -22,12 +18,8 @@
width: 50%;
height: 100%;
content: '';
- background: var(--primary);
+ background: var(--primary-3000);
animation: LemonTableLoader__swooping 1.5s linear infinite;
-
- .posthog-3000 & {
- background: var(--primary-3000);
- }
}
&.LemonTableLoader--enter-active,
diff --git a/frontend/src/lib/lemon-ui/LemonTag/LemonTag.scss b/frontend/src/lib/lemon-ui/LemonTag/LemonTag.scss
index 93f2d8a133165..807b7765e3420 100644
--- a/frontend/src/lib/lemon-ui/LemonTag/LemonTag.scss
+++ b/frontend/src/lib/lemon-ui/LemonTag/LemonTag.scss
@@ -25,14 +25,9 @@
}
&.LemonTag--primary {
- color: #fff;
- background-color: var(--primary-3000);
-
- .posthog-3000 & {
- color: var(--primary-3000);
- background: none;
- border-color: var(--primary-3000);
- }
+ color: var(--primary-3000);
+ background: none;
+ border-color: var(--primary-3000);
}
&.LemonTag--option {
@@ -41,69 +36,39 @@
}
&.LemonTag--highlight {
- color: var(--bg-charcoal);
- background-color: var(--mark);
-
- .posthog-3000 & {
- color: var(--highlight);
- background: none;
- border-color: var(--highlight);
- }
+ color: var(--highlight);
+ background: none;
+ border-color: var(--highlight);
}
&.LemonTag--warning {
- color: var(--bg-charcoal);
- background-color: var(--warning);
-
- .posthog-3000 & {
- color: var(--warning);
- background: none;
- border-color: var(--warning);
- }
+ color: var(--warning);
+ background: none;
+ border-color: var(--warning);
}
&.LemonTag--danger {
- color: #fff;
- background-color: var(--danger);
-
- .posthog-3000 & {
- color: var(--danger);
- background: none;
- border-color: var(--danger);
- }
+ color: var(--danger);
+ background: none;
+ border-color: var(--danger);
}
&.LemonTag--success {
- color: #fff;
- background-color: var(--success);
-
- .posthog-3000 & {
- color: var(--success);
- background: none;
- border-color: var(--success);
- }
+ color: var(--success);
+ background: none;
+ border-color: var(--success);
}
&.LemonTag--completion {
- color: var(--bg-charcoal);
- background-color: var(--purple-light);
-
- .posthog-3000 & {
- color: var(--purple);
- background: none;
- border-color: var(--purple);
- }
+ color: var(--purple);
+ background: none;
+ border-color: var(--purple);
}
&.LemonTag--caution {
- color: var(--bg-charcoal);
- background-color: var(--danger-lighter);
-
- .posthog-3000 & {
- color: var(--danger-lighter);
- background: none;
- border-color: var(--danger-lighter);
- }
+ color: var(--danger-lighter);
+ background: none;
+ border-color: var(--danger-lighter);
}
&.LemonTag--muted {
diff --git a/frontend/src/lib/lemon-ui/LemonTextArea/LemonTextArea.scss b/frontend/src/lib/lemon-ui/LemonTextArea/LemonTextArea.scss
index 3c24f908503e9..9c89b575b4a0b 100644
--- a/frontend/src/lib/lemon-ui/LemonTextArea/LemonTextArea.scss
+++ b/frontend/src/lib/lemon-ui/LemonTextArea/LemonTextArea.scss
@@ -17,11 +17,7 @@
transition: background-color 200ms ease, color 200ms ease, border 200ms ease, opacity 200ms ease;
&:not(:disabled):hover {
- border: 1px solid var(--primary-3000-hover);
-
- .posthog-3000 & {
- border-color: var(--border-bold);
- }
+ border: 1px solid var(--border-bold);
}
&:disabled {
@@ -30,11 +26,7 @@
}
&:focus:not(:disabled) {
- border: 1px solid var(--primary-3000);
-
- .posthog-3000 & {
- border-color: var(--border-active);
- }
+ border: 1px solid var(--border-active);
}
.Field--error & {
diff --git a/frontend/src/lib/lemon-ui/Link/Link.scss b/frontend/src/lib/lemon-ui/Link/Link.scss
index 13969c9df18b1..24a0bf5f65522 100644
--- a/frontend/src/lib/lemon-ui/Link/Link.scss
+++ b/frontend/src/lib/lemon-ui/Link/Link.scss
@@ -28,12 +28,10 @@
}
&--subtle {
- .posthog-3000 & {
- color: var(--default);
+ color: var(--default);
- &:not(:disabled):hover {
- color: var(--primary-3000-hover);
- }
+ &:not(:disabled):hover {
+ color: var(--primary-3000-hover);
}
}
}
diff --git a/frontend/src/lib/lemon-ui/Spinner/Spinner.scss b/frontend/src/lib/lemon-ui/Spinner/Spinner.scss
index e5ddf2a3cb175..3ab31c08e9be6 100644
--- a/frontend/src/lib/lemon-ui/Spinner/Spinner.scss
+++ b/frontend/src/lib/lemon-ui/Spinner/Spinner.scss
@@ -78,7 +78,7 @@
position: relative;
}
- .posthog-3000 &.SpinnerOverlay--scene-level::before {
+ &.SpinnerOverlay--scene-level::before {
background: var(--bg-3000);
}
}
diff --git a/frontend/src/queries/nodes/DataVisualization/Components/Charts/LineGraph.tsx b/frontend/src/queries/nodes/DataVisualization/Components/Charts/LineGraph.tsx
index 0e5066658245e..a7c5e4d9237f7 100644
--- a/frontend/src/queries/nodes/DataVisualization/Components/Charts/LineGraph.tsx
+++ b/frontend/src/queries/nodes/DataVisualization/Components/Charts/LineGraph.tsx
@@ -203,7 +203,6 @@ export const LineGraph = (): JSX.Element => {
},
},
]}
- size="small"
uppercaseHeader={false}
rowRibbonColor={(_datum, index) => getSeriesColor(index)}
showHeader
diff --git a/frontend/src/queries/nodes/HogQLQuery/HogQLQueryEditor.tsx b/frontend/src/queries/nodes/HogQLQuery/HogQLQueryEditor.tsx
index 30df1eb1c3424..45cb608451271 100644
--- a/frontend/src/queries/nodes/HogQLQuery/HogQLQueryEditor.tsx
+++ b/frontend/src/queries/nodes/HogQLQuery/HogQLQueryEditor.tsx
@@ -205,10 +205,6 @@ export function HogQLQueryEditor(props: HogQLQueryEditorProps): JSX.Element {
monaco.languages.registerCompletionItemProvider('mysql', {
triggerCharacters: [' ', ',', '.'],
provideCompletionItems: async (model, position) => {
- if (!logic.isMounted()) {
- return undefined
- }
-
if (!featureFlags[FEATURE_FLAGS.HOGQL_AUTOCOMPLETE]) {
return undefined
}
@@ -226,7 +222,7 @@ export function HogQLQueryEditor(props: HogQLQueryEditorProps): JSX.Element {
const response = await query({
kind: NodeKind.HogQLAutocomplete,
- select: logic.values.queryInput,
+ select: model.getValue(), // Use the text from the model instead of logic due to a race condition on the logic values updating quick enough
filters: props.query.filters,
startPosition: startOffset,
endPosition: endOffset,
diff --git a/frontend/src/queries/nodes/InsightViz/PropertyGroupFilters/PropertyGroupFilters.scss b/frontend/src/queries/nodes/InsightViz/PropertyGroupFilters/PropertyGroupFilters.scss
index 618d3c39bca1f..8eface52ec75f 100644
--- a/frontend/src/queries/nodes/InsightViz/PropertyGroupFilters/PropertyGroupFilters.scss
+++ b/frontend/src/queries/nodes/InsightViz/PropertyGroupFilters/PropertyGroupFilters.scss
@@ -2,11 +2,8 @@
.property-group {
padding: 0.5rem;
background-color: var(--side);
+ border-width: 1px;
border-radius: var(--radius);
-
- .posthog-3000 & {
- border-width: 1px;
- }
}
.property-group-and-or-separator {
diff --git a/frontend/src/queries/schema.json b/frontend/src/queries/schema.json
index 8d5c567d9d999..98d40729fc536 100644
--- a/frontend/src/queries/schema.json
+++ b/frontend/src/queries/schema.json
@@ -4993,6 +4993,9 @@
"WebOverviewQuery": {
"additionalProperties": false,
"properties": {
+ "compare": {
+ "type": "boolean"
+ },
"dateRange": {
"$ref": "#/definitions/DateRange"
},
diff --git a/frontend/src/queries/schema.ts b/frontend/src/queries/schema.ts
index 82e230be29ab4..44912c3862bf0 100644
--- a/frontend/src/queries/schema.ts
+++ b/frontend/src/queries/schema.ts
@@ -968,6 +968,7 @@ export interface WebAnalyticsQueryBase {
export interface WebOverviewQuery extends WebAnalyticsQueryBase {
kind: NodeKind.WebOverviewQuery
response?: WebOverviewQueryResponse
+ compare?: boolean
}
export interface WebOverviewItem {
diff --git a/frontend/src/scenes/batch_exports/BatchExportScene.tsx b/frontend/src/scenes/batch_exports/BatchExportScene.tsx
index c3c0bb68228f0..039a47a104354 100644
--- a/frontend/src/scenes/batch_exports/BatchExportScene.tsx
+++ b/frontend/src/scenes/batch_exports/BatchExportScene.tsx
@@ -125,7 +125,6 @@ export function RunsTab(): JSX.Element {
diff --git a/frontend/src/scenes/billing/BillingProduct.tsx b/frontend/src/scenes/billing/BillingProduct.tsx
index 088fc2a11a2ea..25a6d6dcaefba 100644
--- a/frontend/src/scenes/billing/BillingProduct.tsx
+++ b/frontend/src/scenes/billing/BillingProduct.tsx
@@ -536,7 +536,7 @@ export const BillingProduct = ({ product }: { product: BillingProductV2Type }):
}
/>
-
diff --git a/frontend/src/scenes/dashboard/EmptyDashboardComponent.scss b/frontend/src/scenes/dashboard/EmptyDashboardComponent.scss
index b8e3359b89362..a96eade91b6d6 100644
--- a/frontend/src/scenes/dashboard/EmptyDashboardComponent.scss
+++ b/frontend/src/scenes/dashboard/EmptyDashboardComponent.scss
@@ -11,13 +11,11 @@
overflow: hidden;
&::after {
+ --bg-light: var(--bg-3000); // Make the fade blend in with the 3000 background smoothly
+
width: 100%;
height: 150px;
- .posthog-3000 & {
- --bg-light: var(--bg-3000); // Make the fade blend in with the 3000 background smoothly
- }
-
@extend %mixin-gradient-overlay;
}
}
diff --git a/frontend/src/scenes/data-management/database/DatabaseTable.tsx b/frontend/src/scenes/data-management/database/DatabaseTable.tsx
index bbf5c67db647e..83b70c8abb41c 100644
--- a/frontend/src/scenes/data-management/database/DatabaseTable.tsx
+++ b/frontend/src/scenes/data-management/database/DatabaseTable.tsx
@@ -13,7 +13,6 @@ interface DatabaseTableProps {
export function DatabaseTable({ table, tables }: DatabaseTableProps): JSX.Element {
return (
name === table)?.columns ?? []}
columns={[
{
diff --git a/frontend/src/scenes/data-management/ingestion-warnings/IngestionWarningsView.tsx b/frontend/src/scenes/data-management/ingestion-warnings/IngestionWarningsView.tsx
index bb635d6e8fd55..f9dd1830383b8 100644
--- a/frontend/src/scenes/data-management/ingestion-warnings/IngestionWarningsView.tsx
+++ b/frontend/src/scenes/data-management/ingestion-warnings/IngestionWarningsView.tsx
@@ -239,7 +239,6 @@ function RenderNestedWarnings(warningSummary: IngestionWarningSummary): JSX.Elem
},
]}
embedded
- size="small"
showHeader={false}
/>
)
diff --git a/frontend/src/scenes/events/Events.tsx b/frontend/src/scenes/events/Events.tsx
index ae21bad463961..4ae39dc817fa1 100644
--- a/frontend/src/scenes/events/Events.tsx
+++ b/frontend/src/scenes/events/Events.tsx
@@ -14,7 +14,6 @@ export function Events(): JSX.Element {
return (
<>
-
>
diff --git a/frontend/src/scenes/feature-flags/FeatureFlag.tsx b/frontend/src/scenes/feature-flags/FeatureFlag.tsx
index 7aeee10e75204..ee658db1b2061 100644
--- a/frontend/src/scenes/feature-flags/FeatureFlag.tsx
+++ b/frontend/src/scenes/feature-flags/FeatureFlag.tsx
@@ -275,7 +275,6 @@ export function FeatureFlag({ id }: { id?: string } = {}): JSX.Element {
}
/>
-
{featureFlag.experiment_set && featureFlag.experiment_set?.length > 0 && (
This feature flag is linked to an experiment. Edit settings here only for advanced
@@ -285,7 +284,7 @@ export function FeatureFlag({ id }: { id?: string } = {}): JSX.Element {
)}
-
+
datum.color || null}
showHeader={showHeader}
@@ -239,7 +238,6 @@ export function InsightTooltip({
dataSource={dataSource.slice(0, rowCutoff)}
columns={columns}
rowKey="id"
- size="small"
className="ph-no-capture"
uppercaseHeader={false}
rowRibbonColor={hideColorCol ? undefined : (datum: SeriesDatum) => datum.color || null}
diff --git a/frontend/src/scenes/insights/utils.tsx b/frontend/src/scenes/insights/utils.tsx
index b249415792a49..50cc8fc63ec8c 100644
--- a/frontend/src/scenes/insights/utils.tsx
+++ b/frontend/src/scenes/insights/utils.tsx
@@ -263,7 +263,7 @@ export function formatBreakdownLabel(
return cohorts?.filter((c) => c.id == breakdown_value)[0]?.name ?? (breakdown_value || '').toString()
} else if (typeof breakdown_value == 'number') {
return isOtherBreakdown(breakdown_value)
- ? 'Other'
+ ? 'Other (Groups all remaining values)'
: isNullBreakdown(breakdown_value)
? 'None'
: formatPropertyValueForDisplay
@@ -271,7 +271,7 @@ export function formatBreakdownLabel(
: String(breakdown_value)
} else if (typeof breakdown_value == 'string') {
return isOtherBreakdown(breakdown_value) || breakdown_value === 'nan'
- ? 'Other'
+ ? 'Other (Groups all remaining values)'
: isNullBreakdown(breakdown_value) || breakdown_value === ''
? 'None'
: breakdown_value
diff --git a/frontend/src/scenes/pipeline/Destinations.tsx b/frontend/src/scenes/pipeline/Destinations.tsx
index 9f251fe942d1e..149f8a2cc60fb 100644
--- a/frontend/src/scenes/pipeline/Destinations.tsx
+++ b/frontend/src/scenes/pipeline/Destinations.tsx
@@ -52,7 +52,7 @@ function DestinationsTable(): JSX.Element {
<>
= {
+ title: 'Components/ItemPerformanceEvent',
+ component: ItemPerformanceEvent,
+ decorators: [
+ mswDecorator({
+ get: {},
+ }),
+ ],
+}
+export default meta
+
+export function InitialHeadersDisplay(): JSX.Element {
+ return
+}
+
+export function InitialBodyDisplay(): JSX.Element {
+ return (
+
+ )
+}
diff --git a/frontend/src/scenes/session-recordings/player/inspector/components/ItemPerformanceEvent.tsx b/frontend/src/scenes/session-recordings/player/inspector/components/ItemPerformanceEvent.tsx
index 10c4cb545efe9..76a28a190917c 100644
--- a/frontend/src/scenes/session-recordings/player/inspector/components/ItemPerformanceEvent.tsx
+++ b/frontend/src/scenes/session-recordings/player/inspector/components/ItemPerformanceEvent.tsx
@@ -364,6 +364,7 @@ export function ItemPerformanceEvent({
),
},
@@ -374,7 +375,11 @@ export function ItemPerformanceEvent({
),
},
@@ -386,7 +391,11 @@ export function ItemPerformanceEvent({
),
}
@@ -424,7 +433,7 @@ export function ItemPerformanceEvent({
)
}
-function BodyDisplay({
+export function BodyDisplay({
content,
headers,
emptyMessage,
@@ -454,23 +463,26 @@ function BodyDisplay({
)
}
-function HeadersDisplay({
+export function HeadersDisplay({
request,
response,
+ isInitial,
}: {
request: Record | undefined
response: Record | undefined
+ isInitial?: boolean
}): JSX.Element | null {
+ const emptyMessage = isInitial ? 'captured before PostHog was initialized' : 'No headers captured'
return (
Request Headers
-
+
Response Headers
-
+
)
diff --git a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.scss b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.scss
index f0a6e629424ea..d2fcab212dbcb 100644
--- a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.scss
+++ b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.scss
@@ -22,9 +22,7 @@
overflow: hidden;
.text-link {
- .posthog-3000 & {
- color: var(--default);
- }
+ color: var(--default);
}
}
@@ -56,12 +54,8 @@
.SessionRecordingPlaylistHeightWrapper {
// NOTE: Somewhat random way to offset the various headers and tabs above the playlist
- height: calc(100vh - 15rem);
+ height: calc(100vh - 9rem);
min-height: 41rem;
-
- .posthog-3000 & {
- height: calc(100vh - 9rem);
- }
}
.SessionRecordingPreview {
diff --git a/frontend/src/scenes/settings/Settings.scss b/frontend/src/scenes/settings/Settings.scss
index 37b8f3daf2e94..b749083d2ca68 100644
--- a/frontend/src/scenes/settings/Settings.scss
+++ b/frontend/src/scenes/settings/Settings.scss
@@ -2,20 +2,16 @@
display: flex;
gap: 2rem;
align-items: start;
- margin-top: 1rem;
+ margin-top: 0;
.Settings__sections {
position: sticky;
- top: 0.5rem;
+ top: 4rem;
flex-shrink: 0;
width: 20%;
min-width: 14rem;
max-width: 20rem;
- .posthog-3000 & {
- top: 4rem;
- }
-
.SidePanel3000 & {
top: 0;
}
@@ -33,7 +29,6 @@
}
}
- .posthog-3000 &,
.LemonModal & {
margin-top: 0;
}
diff --git a/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts b/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts
index fea26dcf8fa8e..7a6dcd00332c2 100644
--- a/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts
+++ b/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts
@@ -402,7 +402,7 @@ export const webAnalyticsLogic = kea([
date_from: dateFrom,
date_to: dateTo,
}
- const compare = !!(dateRange.date_from && dateRange.date_to)
+ const compare = !!dateRange.date_from && dateRange.date_from !== 'all'
const sampling = {
enabled: !!values.featureFlags[FEATURE_FLAGS.WEB_ANALYTICS_SAMPLING],
@@ -428,6 +428,7 @@ export const webAnalyticsLogic = kea([
properties: webAnalyticsFilters,
dateRange,
sampling,
+ compare,
},
insightProps: createInsightProps(TileId.OVERVIEW),
canOpenModal: false,
diff --git a/frontend/src/styles/global.scss b/frontend/src/styles/global.scss
index 2140b3aad166d..80ad3b4e660e5 100644
--- a/frontend/src/styles/global.scss
+++ b/frontend/src/styles/global.scss
@@ -53,16 +53,12 @@ input[type='radio'] {
gap: 0.5rem 1rem;
align-items: center;
min-height: 2.5rem;
- margin: 1.25rem 0 0.25rem;
+ margin: 1rem 0 0.25rem;
.ant-form-item {
margin-bottom: 0 !important;
}
- .posthog-3000 & {
- margin-top: 1rem;
- }
-
@include screen($md) {
flex-wrap: nowrap;
}
@@ -177,14 +173,10 @@ input::-ms-clear {
font-family: var(--font-sans);
font-size: 1rem;
cursor: unset;
- border: 1px solid var(--border);
+ border: 1px solid var(--secondary-3000-button-border);
border-radius: var(--radius);
box-shadow: var(--shadow-elevation);
opacity: 1 !important;
-
- .posthog-3000 & {
- border-color: var(--secondary-3000-button-border);
- }
}
.Toastify__toast-container {
@@ -481,6 +473,9 @@ body {
--link: var(--primary-3000);
--tooltip-bg: var(--bg-charcoal);
--data-color-1-hover: #1d4affe5;
+ --shadow-elevation: var(--shadow-elevation-3000);
+ --primary: var(--primary-3000);
+ --primary-highlight: var(--primary-3000-highlight);
// Remove below once we're using Tailwind's base
--tw-ring-offset-width: 0px;
@@ -496,69 +491,50 @@ body {
--tw-scale-y: 1;
touch-action: manipulation; // Disable double-tap-to-zoom on mobile, making taps slightly snappier
+ background: var(--bg-3000);
&.posthog-3000[theme='light'] {
@include light-mode-3000-variables;
}
&.posthog-3000[theme='dark'] {
- @include dark-mode-3000-variables;
- }
-
- &.posthog-3000 {
- --shadow-elevation: var(--shadow-elevation-3000);
- --primary: var(--primary-3000);
- --primary-highlight: var(--primary-3000-highlight);
-
- background: var(--bg-3000);
+ .ant-empty-img-simple-path {
+ fill: var(--border-3000);
+ }
- .non-3000 {
- // Helper to hide non-3000 elements without JS
- display: none;
+ .ant-empty-img-simple-ellipse {
+ fill: var(--border-3000);
}
- .LemonButton,
- .Link {
- .text-link {
- color: var(--text-3000);
- }
+ @include dark-mode-3000-variables;
+ }
- &:hover {
- .text-link {
- color: var(--primary-3000);
- }
- }
+ * > {
+ ::-webkit-scrollbar {
+ width: 0.5rem;
+ height: 0.5rem;
}
- * > {
- ::-webkit-scrollbar {
- width: 0.5rem;
- height: 0.5rem;
- }
-
- ::-webkit-scrollbar-track {
- background: var(--accent-3000);
- }
+ ::-webkit-scrollbar-track {
+ background: var(--accent-3000);
+ }
- ::-webkit-scrollbar-thumb {
- background: var(--trace-3000);
- border-radius: var(--radius);
+ ::-webkit-scrollbar-thumb {
+ background: var(--trace-3000);
+ border-radius: var(--radius);
- &:hover {
- background: var(--muted-3000);
- }
+ &:hover {
+ background: var(--muted-3000);
}
}
+ }
- h1,
- h2,
- h3,
- h4,
- h5 {
- font-family: var(--font-title);
- }
-
- @include posthog-3000-variables;
+ h1,
+ h2,
+ h3,
+ h4,
+ h5 {
+ font-family: var(--font-title);
}
h1,
@@ -589,6 +565,19 @@ body {
color: var(--link);
}
+ .LemonButton,
+ .Link {
+ .text-link {
+ color: var(--text-3000);
+ }
+
+ &:hover {
+ .text-link {
+ color: var(--primary-3000);
+ }
+ }
+ }
+
// AntD uses its own border color for the bottom of tab lists, but we want to use `var(--border)`
.ant-tabs-top,
.ant-tabs-bottom {
@@ -707,6 +696,8 @@ body {
.ant-table-tbody > tr.ant-table-placeholder:hover > td {
background: inherit;
}
+
+ @include posthog-3000-variables;
}
.storybook-test-runner {
@@ -721,7 +712,7 @@ body {
}
// Hide some parts of the UI that were causing flakiness
- ::-webkit-scrollbar, // Scrollbar in WebKit/Blink browsers
+ ::-webkit-scrollbar, * > ::-webkit-scrollbar, // Scrollbar in WebKit/Blink browsers
.LemonTabs__bar::after, // Active tab slider
.scrollable::after, // Scrollability indicators
.scrollable::before {
@@ -729,21 +720,9 @@ body {
}
}
-.posthog-3000 {
- .ant-radio-button-wrapper {
- background: var(--secondary-3000);
- border-color: transparent;
- }
-}
-
-.posthog-3000[theme='dark'] {
- .ant-empty-img-simple-path {
- fill: var(--border-3000);
- }
-
- .ant-empty-img-simple-ellipse {
- fill: var(--border-3000);
- }
+.ant-radio-button-wrapper {
+ background: var(--secondary-3000);
+ border-color: transparent;
}
.ligatures-none {
diff --git a/frontend/src/toolbar/elements/Elements.tsx b/frontend/src/toolbar/elements/Elements.tsx
index 904cd148e8ab8..0e8c0a278d67f 100644
--- a/frontend/src/toolbar/elements/Elements.tsx
+++ b/frontend/src/toolbar/elements/Elements.tsx
@@ -2,7 +2,7 @@ import './Elements.scss'
import { useActions, useValues } from 'kea'
import { compactNumber } from 'lib/utils'
-import React from 'react'
+import { Fragment } from 'react'
import { ElementInfoWindow } from '~/toolbar/elements/ElementInfoWindow'
import { elementsLogic } from '~/toolbar/elements/elementsLogic'
@@ -75,7 +75,7 @@ export function Elements(): JSX.Element {
{heatmapElements.map(({ rect, count, clickCount, rageclickCount, element }, index) => {
return (
-
+
)}
-
+
)
})}
diff --git a/frontend/src/toolbar/utils.ts b/frontend/src/toolbar/utils.ts
index 8bb0562c04be8..368e569bb84b5 100644
--- a/frontend/src/toolbar/utils.ts
+++ b/frontend/src/toolbar/utils.ts
@@ -2,7 +2,6 @@ import { finder } from '@medv/finder'
import { CLICK_TARGET_SELECTOR, CLICK_TARGETS, escapeRegex, TAGS_TO_IGNORE } from 'lib/actionUtils'
import { cssEscape } from 'lib/utils/cssEscape'
import { querySelectorAllDeep } from 'query-selector-shadow-dom'
-import wildcardMatch from 'wildcard-match'
import { ActionStepForm, BoxColor, ElementRect } from '~/toolbar/types'
import { ActionStepType, StringMatching } from '~/types'
@@ -43,9 +42,13 @@ export function elementToQuery(element: HTMLElement, dataAttributes: string[]):
try {
return finder(element, {
- attr: (name) => dataAttributes.some((dataAttribute) => wildcardMatch(dataAttribute)(name)),
tagName: (name) => !TAGS_TO_IGNORE.includes(name),
seedMinLength: 5, // include several selectors e.g. prefer .project-homepage > .project-header > .project-title over .project-title
+ attr: (name) => {
+ // preference to data attributes if they exist
+ // that aren't in the PostHog preferred list - they were returned early above
+ return name.startsWith('data-')
+ },
})
} catch (error) {
console.warn('Error while trying to find a selector for element', element, error)
diff --git a/mypy-baseline.txt b/mypy-baseline.txt
index 46f86c337eadf..e64ff9ef15c81 100644
--- a/mypy-baseline.txt
+++ b/mypy-baseline.txt
@@ -365,11 +365,9 @@ posthog/hogql/query.py:0: error: Argument 1 to "get_default_limit_for_context" h
posthog/hogql/query.py:0: error: "SelectQuery" has no attribute "select_queries" [attr-defined]
posthog/hogql/query.py:0: error: Subclass of "SelectQuery" and "SelectUnionQuery" cannot exist: would have incompatible method signatures [unreachable]
posthog/hogql/autocomplete.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/hogql/autocomplete.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/hogql_queries/insights/trends/breakdown_values.py:0: error: Item "SelectUnionQuery" of "SelectQuery | SelectUnionQuery" has no attribute "select" [union-attr]
posthog/hogql_queries/insights/trends/breakdown_values.py:0: error: Value of type "list[Any] | None" is not indexable [index]
-posthog/hogql_queries/insights/funnels/base.py:0: error: Incompatible types in assignment (expression has type "FunnelExclusionEventsNode | FunnelExclusionActionsNode", variable has type "EventsNode | ActionsNode") [assignment]
-posthog/hogql_queries/insights/funnels/base.py:0: error: Item "EventsNode" of "EventsNode | ActionsNode" has no attribute "funnelFromStep" [union-attr]
-posthog/hogql_queries/insights/funnels/base.py:0: error: Item "ActionsNode" of "EventsNode | ActionsNode" has no attribute "funnelFromStep" [union-attr]
posthog/hogql_queries/sessions_timeline_query_runner.py:0: error: Statement is unreachable [unreachable]
posthog/hogql_queries/insights/trends/breakdown.py:0: error: Item "None" of "BreakdownFilter | None" has no attribute "breakdown_type" [union-attr]
posthog/hogql_queries/insights/trends/breakdown.py:0: error: Item "None" of "BreakdownFilter | None" has no attribute "breakdown_histogram_bin_count" [union-attr]
@@ -404,6 +402,9 @@ posthog/hogql_queries/events_query_runner.py:0: error: Statement is unreachable
posthog/hogql/metadata.py:0: error: Argument "metadata_source" to "translate_hogql" has incompatible type "SelectQuery | SelectUnionQuery"; expected "SelectQuery | None" [arg-type]
posthog/hogql/metadata.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "SelectQuery | SelectUnionQuery") [assignment]
posthog/queries/breakdown_props.py:0: error: Argument 1 to "translate_hogql" has incompatible type "str | int"; expected "str" [arg-type]
+posthog/hogql_queries/insights/funnels/base.py:0: error: Incompatible types in assignment (expression has type "FunnelExclusionEventsNode | FunnelExclusionActionsNode", variable has type "EventsNode | ActionsNode") [assignment]
+posthog/hogql_queries/insights/funnels/base.py:0: error: Item "EventsNode" of "EventsNode | ActionsNode" has no attribute "funnelFromStep" [union-attr]
+posthog/hogql_queries/insights/funnels/base.py:0: error: Item "ActionsNode" of "EventsNode | ActionsNode" has no attribute "funnelFromStep" [union-attr]
posthog/queries/funnels/base.py:0: error: "HogQLContext" has no attribute "person_on_events_mode" [attr-defined]
posthog/queries/funnels/base.py:0: error: Argument 1 to "translate_hogql" has incompatible type "str | int"; expected "str" [arg-type]
ee/clickhouse/queries/funnels/funnel_correlation.py:0: error: Statement is unreachable [unreachable]
diff --git a/package.json b/package.json
index 156d5116aeb27..62d4f9f2d41e8 100644
--- a/package.json
+++ b/package.json
@@ -71,7 +71,7 @@
"@dnd-kit/utilities": "^3.2.1",
"@floating-ui/react": "^0.16.0",
"@lottiefiles/react-lottie-player": "^3.4.7",
- "@medv/finder": "^2.1.0",
+ "@medv/finder": "^3.1.0",
"@microlink/react-json-view": "^1.21.3",
"@monaco-editor/react": "4.4.6",
"@posthog/icons": "0.5.1",
@@ -173,7 +173,6 @@
"tailwindcss": "^3.4.0",
"use-debounce": "^9.0.3",
"use-resize-observer": "^8.0.0",
- "wildcard-match": "^5.1.2",
"zxcvbn": "^4.4.2"
},
"devDependencies": {
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index d5d76b16d0a40..180a3a515a9d6 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -35,8 +35,8 @@ dependencies:
specifier: ^3.4.7
version: 3.4.7(react@18.2.0)
'@medv/finder':
- specifier: ^2.1.0
- version: 2.1.0
+ specifier: ^3.1.0
+ version: 3.1.0
'@microlink/react-json-view':
specifier: ^1.21.3
version: 1.22.2(@types/react@17.0.52)(react-dom@18.2.0)(react@18.2.0)
@@ -340,9 +340,6 @@ dependencies:
use-resize-observer:
specifier: ^8.0.0
version: 8.0.0(react-dom@18.2.0)(react@18.2.0)
- wildcard-match:
- specifier: ^5.1.2
- version: 5.1.2
zxcvbn:
specifier: ^4.4.2
version: 4.4.2
@@ -4921,8 +4918,8 @@ packages:
react: 18.2.0
dev: true
- /@medv/finder@2.1.0:
- resolution: {integrity: sha512-Egrg5XO4kLol24b1Kv50HDfi5hW0yQ6aWSsO0Hea1eJ4rogKElIN0M86FdVnGF4XIGYyA7QWx0MgbOzVPA0qkA==}
+ /@medv/finder@3.1.0:
+ resolution: {integrity: sha512-ojkXjR3K0Zz3jnCR80tqPL+0yvbZk/lEodb6RIVjLz7W8RVA2wrw8ym/CzCpXO9SYVUIKHFUpc7jvf8UKfIM3w==}
dev: false
/@microlink/react-json-view@1.22.2(@types/react@17.0.52)(react-dom@18.2.0)(react@18.2.0):
@@ -21383,10 +21380,6 @@ packages:
isexe: 2.0.0
dev: true
- /wildcard-match@5.1.2:
- resolution: {integrity: sha512-qNXwI591Z88c8bWxp+yjV60Ch4F8Riawe3iGxbzquhy8Xs9m+0+SLFBGb/0yCTIDElawtaImC37fYZ+dr32KqQ==}
- dev: false
-
/wildcard@2.0.0:
resolution: {integrity: sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw==}
dev: true
diff --git a/posthog/api/test/dashboards/test_dashboard.py b/posthog/api/test/dashboards/test_dashboard.py
index 45aebf355776b..4165f6b680687 100644
--- a/posthog/api/test/dashboards/test_dashboard.py
+++ b/posthog/api/test/dashboards/test_dashboard.py
@@ -894,17 +894,18 @@ def test_dashboard_duplication_can_duplicate_tiles_without_editing_name_if_there
def test_dashboard_duplication(self):
existing_dashboard = Dashboard.objects.create(team=self.team, name="existing dashboard", created_by=self.user)
insight1 = Insight.objects.create(filters={"name": "test1"}, team=self.team, last_refresh=now())
- DashboardTile.objects.create(dashboard=existing_dashboard, insight=insight1)
+ tile1 = DashboardTile.objects.create(dashboard=existing_dashboard, insight=insight1)
insight2 = Insight.objects.create(filters={"name": "test2"}, team=self.team, last_refresh=now())
- DashboardTile.objects.create(dashboard=existing_dashboard, insight=insight2)
+ tile2 = DashboardTile.objects.create(dashboard=existing_dashboard, insight=insight2)
_, response = self.dashboard_api.create_dashboard({"name": "another", "use_dashboard": existing_dashboard.pk})
self.assertEqual(response["creation_mode"], "duplicate")
self.assertEqual(len(response["tiles"]), len(existing_dashboard.insights.all()))
- existing_dashboard_item_id_set = set(map(lambda x: x.id, existing_dashboard.insights.all()))
+ existing_dashboard_item_id_set = {tile1.pk, tile2.pk}
response_item_id_set = set(map(lambda x: x.get("id", None), response["tiles"]))
# check both sets are disjoint to verify that the new items' ids are different than the existing items
+
self.assertTrue(existing_dashboard_item_id_set.isdisjoint(response_item_id_set))
for item in response["tiles"]:
diff --git a/posthog/clickhouse/migrations/0052_session_replay_embeddings.py b/posthog/clickhouse/migrations/0052_session_replay_embeddings.py
new file mode 100644
index 0000000000000..2263bbbd63655
--- /dev/null
+++ b/posthog/clickhouse/migrations/0052_session_replay_embeddings.py
@@ -0,0 +1,12 @@
+from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions
+from posthog.session_recordings.sql.session_replay_embeddings_sql import (
+ SESSION_REPLAY_EMBEDDINGS_TABLE_SQL,
+ DISTRIBUTED_SESSION_REPLAY_EMBEDDINGS_TABLE_SQL,
+ WRITABLE_SESSION_REPLAY_EMBEDDINGS_TABLE_SQL,
+)
+
+operations = [
+ run_sql_with_exceptions(WRITABLE_SESSION_REPLAY_EMBEDDINGS_TABLE_SQL()),
+ run_sql_with_exceptions(DISTRIBUTED_SESSION_REPLAY_EMBEDDINGS_TABLE_SQL()),
+ run_sql_with_exceptions(SESSION_REPLAY_EMBEDDINGS_TABLE_SQL()),
+]
diff --git a/posthog/hogql/autocomplete.py b/posthog/hogql/autocomplete.py
index c5808f05018a9..3a86ecd3fc3ae 100644
--- a/posthog/hogql/autocomplete.py
+++ b/posthog/hogql/autocomplete.py
@@ -97,6 +97,8 @@ def convert_field_or_table_to_type_string(field_or_table: FieldOrTable) -> str |
return "Date"
if isinstance(field_or_table, ast.StringJSONDatabaseField):
return "Object"
+ if isinstance(field_or_table, ast.ExpressionField):
+ return "Expression"
if isinstance(field_or_table, (ast.Table, ast.LazyJoin)):
return "Table"
@@ -181,6 +183,26 @@ def to_printed_hogql(self):
return None
+def get_tables_aliases(query: ast.SelectQuery, context: HogQLContext) -> Dict[str, ast.Table]:
+ tables: Dict[str, ast.Table] = {}
+
+ if query.select_from is not None and query.select_from.alias is not None:
+ table = get_table(context, query.select_from, query.ctes)
+ if table is not None:
+ tables[query.select_from.alias] = table
+
+ if query.select_from is not None and query.select_from.next_join is not None:
+ next_join: ast.JoinExpr | None = query.select_from.next_join
+ while next_join is not None:
+ if next_join.alias is not None:
+ table = get_table(context, next_join, query.ctes)
+ if table is not None:
+ tables[next_join.alias] = table
+ next_join = next_join.next_join
+
+ return tables
+
+
# Replaces all ast.FieldTraverser with the underlying node
def resolve_table_field_traversers(table: Table) -> Table:
new_table = deepcopy(table)
@@ -316,21 +338,26 @@ def get_hogql_autocomplete(query: HogQLAutocomplete, team: Team) -> HogQLAutocom
chain_len = len(node.chain)
last_table: Table = table
for index, chain_part in enumerate(node.chain):
- # TODO: Include joined table aliases
# Return just the table alias
if table_has_alias and index == 0 and chain_len == 1:
- alias = nearest_select.select_from.alias
- assert alias is not None
+ table_aliases = list(get_tables_aliases(nearest_select, context).keys())
extend_responses(
- keys=[alias],
+ keys=table_aliases,
suggestions=response.suggestions,
kind=Kind.Folder,
- details=["Table"],
+ details=["Table"] * len(table_aliases), # type: ignore
)
break
if table_has_alias and index == 0:
- continue
+ tables = get_tables_aliases(nearest_select, context)
+ aliased_table = tables.get(str(chain_part))
+ if aliased_table is not None:
+ last_table = aliased_table
+ continue
+ else:
+ # Dont continue if the alias is not found in the query
+ break
# Ignore last chain part, it's likely an incomplete word or added characters
is_last_part = index >= (chain_len - 2)
@@ -360,18 +387,21 @@ def get_hogql_autocomplete(query: HogQLAutocomplete, team: Team) -> HogQLAutocom
if match_term == MATCH_ANY_CHARACTER:
match_term = ""
- properties = PropertyDefinition.objects.filter(
+ property_query = PropertyDefinition.objects.filter(
name__contains=match_term,
team_id=team.pk,
type=property_type,
- )[:PROPERTY_DEFINITION_LIMIT].values("name", "property_type")
+ )
+
+ total_property_count = property_query.count()
+ properties = property_query[:PROPERTY_DEFINITION_LIMIT].values("name", "property_type")
extend_responses(
keys=[prop["name"] for prop in properties],
suggestions=response.suggestions,
details=[prop["property_type"] for prop in properties],
)
- response.incomplete_list = True
+ response.incomplete_list = total_property_count > PROPERTY_DEFINITION_LIMIT
elif isinstance(field, VirtualTable) or isinstance(field, LazyTable):
fields = list(last_table.fields.items())
extend_responses(
diff --git a/posthog/hogql/test/test_autocomplete.py b/posthog/hogql/test/test_autocomplete.py
index 8c5571fdeeea1..46eb8a1cd0394 100644
--- a/posthog/hogql/test/test_autocomplete.py
+++ b/posthog/hogql/test/test_autocomplete.py
@@ -165,7 +165,64 @@ def test_autocomplete_complete_list(self):
results = self._query_response(query=query, start=7, end=12)
assert results.incomplete_list is False
- def test_autocomplete_incomplete_list(self):
+ def test_autocomplete_properties_list_with_under_220_properties(self):
+ for index in range(20):
+ PropertyDefinition.objects.create(
+ team=self.team,
+ name=f"some_event_value_{index}",
+ property_type="String",
+ type=PropertyDefinition.Type.EVENT,
+ )
+
+ query = "select properties. from events"
+ results = self._query_response(query=query, start=18, end=18)
+ assert results.incomplete_list is False
+
+ def test_autocomplete_properties_list_with_over_220_properties(self):
+ for index in range(221):
+ PropertyDefinition.objects.create(
+ team=self.team,
+ name=f"some_event_value_{index}",
+ property_type="String",
+ type=PropertyDefinition.Type.EVENT,
+ )
+
query = "select properties. from events"
results = self._query_response(query=query, start=18, end=18)
assert results.incomplete_list is True
+
+ def test_autocomplete_joined_tables(self):
+ query = "select p. from events e left join persons p on e.person_id = p.id"
+ results = self._query_response(query=query, start=9, end=9)
+
+ assert len(results.suggestions) != 0
+
+ keys = list(PERSONS_FIELDS.keys())
+
+ for index, key in enumerate(keys):
+ assert results.suggestions[index].label == key
+
+ def test_autocomplete_joined_table_contraints(self):
+ query = "select p.id from events e left join persons p on e.person_id = p."
+ results = self._query_response(query=query, start=65, end=65)
+
+ assert len(results.suggestions) != 0
+
+ keys = list(PERSONS_FIELDS.keys())
+
+ for index, key in enumerate(keys):
+ assert results.suggestions[index].label == key
+
+ def test_autocomplete_joined_tables_aliases(self):
+ query = "select from events e left join persons p on e.person_id = p.id"
+ results = self._query_response(query=query, start=7, end=7)
+
+ assert len(results.suggestions) == 2
+ assert results.suggestions[0].label == "e"
+ assert results.suggestions[1].label == "p"
+
+ def test_autocomplete_non_existing_alias(self):
+ query = "select o. from events e"
+ results = self._query_response(query=query, start=9, end=9)
+
+ assert len(results.suggestions) == 0
diff --git a/posthog/hogql/transforms/test/__snapshots__/test_in_cohort.ambr b/posthog/hogql/transforms/test/__snapshots__/test_in_cohort.ambr
index d45052c06889a..9ff7f8ee0ab49 100644
--- a/posthog/hogql/transforms/test/__snapshots__/test_in_cohort.ambr
+++ b/posthog/hogql/transforms/test/__snapshots__/test_in_cohort.ambr
@@ -31,7 +31,7 @@
FROM events LEFT JOIN (
SELECT person_static_cohort.person_id AS cohort_person_id, 1 AS matched, person_static_cohort.cohort_id AS cohort_id
FROM person_static_cohort
- WHERE and(equals(person_static_cohort.team_id, 420), in(person_static_cohort.cohort_id, [16]))) AS __in_cohort ON equals(__in_cohort.cohort_person_id, events.person_id)
+ WHERE and(equals(person_static_cohort.team_id, 420), in(person_static_cohort.cohort_id, [12]))) AS __in_cohort ON equals(__in_cohort.cohort_person_id, events.person_id)
WHERE and(equals(events.team_id, 420), 1, ifNull(equals(__in_cohort.matched, 1), 0))
LIMIT 100
SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1
@@ -42,7 +42,7 @@
FROM events LEFT JOIN (
SELECT person_id AS cohort_person_id, 1 AS matched, cohort_id
FROM static_cohort_people
- WHERE in(cohort_id, [16])) AS __in_cohort ON equals(__in_cohort.cohort_person_id, person_id)
+ WHERE in(cohort_id, [12])) AS __in_cohort ON equals(__in_cohort.cohort_person_id, person_id)
WHERE and(1, equals(__in_cohort.matched, 1))
LIMIT 100
'''
@@ -55,7 +55,7 @@
FROM events LEFT JOIN (
SELECT person_static_cohort.person_id AS cohort_person_id, 1 AS matched, person_static_cohort.cohort_id AS cohort_id
FROM person_static_cohort
- WHERE and(equals(person_static_cohort.team_id, 420), in(person_static_cohort.cohort_id, [17]))) AS __in_cohort ON equals(__in_cohort.cohort_person_id, events.person_id)
+ WHERE and(equals(person_static_cohort.team_id, 420), in(person_static_cohort.cohort_id, [13]))) AS __in_cohort ON equals(__in_cohort.cohort_person_id, events.person_id)
WHERE and(equals(events.team_id, 420), 1, ifNull(equals(__in_cohort.matched, 1), 0))
LIMIT 100
SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1
@@ -66,7 +66,7 @@
FROM events LEFT JOIN (
SELECT person_id AS cohort_person_id, 1 AS matched, cohort_id
FROM static_cohort_people
- WHERE in(cohort_id, [17])) AS __in_cohort ON equals(__in_cohort.cohort_person_id, person_id)
+ WHERE in(cohort_id, [13])) AS __in_cohort ON equals(__in_cohort.cohort_person_id, person_id)
WHERE and(1, equals(__in_cohort.matched, 1))
LIMIT 100
'''
diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel.ambr
index 67eb76233f04e..b960faa708110 100644
--- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel.ambr
+++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel.ambr
@@ -350,7 +350,7 @@
if(and(equals(e.event, 'user signed up'), ifNull(in(e__pdi.person_id,
(SELECT cohortpeople.person_id AS person_id
FROM cohortpeople
- WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 1))
+ WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 20))
GROUP BY cohortpeople.person_id, cohortpeople.cohort_id, cohortpeople.version
HAVING ifNull(greater(sum(cohortpeople.sign), 0), 0))), 0)), 1, 0) AS step_0,
if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
@@ -871,7 +871,7 @@
if(and(equals(e.event, 'user signed up'), ifNull(in(e__pdi.person_id,
(SELECT person_static_cohort.person_id AS person_id
FROM person_static_cohort
- WHERE and(equals(person_static_cohort.team_id, 2), equals(person_static_cohort.cohort_id, 1)))), 0)), 1, 0) AS step_0,
+ WHERE and(equals(person_static_cohort.team_id, 2), equals(person_static_cohort.cohort_id, 21)))), 0)), 1, 0) AS step_0,
if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
if(equals(e.event, 'paid'), 1, 0) AS step_1,
if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1
@@ -953,20 +953,19 @@
allow_experimental_object_type=1
'''
# ---
-# name: TestFunnelBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events
+# name: TestFunnelBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen
'''
- SELECT ifNull(e__group_0.properties___industry, '') AS value,
+ SELECT [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), '')] AS value,
count(*) AS count
FROM events AS e
- LEFT JOIN
- (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), groups._timestamp) AS properties___industry,
- groups.group_type_index AS index,
- groups.group_key AS key
- FROM groups
- WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0))
- GROUP BY groups.group_type_index,
- groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key)
- WHERE and(equals(e.team_id, 2), in(e.event, tuple('buy', 'play movie', 'sign up')), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), notEmpty(e.person_id))
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ WHERE and(equals(e.team_id, 2), in(e.event, tuple('buy', 'sign up')), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), notEmpty(e__pdi.person_id))
GROUP BY value
ORDER BY count DESC, value DESC
LIMIT 26
@@ -975,24 +974,19 @@
allow_experimental_object_type=1
'''
# ---
-# name: TestFunnelBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events.1
+# name: TestFunnelBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen.1
'''
SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1,
countIf(ifNull(equals(steps, 2), 0)) AS step_2,
- countIf(ifNull(equals(steps, 3), 0)) AS step_3,
avg(step_1_average_conversion_time_inner) AS step_1_average_conversion_time,
- avg(step_2_average_conversion_time_inner) AS step_2_average_conversion_time,
median(step_1_median_conversion_time_inner) AS step_1_median_conversion_time,
- median(step_2_median_conversion_time_inner) AS step_2_median_conversion_time,
prop AS prop
FROM
(SELECT aggregation_target AS aggregation_target,
steps AS steps,
prop AS prop,
avg(step_1_conversion_time) AS step_1_average_conversion_time_inner,
- avg(step_2_conversion_time) AS step_2_average_conversion_time_inner,
median(step_1_conversion_time) AS step_1_median_conversion_time_inner,
- median(step_2_conversion_time) AS step_2_median_conversion_time_inner,
prop AS prop
FROM
(SELECT aggregation_target AS aggregation_target,
@@ -1001,7 +995,6 @@
max(steps) OVER (PARTITION BY aggregation_target,
prop) AS max_steps,
step_1_conversion_time AS step_1_conversion_time,
- step_2_conversion_time AS step_2_conversion_time,
prop AS prop
FROM
(SELECT aggregation_target AS aggregation_target,
@@ -1010,12 +1003,9 @@
latest_0 AS latest_0,
step_1 AS step_1,
latest_1 AS latest_1,
- step_2 AS step_2,
- latest_2 AS latest_2,
prop AS prop,
- if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(latest_0, toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 2, 1)) AS steps,
+ if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 2, 1) AS steps,
if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time,
- if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(latest_1, toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time,
prop AS prop
FROM
(SELECT aggregation_target AS aggregation_target,
@@ -1023,71 +1013,40 @@
step_0 AS step_0,
latest_0 AS latest_0,
step_1 AS step_1,
- latest_1 AS latest_1,
- step_2 AS step_2,
- min(latest_2) OVER (PARTITION BY aggregation_target,
+ min(latest_1) OVER (PARTITION BY aggregation_target,
prop
- ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2,
- prop AS prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
+ if(has([[''], ['Mac'], ['Chrome'], ['Safari']], prop), prop, ['Other']) AS prop
FROM
- (SELECT aggregation_target AS aggregation_target,
- timestamp AS timestamp,
+ (SELECT timestamp AS timestamp,
+ aggregation_target AS aggregation_target,
step_0 AS step_0,
latest_0 AS latest_0,
step_1 AS step_1,
latest_1 AS latest_1,
- step_2 AS step_2,
- if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2,
- prop AS prop
+ prop_basic AS prop_basic,
+ prop,
+ prop_vals AS prop_vals,
+ if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) AS prop
FROM
- (SELECT aggregation_target AS aggregation_target,
- timestamp AS timestamp,
- step_0 AS step_0,
- latest_0 AS latest_0,
- step_1 AS step_1,
- min(latest_1) OVER (PARTITION BY aggregation_target,
- prop
- ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
- step_2 AS step_2,
- min(latest_2) OVER (PARTITION BY aggregation_target,
- prop
- ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2,
- if(has(['technology', 'finance'], prop), prop, 'Other') AS prop
- FROM
- (SELECT timestamp AS timestamp,
- aggregation_target AS aggregation_target,
- step_0 AS step_0,
- latest_0 AS latest_0,
- step_1 AS step_1,
- latest_1 AS latest_1,
- step_2 AS step_2,
- latest_2 AS latest_2,
- prop_basic AS prop_basic,
- prop,
- prop_vals AS prop_vals,
- prop_vals AS prop
- FROM
- (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
- e.`$group_0` AS aggregation_target,
- if(equals(e.event, 'sign up'), 1, 0) AS step_0,
- if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
- if(equals(e.event, 'play movie'), 1, 0) AS step_1,
- if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
- if(equals(e.event, 'buy'), 1, 0) AS step_2,
- if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2,
- ifNull(e__group_0.properties___industry, '') AS prop_basic,
- prop_basic AS prop,
- argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals
- FROM events AS e
- LEFT JOIN
- (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), groups._timestamp) AS properties___industry,
- groups.group_type_index AS index,
- groups.group_key AS key
- FROM groups
- WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0))
- GROUP BY groups.group_type_index,
- groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key)
- WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'play movie', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))))
+ (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
+ e__pdi.person_id AS aggregation_target,
+ if(equals(e.event, 'sign up'), 1, 0) AS step_0,
+ if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
+ if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1,
+ if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
+ [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), '')] AS prop_basic,
+ prop_basic AS prop,
+ argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals
+ FROM events AS e
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))))
WHERE ifNull(equals(step_0, 1), 0)))
GROUP BY aggregation_target,
steps,
@@ -1100,26 +1059,19 @@
allow_experimental_object_type=1
'''
# ---
-# name: TestFunnelBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2
+# name: TestFunnelBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step
'''
- SELECT ifNull(e__group_0.properties___industry, '') AS value,
+ SELECT [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), '')] AS value,
count(*) AS count
FROM events AS e
- LEFT OUTER JOIN
- (SELECT argMax(person_overrides.override_person_id, person_overrides.version) AS override_person_id,
- person_overrides.old_person_id AS old_person_id
- FROM person_overrides
- WHERE equals(person_overrides.team_id, 2)
- GROUP BY person_overrides.old_person_id) AS e__override ON equals(e.person_id, e__override.old_person_id)
- LEFT JOIN
- (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), groups._timestamp) AS properties___industry,
- groups.group_type_index AS index,
- groups.group_key AS key
- FROM groups
- WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0))
- GROUP BY groups.group_type_index,
- groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key)
- WHERE and(equals(e.team_id, 2), in(e.event, tuple('buy', 'play movie', 'sign up')), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), notEmpty(ifNull(nullIf(e__override.override_person_id, '00000000-0000-0000-0000-000000000000'), e.person_id)))
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ WHERE and(equals(e.team_id, 2), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), notEmpty(e__pdi.person_id))
GROUP BY value
ORDER BY count DESC, value DESC
LIMIT 26
@@ -1128,24 +1080,19 @@
allow_experimental_object_type=1
'''
# ---
-# name: TestFunnelBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2.1
+# name: TestFunnelBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step.1
'''
SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1,
countIf(ifNull(equals(steps, 2), 0)) AS step_2,
- countIf(ifNull(equals(steps, 3), 0)) AS step_3,
avg(step_1_average_conversion_time_inner) AS step_1_average_conversion_time,
- avg(step_2_average_conversion_time_inner) AS step_2_average_conversion_time,
median(step_1_median_conversion_time_inner) AS step_1_median_conversion_time,
- median(step_2_median_conversion_time_inner) AS step_2_median_conversion_time,
prop AS prop
FROM
(SELECT aggregation_target AS aggregation_target,
steps AS steps,
prop AS prop,
avg(step_1_conversion_time) AS step_1_average_conversion_time_inner,
- avg(step_2_conversion_time) AS step_2_average_conversion_time_inner,
median(step_1_conversion_time) AS step_1_median_conversion_time_inner,
- median(step_2_conversion_time) AS step_2_median_conversion_time_inner,
prop AS prop
FROM
(SELECT aggregation_target AS aggregation_target,
@@ -1154,7 +1101,6 @@
max(steps) OVER (PARTITION BY aggregation_target,
prop) AS max_steps,
step_1_conversion_time AS step_1_conversion_time,
- step_2_conversion_time AS step_2_conversion_time,
prop AS prop
FROM
(SELECT aggregation_target AS aggregation_target,
@@ -1163,12 +1109,9 @@
latest_0 AS latest_0,
step_1 AS step_1,
latest_1 AS latest_1,
- step_2 AS step_2,
- latest_2 AS latest_2,
prop AS prop,
- if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(latest_0, toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 2, 1)) AS steps,
+ if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 2, 1) AS steps,
if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time,
- if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(latest_1, toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time,
prop AS prop
FROM
(SELECT aggregation_target AS aggregation_target,
@@ -1176,71 +1119,47 @@
step_0 AS step_0,
latest_0 AS latest_0,
step_1 AS step_1,
- latest_1 AS latest_1,
- step_2 AS step_2,
- min(latest_2) OVER (PARTITION BY aggregation_target,
+ min(latest_1) OVER (PARTITION BY aggregation_target,
prop
- ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2,
- prop AS prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
+ if(has([['Safari'], ['Mac'], ['Chrome']], prop), prop, ['Other']) AS prop
FROM
- (SELECT aggregation_target AS aggregation_target,
- timestamp AS timestamp,
+ (SELECT timestamp AS timestamp,
+ aggregation_target AS aggregation_target,
step_0 AS step_0,
latest_0 AS latest_0,
step_1 AS step_1,
latest_1 AS latest_1,
- step_2 AS step_2,
- if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2,
- prop AS prop
+ prop_basic AS prop_basic,
+ prop_0 AS prop_0,
+ prop_1 AS prop_1,
+ prop,
+ prop_vals AS prop_vals,
+ prop
FROM
- (SELECT aggregation_target AS aggregation_target,
- timestamp AS timestamp,
- step_0 AS step_0,
- latest_0 AS latest_0,
- step_1 AS step_1,
- min(latest_1) OVER (PARTITION BY aggregation_target,
- prop
- ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
- step_2 AS step_2,
- min(latest_2) OVER (PARTITION BY aggregation_target,
- prop
- ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2,
- if(has(['technology', 'finance'], prop), prop, 'Other') AS prop
- FROM
- (SELECT timestamp AS timestamp,
- aggregation_target AS aggregation_target,
- step_0 AS step_0,
- latest_0 AS latest_0,
- step_1 AS step_1,
- latest_1 AS latest_1,
- step_2 AS step_2,
- latest_2 AS latest_2,
- prop_basic AS prop_basic,
- prop,
- prop_vals AS prop_vals,
- prop_vals AS prop
- FROM
- (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
- e.`$group_0` AS aggregation_target,
- if(equals(e.event, 'sign up'), 1, 0) AS step_0,
- if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
- if(equals(e.event, 'play movie'), 1, 0) AS step_1,
- if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
- if(equals(e.event, 'buy'), 1, 0) AS step_2,
- if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2,
- ifNull(e__group_0.properties___industry, '') AS prop_basic,
- prop_basic AS prop,
- argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals
- FROM events AS e
- LEFT JOIN
- (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), groups._timestamp) AS properties___industry,
- groups.group_type_index AS index,
- groups.group_key AS key
- FROM groups
- WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0))
- GROUP BY groups.group_type_index,
- groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key)
- WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'play movie', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))))
+ (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
+ e__pdi.person_id AS aggregation_target,
+ if(equals(e.event, 'sign up'), 1, 0) AS step_0,
+ if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
+ if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1,
+ if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
+ [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), '')] AS prop_basic,
+ if(ifNull(equals(step_0, 1), 0), prop_basic, []) AS prop_0,
+ if(ifNull(equals(step_1, 1), 0), prop_basic, []) AS prop_1,
+ prop_1 AS prop,
+ groupUniqArray(prop) OVER (PARTITION BY aggregation_target) AS prop_vals
+ FROM events AS e
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))) ARRAY
+ JOIN prop_vals AS prop
+ WHERE ifNull(notEquals(prop, []), isNotNull(prop)
+ or isNotNull([]))))
WHERE ifNull(equals(step_0, 1), 0)))
GROUP BY aggregation_target,
steps,
@@ -1253,9 +1172,9 @@
allow_experimental_object_type=1
'''
# ---
-# name: TestFunnelBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen
+# name: TestFunnelBreakdown.test_funnel_step_multiple_breakdown_snapshot
'''
- SELECT [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), '')] AS value,
+ SELECT [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), ''), ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), '')] AS value,
count(*) AS count
FROM events AS e
INNER JOIN
@@ -1274,7 +1193,7 @@
allow_experimental_object_type=1
'''
# ---
-# name: TestFunnelBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen.1
+# name: TestFunnelBreakdown.test_funnel_step_multiple_breakdown_snapshot.1
'''
SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1,
countIf(ifNull(equals(steps, 2), 0)) AS step_2,
@@ -1316,7 +1235,7 @@
min(latest_1) OVER (PARTITION BY aggregation_target,
prop
ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
- if(has([[''], ['Mac'], ['Chrome'], ['Safari']], prop), prop, ['Other']) AS prop
+ if(has([['', ''], ['alakazam', ''], ['Safari', 'xyz'], ['Mac', ''], ['Chrome', 'xyz'], ['0', '0'], ['', 'no-mac']], prop), prop, ['Other']) AS prop
FROM
(SELECT timestamp AS timestamp,
aggregation_target AS aggregation_target,
@@ -1327,15 +1246,15 @@
prop_basic AS prop_basic,
prop,
prop_vals AS prop_vals,
- if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) AS prop
+ if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['', '']) AS prop
FROM
(SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
e__pdi.person_id AS aggregation_target,
if(equals(e.event, 'sign up'), 1, 0) AS step_0,
if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
- if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1,
+ if(equals(e.event, 'buy'), 1, 0) AS step_1,
if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
- [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), '')] AS prop_basic,
+ [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), ''), ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), '')] AS prop_basic,
prop_basic AS prop,
argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals
FROM events AS e
@@ -1359,19 +1278,173 @@
allow_experimental_object_type=1
'''
# ---
-# name: TestFunnelBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step
+# name: TestFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events
+ '''
+ SELECT ifNull(e__group_0.properties___industry, '') AS value,
+ count(*) AS count
+ FROM events AS e
+ LEFT JOIN
+ (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), groups._timestamp) AS properties___industry,
+ groups.group_type_index AS index,
+ groups.group_key AS key
+ FROM groups
+ WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0))
+ GROUP BY groups.group_type_index,
+ groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key)
+ WHERE and(equals(e.team_id, 2), in(e.event, tuple('buy', 'play movie', 'sign up')), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), notEmpty(e.person_id))
+ GROUP BY value
+ ORDER BY count DESC, value DESC
+ LIMIT 26
+ OFFSET 0 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1
+ '''
+# ---
+# name: TestFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events.1
+ '''
+ SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1,
+ countIf(ifNull(equals(steps, 2), 0)) AS step_2,
+ countIf(ifNull(equals(steps, 3), 0)) AS step_3,
+ avg(step_1_average_conversion_time_inner) AS step_1_average_conversion_time,
+ avg(step_2_average_conversion_time_inner) AS step_2_average_conversion_time,
+ median(step_1_median_conversion_time_inner) AS step_1_median_conversion_time,
+ median(step_2_median_conversion_time_inner) AS step_2_median_conversion_time,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ steps AS steps,
+ prop AS prop,
+ avg(step_1_conversion_time) AS step_1_average_conversion_time_inner,
+ avg(step_2_conversion_time) AS step_2_average_conversion_time_inner,
+ median(step_1_conversion_time) AS step_1_median_conversion_time_inner,
+ median(step_2_conversion_time) AS step_2_median_conversion_time_inner,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ steps AS steps,
+ prop AS prop,
+ max(steps) OVER (PARTITION BY aggregation_target,
+ prop) AS max_steps,
+ step_1_conversion_time AS step_1_conversion_time,
+ step_2_conversion_time AS step_2_conversion_time,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ latest_2 AS latest_2,
+ prop AS prop,
+ if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(latest_0, toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 2, 1)) AS steps,
+ if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time,
+ if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(latest_1, toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ min(latest_2) OVER (PARTITION BY aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ min(latest_1) OVER (PARTITION BY aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
+ step_2 AS step_2,
+ min(latest_2) OVER (PARTITION BY aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2,
+ if(has(['technology', 'finance'], prop), prop, 'Other') AS prop
+ FROM
+ (SELECT timestamp AS timestamp,
+ aggregation_target AS aggregation_target,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ latest_2 AS latest_2,
+ prop_basic AS prop_basic,
+ prop,
+ prop_vals AS prop_vals,
+ prop_vals AS prop
+ FROM
+ (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
+ e.`$group_0` AS aggregation_target,
+ if(equals(e.event, 'sign up'), 1, 0) AS step_0,
+ if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
+ if(equals(e.event, 'play movie'), 1, 0) AS step_1,
+ if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
+ if(equals(e.event, 'buy'), 1, 0) AS step_2,
+ if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2,
+ ifNull(e__group_0.properties___industry, '') AS prop_basic,
+ prop_basic AS prop,
+ argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals
+ FROM events AS e
+ LEFT JOIN
+ (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), groups._timestamp) AS properties___industry,
+ groups.group_type_index AS index,
+ groups.group_key AS key
+ FROM groups
+ WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0))
+ GROUP BY groups.group_type_index,
+ groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key)
+ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'play movie', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))))
+ WHERE ifNull(equals(step_0, 1), 0)))
+ GROUP BY aggregation_target,
+ steps,
+ prop
+ HAVING ifNull(equals(steps, max_steps), isNull(steps)
+ and isNull(max_steps)))
+ GROUP BY prop
+ LIMIT 100 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1
+ '''
+# ---
+# name: TestFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2
'''
- SELECT [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), '')] AS value,
+ SELECT ifNull(e__group_0.properties___industry, '') AS value,
count(*) AS count
FROM events AS e
- INNER JOIN
- (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
- person_distinct_id2.distinct_id AS distinct_id
- FROM person_distinct_id2
- WHERE equals(person_distinct_id2.team_id, 2)
- GROUP BY person_distinct_id2.distinct_id
- HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
- WHERE and(equals(e.team_id, 2), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), notEmpty(e__pdi.person_id))
+ LEFT OUTER JOIN
+ (SELECT argMax(person_overrides.override_person_id, person_overrides.version) AS override_person_id,
+ person_overrides.old_person_id AS old_person_id
+ FROM person_overrides
+ WHERE equals(person_overrides.team_id, 2)
+ GROUP BY person_overrides.old_person_id) AS e__override ON equals(e.person_id, e__override.old_person_id)
+ LEFT JOIN
+ (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), groups._timestamp) AS properties___industry,
+ groups.group_type_index AS index,
+ groups.group_key AS key
+ FROM groups
+ WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0))
+ GROUP BY groups.group_type_index,
+ groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key)
+ WHERE and(equals(e.team_id, 2), in(e.event, tuple('buy', 'play movie', 'sign up')), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), notEmpty(ifNull(nullIf(e__override.override_person_id, '00000000-0000-0000-0000-000000000000'), e.person_id)))
GROUP BY value
ORDER BY count DESC, value DESC
LIMIT 26
@@ -1380,19 +1453,24 @@
allow_experimental_object_type=1
'''
# ---
-# name: TestFunnelBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step.1
+# name: TestFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2.1
'''
SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1,
countIf(ifNull(equals(steps, 2), 0)) AS step_2,
+ countIf(ifNull(equals(steps, 3), 0)) AS step_3,
avg(step_1_average_conversion_time_inner) AS step_1_average_conversion_time,
+ avg(step_2_average_conversion_time_inner) AS step_2_average_conversion_time,
median(step_1_median_conversion_time_inner) AS step_1_median_conversion_time,
+ median(step_2_median_conversion_time_inner) AS step_2_median_conversion_time,
prop AS prop
FROM
(SELECT aggregation_target AS aggregation_target,
steps AS steps,
prop AS prop,
avg(step_1_conversion_time) AS step_1_average_conversion_time_inner,
+ avg(step_2_conversion_time) AS step_2_average_conversion_time_inner,
median(step_1_conversion_time) AS step_1_median_conversion_time_inner,
+ median(step_2_conversion_time) AS step_2_median_conversion_time_inner,
prop AS prop
FROM
(SELECT aggregation_target AS aggregation_target,
@@ -1401,6 +1479,7 @@
max(steps) OVER (PARTITION BY aggregation_target,
prop) AS max_steps,
step_1_conversion_time AS step_1_conversion_time,
+ step_2_conversion_time AS step_2_conversion_time,
prop AS prop
FROM
(SELECT aggregation_target AS aggregation_target,
@@ -1409,9 +1488,12 @@
latest_0 AS latest_0,
step_1 AS step_1,
latest_1 AS latest_1,
+ step_2 AS step_2,
+ latest_2 AS latest_2,
prop AS prop,
- if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 2, 1) AS steps,
+ if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(latest_0, toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 2, 1)) AS steps,
if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time,
+ if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(latest_1, toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time,
prop AS prop
FROM
(SELECT aggregation_target AS aggregation_target,
@@ -1419,47 +1501,71 @@
step_0 AS step_0,
latest_0 AS latest_0,
step_1 AS step_1,
- min(latest_1) OVER (PARTITION BY aggregation_target,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ min(latest_2) OVER (PARTITION BY aggregation_target,
prop
- ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
- if(has([['Safari'], ['Mac'], ['Chrome']], prop), prop, ['Other']) AS prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2,
+ prop AS prop
FROM
- (SELECT timestamp AS timestamp,
- aggregation_target AS aggregation_target,
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
step_0 AS step_0,
latest_0 AS latest_0,
step_1 AS step_1,
latest_1 AS latest_1,
- prop_basic AS prop_basic,
- prop_0 AS prop_0,
- prop_1 AS prop_1,
- prop,
- prop_vals AS prop_vals,
- prop
+ step_2 AS step_2,
+ if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2,
+ prop AS prop
FROM
- (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
- e__pdi.person_id AS aggregation_target,
- if(equals(e.event, 'sign up'), 1, 0) AS step_0,
- if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
- if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1,
- if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
- [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), '')] AS prop_basic,
- if(ifNull(equals(step_0, 1), 0), prop_basic, []) AS prop_0,
- if(ifNull(equals(step_1, 1), 0), prop_basic, []) AS prop_1,
- prop_1 AS prop,
- groupUniqArray(prop) OVER (PARTITION BY aggregation_target) AS prop_vals
- FROM events AS e
- INNER JOIN
- (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
- person_distinct_id2.distinct_id AS distinct_id
- FROM person_distinct_id2
- WHERE equals(person_distinct_id2.team_id, 2)
- GROUP BY person_distinct_id2.distinct_id
- HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
- WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))) ARRAY
- JOIN prop_vals AS prop
- WHERE ifNull(notEquals(prop, []), isNotNull(prop)
- or isNotNull([]))))
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ min(latest_1) OVER (PARTITION BY aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
+ step_2 AS step_2,
+ min(latest_2) OVER (PARTITION BY aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2,
+ if(has(['technology', 'finance'], prop), prop, 'Other') AS prop
+ FROM
+ (SELECT timestamp AS timestamp,
+ aggregation_target AS aggregation_target,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ latest_2 AS latest_2,
+ prop_basic AS prop_basic,
+ prop,
+ prop_vals AS prop_vals,
+ prop_vals AS prop
+ FROM
+ (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
+ e.`$group_0` AS aggregation_target,
+ if(equals(e.event, 'sign up'), 1, 0) AS step_0,
+ if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
+ if(equals(e.event, 'play movie'), 1, 0) AS step_1,
+ if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
+ if(equals(e.event, 'buy'), 1, 0) AS step_2,
+ if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2,
+ ifNull(e__group_0.properties___industry, '') AS prop_basic,
+ prop_basic AS prop,
+ argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals
+ FROM events AS e
+ LEFT JOIN
+ (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), groups._timestamp) AS properties___industry,
+ groups.group_type_index AS index,
+ groups.group_key AS key
+ FROM groups
+ WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0))
+ GROUP BY groups.group_type_index,
+ groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key)
+ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'play movie', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))))
WHERE ifNull(equals(step_0, 1), 0)))
GROUP BY aggregation_target,
steps,
@@ -1472,7 +1578,7 @@
allow_experimental_object_type=1
'''
# ---
-# name: TestFunnelBreakdown.test_funnel_breakdown_group
+# name: TestFunnelGroupBreakdown.test_funnel_breakdown_group
'''
SELECT ifNull(e__group_0.properties___industry, '') AS value,
count(*) AS count
@@ -1501,7 +1607,7 @@
allow_experimental_object_type=1
'''
# ---
-# name: TestFunnelBreakdown.test_funnel_breakdown_group.1
+# name: TestFunnelGroupBreakdown.test_funnel_breakdown_group.1
'''
SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1,
countIf(ifNull(equals(steps, 2), 0)) AS step_2,
@@ -1633,7 +1739,7 @@
allow_experimental_object_type=1
'''
# ---
-# name: TestFunnelBreakdown.test_funnel_breakdown_group.2
+# name: TestFunnelGroupBreakdown.test_funnel_breakdown_group.2
'''
SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value,
@@ -1656,7 +1762,7 @@
OFFSET 0
'''
# ---
-# name: TestFunnelBreakdown.test_funnel_breakdown_group.3
+# name: TestFunnelGroupBreakdown.test_funnel_breakdown_group.3
'''
SELECT aggregation_target AS actor_id
@@ -1776,7 +1882,7 @@
max_expanded_ast_elements=1000000
'''
# ---
-# name: TestFunnelBreakdown.test_funnel_breakdown_group.4
+# name: TestFunnelGroupBreakdown.test_funnel_breakdown_group.4
'''
SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value,
@@ -1799,7 +1905,7 @@
OFFSET 0
'''
# ---
-# name: TestFunnelBreakdown.test_funnel_breakdown_group.5
+# name: TestFunnelGroupBreakdown.test_funnel_breakdown_group.5
'''
SELECT aggregation_target AS actor_id
@@ -1919,7 +2025,7 @@
max_expanded_ast_elements=1000000
'''
# ---
-# name: TestFunnelBreakdown.test_funnel_breakdown_group.6
+# name: TestFunnelGroupBreakdown.test_funnel_breakdown_group.6
'''
SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value,
@@ -1942,7 +2048,7 @@
OFFSET 0
'''
# ---
-# name: TestFunnelBreakdown.test_funnel_breakdown_group.7
+# name: TestFunnelGroupBreakdown.test_funnel_breakdown_group.7
'''
SELECT aggregation_target AS actor_id
@@ -2062,7 +2168,7 @@
max_expanded_ast_elements=1000000
'''
# ---
-# name: TestFunnelBreakdown.test_funnel_breakdown_group.8
+# name: TestFunnelGroupBreakdown.test_funnel_breakdown_group.8
'''
SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value,
@@ -2085,7 +2191,7 @@
OFFSET 0
'''
# ---
-# name: TestFunnelBreakdown.test_funnel_breakdown_group.9
+# name: TestFunnelGroupBreakdown.test_funnel_breakdown_group.9
'''
SELECT aggregation_target AS actor_id
@@ -2205,109 +2311,3 @@
max_expanded_ast_elements=1000000
'''
# ---
-# name: TestFunnelBreakdown.test_funnel_step_multiple_breakdown_snapshot
- '''
- SELECT [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), ''), ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), '')] AS value,
- count(*) AS count
- FROM events AS e
- INNER JOIN
- (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
- person_distinct_id2.distinct_id AS distinct_id
- FROM person_distinct_id2
- WHERE equals(person_distinct_id2.team_id, 2)
- GROUP BY person_distinct_id2.distinct_id
- HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
- WHERE and(equals(e.team_id, 2), in(e.event, tuple('buy', 'sign up')), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), notEmpty(e__pdi.person_id))
- GROUP BY value
- ORDER BY count DESC, value DESC
- LIMIT 26
- OFFSET 0 SETTINGS readonly=2,
- max_execution_time=60,
- allow_experimental_object_type=1
- '''
-# ---
-# name: TestFunnelBreakdown.test_funnel_step_multiple_breakdown_snapshot.1
- '''
- SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1,
- countIf(ifNull(equals(steps, 2), 0)) AS step_2,
- avg(step_1_average_conversion_time_inner) AS step_1_average_conversion_time,
- median(step_1_median_conversion_time_inner) AS step_1_median_conversion_time,
- prop AS prop
- FROM
- (SELECT aggregation_target AS aggregation_target,
- steps AS steps,
- prop AS prop,
- avg(step_1_conversion_time) AS step_1_average_conversion_time_inner,
- median(step_1_conversion_time) AS step_1_median_conversion_time_inner,
- prop AS prop
- FROM
- (SELECT aggregation_target AS aggregation_target,
- steps AS steps,
- prop AS prop,
- max(steps) OVER (PARTITION BY aggregation_target,
- prop) AS max_steps,
- step_1_conversion_time AS step_1_conversion_time,
- prop AS prop
- FROM
- (SELECT aggregation_target AS aggregation_target,
- timestamp AS timestamp,
- step_0 AS step_0,
- latest_0 AS latest_0,
- step_1 AS step_1,
- latest_1 AS latest_1,
- prop AS prop,
- if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 2, 1) AS steps,
- if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time,
- prop AS prop
- FROM
- (SELECT aggregation_target AS aggregation_target,
- timestamp AS timestamp,
- step_0 AS step_0,
- latest_0 AS latest_0,
- step_1 AS step_1,
- min(latest_1) OVER (PARTITION BY aggregation_target,
- prop
- ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
- if(has([['', ''], ['alakazam', ''], ['Safari', 'xyz'], ['Mac', ''], ['Chrome', 'xyz'], ['0', '0'], ['', 'no-mac']], prop), prop, ['Other']) AS prop
- FROM
- (SELECT timestamp AS timestamp,
- aggregation_target AS aggregation_target,
- step_0 AS step_0,
- latest_0 AS latest_0,
- step_1 AS step_1,
- latest_1 AS latest_1,
- prop_basic AS prop_basic,
- prop,
- prop_vals AS prop_vals,
- if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['', '']) AS prop
- FROM
- (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
- e__pdi.person_id AS aggregation_target,
- if(equals(e.event, 'sign up'), 1, 0) AS step_0,
- if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
- if(equals(e.event, 'buy'), 1, 0) AS step_1,
- if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
- [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), ''), ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), '')] AS prop_basic,
- prop_basic AS prop,
- argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals
- FROM events AS e
- INNER JOIN
- (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
- person_distinct_id2.distinct_id AS distinct_id
- FROM person_distinct_id2
- WHERE equals(person_distinct_id2.team_id, 2)
- GROUP BY person_distinct_id2.distinct_id
- HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
- WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))))
- WHERE ifNull(equals(step_0, 1), 0)))
- GROUP BY aggregation_target,
- steps,
- prop
- HAVING ifNull(equals(steps, max_steps), isNull(steps)
- and isNull(max_steps)))
- GROUP BY prop
- LIMIT 100 SETTINGS readonly=2,
- max_execution_time=60,
- allow_experimental_object_type=1
- '''
-# ---
diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr
index 75f2c0eac88db..5b12c1d8d00e0 100644
--- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr
+++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr
@@ -1,18 +1,17 @@
# serializer version: 1
-# name: TestFunnelStrictStepsBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events
+# name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen
'''
- SELECT ifNull(e__group_0.properties___industry, '') AS value,
+ SELECT [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), '')] AS value,
count(*) AS count
FROM events AS e
- LEFT JOIN
- (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), groups._timestamp) AS properties___industry,
- groups.group_type_index AS index,
- groups.group_key AS key
- FROM groups
- WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0))
- GROUP BY groups.group_type_index,
- groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key)
- WHERE and(equals(e.team_id, 2), in(e.event, tuple('buy', 'play movie', 'sign up')), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), notEmpty(e.person_id))
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ WHERE and(equals(e.team_id, 2), in(e.event, tuple('buy', 'sign up')), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), notEmpty(e__pdi.person_id))
GROUP BY value
ORDER BY count DESC, value DESC
LIMIT 26
@@ -21,24 +20,19 @@
allow_experimental_object_type=1
'''
# ---
-# name: TestFunnelStrictStepsBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events.1
+# name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen.1
'''
SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1,
countIf(ifNull(equals(steps, 2), 0)) AS step_2,
- countIf(ifNull(equals(steps, 3), 0)) AS step_3,
avg(step_1_average_conversion_time_inner) AS step_1_average_conversion_time,
- avg(step_2_average_conversion_time_inner) AS step_2_average_conversion_time,
median(step_1_median_conversion_time_inner) AS step_1_median_conversion_time,
- median(step_2_median_conversion_time_inner) AS step_2_median_conversion_time,
prop AS prop
FROM
(SELECT aggregation_target AS aggregation_target,
steps AS steps,
prop AS prop,
avg(step_1_conversion_time) AS step_1_average_conversion_time_inner,
- avg(step_2_conversion_time) AS step_2_average_conversion_time_inner,
median(step_1_conversion_time) AS step_1_median_conversion_time_inner,
- median(step_2_conversion_time) AS step_2_median_conversion_time_inner,
prop AS prop
FROM
(SELECT aggregation_target AS aggregation_target,
@@ -47,7 +41,6 @@
max(steps) OVER (PARTITION BY aggregation_target,
prop) AS max_steps,
step_1_conversion_time AS step_1_conversion_time,
- step_2_conversion_time AS step_2_conversion_time,
prop AS prop
FROM
(SELECT aggregation_target AS aggregation_target,
@@ -56,84 +49,49 @@
latest_0 AS latest_0,
step_1 AS step_1,
latest_1 AS latest_1,
- step_2 AS step_2,
- latest_2 AS latest_2,
prop AS prop,
- if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(latest_0, toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 2, 1)) AS steps,
- if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time,
- if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(latest_1, toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time,
- prop AS prop
+ if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 2, 1) AS steps,
+ if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time
FROM
(SELECT aggregation_target AS aggregation_target,
timestamp AS timestamp,
step_0 AS step_0,
latest_0 AS latest_0,
step_1 AS step_1,
- latest_1 AS latest_1,
- step_2 AS step_2,
- min(latest_2) OVER (PARTITION BY aggregation_target,
+ min(latest_1) OVER (PARTITION BY aggregation_target,
prop
- ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2,
- prop AS prop
+ ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1,
+ if(has([[''], ['Mac'], ['Chrome'], ['Safari']], prop), prop, ['Other']) AS prop
FROM
- (SELECT aggregation_target AS aggregation_target,
- timestamp AS timestamp,
+ (SELECT timestamp AS timestamp,
+ aggregation_target AS aggregation_target,
step_0 AS step_0,
latest_0 AS latest_0,
step_1 AS step_1,
latest_1 AS latest_1,
- step_2 AS step_2,
- if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2,
- prop AS prop
+ prop_basic AS prop_basic,
+ prop,
+ prop_vals AS prop_vals,
+ if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) AS prop
FROM
- (SELECT aggregation_target AS aggregation_target,
- timestamp AS timestamp,
- step_0 AS step_0,
- latest_0 AS latest_0,
- step_1 AS step_1,
- min(latest_1) OVER (PARTITION BY aggregation_target,
- prop
- ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
- step_2 AS step_2,
- min(latest_2) OVER (PARTITION BY aggregation_target,
- prop
- ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2,
- if(has(['technology', 'finance'], prop), prop, 'Other') AS prop
- FROM
- (SELECT timestamp AS timestamp,
- aggregation_target AS aggregation_target,
- step_0 AS step_0,
- latest_0 AS latest_0,
- step_1 AS step_1,
- latest_1 AS latest_1,
- step_2 AS step_2,
- latest_2 AS latest_2,
- prop_basic AS prop_basic,
- prop,
- prop_vals AS prop_vals,
- prop_vals AS prop
- FROM
- (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
- e.`$group_0` AS aggregation_target,
- if(equals(e.event, 'sign up'), 1, 0) AS step_0,
- if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
- if(equals(e.event, 'play movie'), 1, 0) AS step_1,
- if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
- if(equals(e.event, 'buy'), 1, 0) AS step_2,
- if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2,
- ifNull(e__group_0.properties___industry, '') AS prop_basic,
- prop_basic AS prop,
- argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals
- FROM events AS e
- LEFT JOIN
- (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), groups._timestamp) AS properties___industry,
- groups.group_type_index AS index,
- groups.group_key AS key
- FROM groups
- WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0))
- GROUP BY groups.group_type_index,
- groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key)
- WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'play movie', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))))
+ (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
+ e__pdi.person_id AS aggregation_target,
+ if(equals(e.event, 'sign up'), 1, 0) AS step_0,
+ if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
+ if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1,
+ if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
+ [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), '')] AS prop_basic,
+ prop_basic AS prop,
+ argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals
+ FROM events AS e
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC')))))))
WHERE ifNull(equals(step_0, 1), 0)))
GROUP BY aggregation_target,
steps,
@@ -146,26 +104,19 @@
allow_experimental_object_type=1
'''
# ---
-# name: TestFunnelStrictStepsBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2
+# name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step
'''
- SELECT ifNull(e__group_0.properties___industry, '') AS value,
+ SELECT [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), '')] AS value,
count(*) AS count
FROM events AS e
- LEFT OUTER JOIN
- (SELECT argMax(person_overrides.override_person_id, person_overrides.version) AS override_person_id,
- person_overrides.old_person_id AS old_person_id
- FROM person_overrides
- WHERE equals(person_overrides.team_id, 2)
- GROUP BY person_overrides.old_person_id) AS e__override ON equals(e.person_id, e__override.old_person_id)
- LEFT JOIN
- (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), groups._timestamp) AS properties___industry,
- groups.group_type_index AS index,
- groups.group_key AS key
- FROM groups
- WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0))
- GROUP BY groups.group_type_index,
- groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key)
- WHERE and(equals(e.team_id, 2), in(e.event, tuple('buy', 'play movie', 'sign up')), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), notEmpty(ifNull(nullIf(e__override.override_person_id, '00000000-0000-0000-0000-000000000000'), e.person_id)))
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ WHERE and(equals(e.team_id, 2), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), notEmpty(e__pdi.person_id))
GROUP BY value
ORDER BY count DESC, value DESC
LIMIT 26
@@ -174,24 +125,19 @@
allow_experimental_object_type=1
'''
# ---
-# name: TestFunnelStrictStepsBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2.1
+# name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step.1
'''
SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1,
countIf(ifNull(equals(steps, 2), 0)) AS step_2,
- countIf(ifNull(equals(steps, 3), 0)) AS step_3,
avg(step_1_average_conversion_time_inner) AS step_1_average_conversion_time,
- avg(step_2_average_conversion_time_inner) AS step_2_average_conversion_time,
median(step_1_median_conversion_time_inner) AS step_1_median_conversion_time,
- median(step_2_median_conversion_time_inner) AS step_2_median_conversion_time,
prop AS prop
FROM
(SELECT aggregation_target AS aggregation_target,
steps AS steps,
prop AS prop,
avg(step_1_conversion_time) AS step_1_average_conversion_time_inner,
- avg(step_2_conversion_time) AS step_2_average_conversion_time_inner,
median(step_1_conversion_time) AS step_1_median_conversion_time_inner,
- median(step_2_conversion_time) AS step_2_median_conversion_time_inner,
prop AS prop
FROM
(SELECT aggregation_target AS aggregation_target,
@@ -200,7 +146,6 @@
max(steps) OVER (PARTITION BY aggregation_target,
prop) AS max_steps,
step_1_conversion_time AS step_1_conversion_time,
- step_2_conversion_time AS step_2_conversion_time,
prop AS prop
FROM
(SELECT aggregation_target AS aggregation_target,
@@ -209,84 +154,56 @@
latest_0 AS latest_0,
step_1 AS step_1,
latest_1 AS latest_1,
- step_2 AS step_2,
- latest_2 AS latest_2,
prop AS prop,
- if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(latest_0, toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 2, 1)) AS steps,
- if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time,
- if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(latest_1, toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time,
- prop AS prop
+ if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 2, 1) AS steps,
+ if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time
FROM
(SELECT aggregation_target AS aggregation_target,
timestamp AS timestamp,
step_0 AS step_0,
latest_0 AS latest_0,
step_1 AS step_1,
- latest_1 AS latest_1,
- step_2 AS step_2,
- min(latest_2) OVER (PARTITION BY aggregation_target,
+ min(latest_1) OVER (PARTITION BY aggregation_target,
prop
- ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2,
- prop AS prop
+ ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1,
+ if(has([['Safari'], ['Mac'], ['Chrome']], prop), prop, ['Other']) AS prop
FROM
- (SELECT aggregation_target AS aggregation_target,
- timestamp AS timestamp,
+ (SELECT timestamp AS timestamp,
+ aggregation_target AS aggregation_target,
step_0 AS step_0,
latest_0 AS latest_0,
step_1 AS step_1,
latest_1 AS latest_1,
- step_2 AS step_2,
- if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2,
- prop AS prop
+ prop_basic AS prop_basic,
+ prop_0 AS prop_0,
+ prop_1 AS prop_1,
+ prop,
+ prop_vals AS prop_vals,
+ prop
FROM
- (SELECT aggregation_target AS aggregation_target,
- timestamp AS timestamp,
- step_0 AS step_0,
- latest_0 AS latest_0,
- step_1 AS step_1,
- min(latest_1) OVER (PARTITION BY aggregation_target,
- prop
- ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
- step_2 AS step_2,
- min(latest_2) OVER (PARTITION BY aggregation_target,
- prop
- ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2,
- if(has(['technology', 'finance'], prop), prop, 'Other') AS prop
- FROM
- (SELECT timestamp AS timestamp,
- aggregation_target AS aggregation_target,
- step_0 AS step_0,
- latest_0 AS latest_0,
- step_1 AS step_1,
- latest_1 AS latest_1,
- step_2 AS step_2,
- latest_2 AS latest_2,
- prop_basic AS prop_basic,
- prop,
- prop_vals AS prop_vals,
- prop_vals AS prop
- FROM
- (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
- e.`$group_0` AS aggregation_target,
- if(equals(e.event, 'sign up'), 1, 0) AS step_0,
- if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
- if(equals(e.event, 'play movie'), 1, 0) AS step_1,
- if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
- if(equals(e.event, 'buy'), 1, 0) AS step_2,
- if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2,
- ifNull(e__group_0.properties___industry, '') AS prop_basic,
- prop_basic AS prop,
- argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals
- FROM events AS e
- LEFT JOIN
- (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), groups._timestamp) AS properties___industry,
- groups.group_type_index AS index,
- groups.group_key AS key
- FROM groups
- WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0))
- GROUP BY groups.group_type_index,
- groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key)
- WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'play movie', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))))
+ (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
+ e__pdi.person_id AS aggregation_target,
+ if(equals(e.event, 'sign up'), 1, 0) AS step_0,
+ if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
+ if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1,
+ if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
+ [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), '')] AS prop_basic,
+ if(ifNull(equals(step_0, 1), 0), prop_basic, []) AS prop_0,
+ if(ifNull(equals(step_1, 1), 0), prop_basic, []) AS prop_1,
+ prop_1 AS prop,
+ groupUniqArray(prop) OVER (PARTITION BY aggregation_target) AS prop_vals
+ FROM events AS e
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))) ARRAY
+ JOIN prop_vals AS prop
+ WHERE ifNull(notEquals(prop, []), isNotNull(prop)
+ or isNotNull([]))))
WHERE ifNull(equals(step_0, 1), 0)))
GROUP BY aggregation_target,
steps,
@@ -299,9 +216,9 @@
allow_experimental_object_type=1
'''
# ---
-# name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen
+# name: TestFunnelStrictStepsBreakdown.test_funnel_step_multiple_breakdown_snapshot
'''
- SELECT [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), '')] AS value,
+ SELECT [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), ''), ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), '')] AS value,
count(*) AS count
FROM events AS e
INNER JOIN
@@ -320,7 +237,7 @@
allow_experimental_object_type=1
'''
# ---
-# name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen.1
+# name: TestFunnelStrictStepsBreakdown.test_funnel_step_multiple_breakdown_snapshot.1
'''
SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1,
countIf(ifNull(equals(steps, 2), 0)) AS step_2,
@@ -361,7 +278,7 @@
min(latest_1) OVER (PARTITION BY aggregation_target,
prop
ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1,
- if(has([[''], ['Mac'], ['Chrome'], ['Safari']], prop), prop, ['Other']) AS prop
+ if(has([['', ''], ['alakazam', ''], ['Safari', 'xyz'], ['Mac', ''], ['Chrome', 'xyz'], ['0', '0'], ['', 'no-mac']], prop), prop, ['Other']) AS prop
FROM
(SELECT timestamp AS timestamp,
aggregation_target AS aggregation_target,
@@ -372,15 +289,15 @@
prop_basic AS prop_basic,
prop,
prop_vals AS prop_vals,
- if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) AS prop
+ if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['', '']) AS prop
FROM
(SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
e__pdi.person_id AS aggregation_target,
if(equals(e.event, 'sign up'), 1, 0) AS step_0,
if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
- if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1,
+ if(equals(e.event, 'buy'), 1, 0) AS step_1,
if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
- [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), '')] AS prop_basic,
+ [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), ''), ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), '')] AS prop_basic,
prop_basic AS prop,
argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals
FROM events AS e
@@ -404,19 +321,173 @@
allow_experimental_object_type=1
'''
# ---
-# name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step
+# name: TestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events
+ '''
+ SELECT ifNull(e__group_0.properties___industry, '') AS value,
+ count(*) AS count
+ FROM events AS e
+ LEFT JOIN
+ (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), groups._timestamp) AS properties___industry,
+ groups.group_type_index AS index,
+ groups.group_key AS key
+ FROM groups
+ WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0))
+ GROUP BY groups.group_type_index,
+ groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key)
+ WHERE and(equals(e.team_id, 2), in(e.event, tuple('buy', 'play movie', 'sign up')), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), notEmpty(e.person_id))
+ GROUP BY value
+ ORDER BY count DESC, value DESC
+ LIMIT 26
+ OFFSET 0 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1
+ '''
+# ---
+# name: TestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events.1
+ '''
+ SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1,
+ countIf(ifNull(equals(steps, 2), 0)) AS step_2,
+ countIf(ifNull(equals(steps, 3), 0)) AS step_3,
+ avg(step_1_average_conversion_time_inner) AS step_1_average_conversion_time,
+ avg(step_2_average_conversion_time_inner) AS step_2_average_conversion_time,
+ median(step_1_median_conversion_time_inner) AS step_1_median_conversion_time,
+ median(step_2_median_conversion_time_inner) AS step_2_median_conversion_time,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ steps AS steps,
+ prop AS prop,
+ avg(step_1_conversion_time) AS step_1_average_conversion_time_inner,
+ avg(step_2_conversion_time) AS step_2_average_conversion_time_inner,
+ median(step_1_conversion_time) AS step_1_median_conversion_time_inner,
+ median(step_2_conversion_time) AS step_2_median_conversion_time_inner,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ steps AS steps,
+ prop AS prop,
+ max(steps) OVER (PARTITION BY aggregation_target,
+ prop) AS max_steps,
+ step_1_conversion_time AS step_1_conversion_time,
+ step_2_conversion_time AS step_2_conversion_time,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ latest_2 AS latest_2,
+ prop AS prop,
+ if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(latest_0, toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 2, 1)) AS steps,
+ if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time,
+ if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(latest_1, toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ min(latest_2) OVER (PARTITION BY aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ min(latest_1) OVER (PARTITION BY aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
+ step_2 AS step_2,
+ min(latest_2) OVER (PARTITION BY aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2,
+ if(has(['technology', 'finance'], prop), prop, 'Other') AS prop
+ FROM
+ (SELECT timestamp AS timestamp,
+ aggregation_target AS aggregation_target,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ latest_2 AS latest_2,
+ prop_basic AS prop_basic,
+ prop,
+ prop_vals AS prop_vals,
+ prop_vals AS prop
+ FROM
+ (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
+ e.`$group_0` AS aggregation_target,
+ if(equals(e.event, 'sign up'), 1, 0) AS step_0,
+ if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
+ if(equals(e.event, 'play movie'), 1, 0) AS step_1,
+ if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
+ if(equals(e.event, 'buy'), 1, 0) AS step_2,
+ if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2,
+ ifNull(e__group_0.properties___industry, '') AS prop_basic,
+ prop_basic AS prop,
+ argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals
+ FROM events AS e
+ LEFT JOIN
+ (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), groups._timestamp) AS properties___industry,
+ groups.group_type_index AS index,
+ groups.group_key AS key
+ FROM groups
+ WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0))
+ GROUP BY groups.group_type_index,
+ groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key)
+ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'play movie', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))))
+ WHERE ifNull(equals(step_0, 1), 0)))
+ GROUP BY aggregation_target,
+ steps,
+ prop
+ HAVING ifNull(equals(steps, max_steps), isNull(steps)
+ and isNull(max_steps)))
+ GROUP BY prop
+ LIMIT 100 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1
+ '''
+# ---
+# name: TestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2
'''
- SELECT [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), '')] AS value,
+ SELECT ifNull(e__group_0.properties___industry, '') AS value,
count(*) AS count
FROM events AS e
- INNER JOIN
- (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
- person_distinct_id2.distinct_id AS distinct_id
- FROM person_distinct_id2
- WHERE equals(person_distinct_id2.team_id, 2)
- GROUP BY person_distinct_id2.distinct_id
- HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
- WHERE and(equals(e.team_id, 2), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), notEmpty(e__pdi.person_id))
+ LEFT OUTER JOIN
+ (SELECT argMax(person_overrides.override_person_id, person_overrides.version) AS override_person_id,
+ person_overrides.old_person_id AS old_person_id
+ FROM person_overrides
+ WHERE equals(person_overrides.team_id, 2)
+ GROUP BY person_overrides.old_person_id) AS e__override ON equals(e.person_id, e__override.old_person_id)
+ LEFT JOIN
+ (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), groups._timestamp) AS properties___industry,
+ groups.group_type_index AS index,
+ groups.group_key AS key
+ FROM groups
+ WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0))
+ GROUP BY groups.group_type_index,
+ groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key)
+ WHERE and(equals(e.team_id, 2), in(e.event, tuple('buy', 'play movie', 'sign up')), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), notEmpty(ifNull(nullIf(e__override.override_person_id, '00000000-0000-0000-0000-000000000000'), e.person_id)))
GROUP BY value
ORDER BY count DESC, value DESC
LIMIT 26
@@ -425,19 +496,24 @@
allow_experimental_object_type=1
'''
# ---
-# name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step.1
+# name: TestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2.1
'''
SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1,
countIf(ifNull(equals(steps, 2), 0)) AS step_2,
+ countIf(ifNull(equals(steps, 3), 0)) AS step_3,
avg(step_1_average_conversion_time_inner) AS step_1_average_conversion_time,
+ avg(step_2_average_conversion_time_inner) AS step_2_average_conversion_time,
median(step_1_median_conversion_time_inner) AS step_1_median_conversion_time,
+ median(step_2_median_conversion_time_inner) AS step_2_median_conversion_time,
prop AS prop
FROM
(SELECT aggregation_target AS aggregation_target,
steps AS steps,
prop AS prop,
avg(step_1_conversion_time) AS step_1_average_conversion_time_inner,
+ avg(step_2_conversion_time) AS step_2_average_conversion_time_inner,
median(step_1_conversion_time) AS step_1_median_conversion_time_inner,
+ median(step_2_conversion_time) AS step_2_median_conversion_time_inner,
prop AS prop
FROM
(SELECT aggregation_target AS aggregation_target,
@@ -446,6 +522,7 @@
max(steps) OVER (PARTITION BY aggregation_target,
prop) AS max_steps,
step_1_conversion_time AS step_1_conversion_time,
+ step_2_conversion_time AS step_2_conversion_time,
prop AS prop
FROM
(SELECT aggregation_target AS aggregation_target,
@@ -454,56 +531,84 @@
latest_0 AS latest_0,
step_1 AS step_1,
latest_1 AS latest_1,
+ step_2 AS step_2,
+ latest_2 AS latest_2,
prop AS prop,
- if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 2, 1) AS steps,
- if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time
+ if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(latest_0, toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 2, 1)) AS steps,
+ if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time,
+ if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(latest_1, toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time,
+ prop AS prop
FROM
(SELECT aggregation_target AS aggregation_target,
timestamp AS timestamp,
step_0 AS step_0,
latest_0 AS latest_0,
step_1 AS step_1,
- min(latest_1) OVER (PARTITION BY aggregation_target,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ min(latest_2) OVER (PARTITION BY aggregation_target,
prop
- ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1,
- if(has([['Safari'], ['Mac'], ['Chrome']], prop), prop, ['Other']) AS prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2,
+ prop AS prop
FROM
- (SELECT timestamp AS timestamp,
- aggregation_target AS aggregation_target,
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
step_0 AS step_0,
latest_0 AS latest_0,
step_1 AS step_1,
latest_1 AS latest_1,
- prop_basic AS prop_basic,
- prop_0 AS prop_0,
- prop_1 AS prop_1,
- prop,
- prop_vals AS prop_vals,
- prop
+ step_2 AS step_2,
+ if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2,
+ prop AS prop
FROM
- (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
- e__pdi.person_id AS aggregation_target,
- if(equals(e.event, 'sign up'), 1, 0) AS step_0,
- if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
- if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1,
- if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
- [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), '')] AS prop_basic,
- if(ifNull(equals(step_0, 1), 0), prop_basic, []) AS prop_0,
- if(ifNull(equals(step_1, 1), 0), prop_basic, []) AS prop_1,
- prop_1 AS prop,
- groupUniqArray(prop) OVER (PARTITION BY aggregation_target) AS prop_vals
- FROM events AS e
- INNER JOIN
- (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
- person_distinct_id2.distinct_id AS distinct_id
- FROM person_distinct_id2
- WHERE equals(person_distinct_id2.team_id, 2)
- GROUP BY person_distinct_id2.distinct_id
- HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
- WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))))) ARRAY
- JOIN prop_vals AS prop
- WHERE ifNull(notEquals(prop, []), isNotNull(prop)
- or isNotNull([]))))
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ min(latest_1) OVER (PARTITION BY aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
+ step_2 AS step_2,
+ min(latest_2) OVER (PARTITION BY aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2,
+ if(has(['technology', 'finance'], prop), prop, 'Other') AS prop
+ FROM
+ (SELECT timestamp AS timestamp,
+ aggregation_target AS aggregation_target,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ latest_2 AS latest_2,
+ prop_basic AS prop_basic,
+ prop,
+ prop_vals AS prop_vals,
+ prop_vals AS prop
+ FROM
+ (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
+ e.`$group_0` AS aggregation_target,
+ if(equals(e.event, 'sign up'), 1, 0) AS step_0,
+ if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
+ if(equals(e.event, 'play movie'), 1, 0) AS step_1,
+ if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
+ if(equals(e.event, 'buy'), 1, 0) AS step_2,
+ if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2,
+ ifNull(e__group_0.properties___industry, '') AS prop_basic,
+ prop_basic AS prop,
+ argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals
+ FROM events AS e
+ LEFT JOIN
+ (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), groups._timestamp) AS properties___industry,
+ groups.group_type_index AS index,
+ groups.group_key AS key
+ FROM groups
+ WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0))
+ GROUP BY groups.group_type_index,
+ groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key)
+ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'play movie', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))))
WHERE ifNull(equals(step_0, 1), 0)))
GROUP BY aggregation_target,
steps,
@@ -516,7 +621,7 @@
allow_experimental_object_type=1
'''
# ---
-# name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_group
+# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group
'''
SELECT ifNull(e__group_0.properties___industry, '') AS value,
count(*) AS count
@@ -545,7 +650,7 @@
allow_experimental_object_type=1
'''
# ---
-# name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_group.1
+# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.1
'''
SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1,
countIf(ifNull(equals(steps, 2), 0)) AS step_2,
@@ -677,7 +782,7 @@
allow_experimental_object_type=1
'''
# ---
-# name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_group.2
+# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.2
'''
SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value,
@@ -700,7 +805,7 @@
OFFSET 0
'''
# ---
-# name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_group.3
+# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.3
'''
SELECT aggregation_target AS actor_id
@@ -797,7 +902,7 @@
max_expanded_ast_elements=1000000
'''
# ---
-# name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_group.4
+# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.4
'''
SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value,
@@ -820,7 +925,7 @@
OFFSET 0
'''
# ---
-# name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_group.5
+# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.5
'''
SELECT aggregation_target AS actor_id
@@ -917,7 +1022,7 @@
max_expanded_ast_elements=1000000
'''
# ---
-# name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_group.6
+# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.6
'''
SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value,
@@ -940,7 +1045,7 @@
OFFSET 0
'''
# ---
-# name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_group.7
+# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.7
'''
SELECT aggregation_target AS actor_id
@@ -1037,7 +1142,7 @@
max_expanded_ast_elements=1000000
'''
# ---
-# name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_group.8
+# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.8
'''
SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value,
@@ -1060,7 +1165,7 @@
OFFSET 0
'''
# ---
-# name: TestFunnelStrictStepsBreakdown.test_funnel_breakdown_group.9
+# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.9
'''
SELECT aggregation_target AS actor_id
@@ -1157,108 +1262,3 @@
max_expanded_ast_elements=1000000
'''
# ---
-# name: TestFunnelStrictStepsBreakdown.test_funnel_step_multiple_breakdown_snapshot
- '''
- SELECT [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), ''), ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), '')] AS value,
- count(*) AS count
- FROM events AS e
- INNER JOIN
- (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
- person_distinct_id2.distinct_id AS distinct_id
- FROM person_distinct_id2
- WHERE equals(person_distinct_id2.team_id, 2)
- GROUP BY person_distinct_id2.distinct_id
- HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
- WHERE and(equals(e.team_id, 2), in(e.event, tuple('buy', 'sign up')), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), notEmpty(e__pdi.person_id))
- GROUP BY value
- ORDER BY count DESC, value DESC
- LIMIT 26
- OFFSET 0 SETTINGS readonly=2,
- max_execution_time=60,
- allow_experimental_object_type=1
- '''
-# ---
-# name: TestFunnelStrictStepsBreakdown.test_funnel_step_multiple_breakdown_snapshot.1
- '''
- SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1,
- countIf(ifNull(equals(steps, 2), 0)) AS step_2,
- avg(step_1_average_conversion_time_inner) AS step_1_average_conversion_time,
- median(step_1_median_conversion_time_inner) AS step_1_median_conversion_time,
- prop AS prop
- FROM
- (SELECT aggregation_target AS aggregation_target,
- steps AS steps,
- prop AS prop,
- avg(step_1_conversion_time) AS step_1_average_conversion_time_inner,
- median(step_1_conversion_time) AS step_1_median_conversion_time_inner,
- prop AS prop
- FROM
- (SELECT aggregation_target AS aggregation_target,
- steps AS steps,
- prop AS prop,
- max(steps) OVER (PARTITION BY aggregation_target,
- prop) AS max_steps,
- step_1_conversion_time AS step_1_conversion_time,
- prop AS prop
- FROM
- (SELECT aggregation_target AS aggregation_target,
- timestamp AS timestamp,
- step_0 AS step_0,
- latest_0 AS latest_0,
- step_1 AS step_1,
- latest_1 AS latest_1,
- prop AS prop,
- if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 2, 1) AS steps,
- if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time
- FROM
- (SELECT aggregation_target AS aggregation_target,
- timestamp AS timestamp,
- step_0 AS step_0,
- latest_0 AS latest_0,
- step_1 AS step_1,
- min(latest_1) OVER (PARTITION BY aggregation_target,
- prop
- ORDER BY timestamp DESC ROWS BETWEEN 1 PRECEDING AND 1 PRECEDING) AS latest_1,
- if(has([['', ''], ['alakazam', ''], ['Safari', 'xyz'], ['Mac', ''], ['Chrome', 'xyz'], ['0', '0'], ['', 'no-mac']], prop), prop, ['Other']) AS prop
- FROM
- (SELECT timestamp AS timestamp,
- aggregation_target AS aggregation_target,
- step_0 AS step_0,
- latest_0 AS latest_0,
- step_1 AS step_1,
- latest_1 AS latest_1,
- prop_basic AS prop_basic,
- prop,
- prop_vals AS prop_vals,
- if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['', '']) AS prop
- FROM
- (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
- e__pdi.person_id AS aggregation_target,
- if(equals(e.event, 'sign up'), 1, 0) AS step_0,
- if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
- if(equals(e.event, 'buy'), 1, 0) AS step_1,
- if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
- [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), ''), ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), '')] AS prop_basic,
- prop_basic AS prop,
- argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals
- FROM events AS e
- INNER JOIN
- (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
- person_distinct_id2.distinct_id AS distinct_id
- FROM person_distinct_id2
- WHERE equals(person_distinct_id2.team_id, 2)
- GROUP BY person_distinct_id2.distinct_id
- HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
- WHERE and(equals(e.team_id, 2), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC')))))))
- WHERE ifNull(equals(step_0, 1), 0)))
- GROUP BY aggregation_target,
- steps,
- prop
- HAVING ifNull(equals(steps, max_steps), isNull(steps)
- and isNull(max_steps)))
- GROUP BY prop
- LIMIT 100 SETTINGS readonly=2,
- max_execution_time=60,
- allow_experimental_object_type=1
- '''
-# ---
diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered.ambr
new file mode 100644
index 0000000000000..214583b03f081
--- /dev/null
+++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered.ambr
@@ -0,0 +1,2147 @@
+# serializer version: 1
+# name: TestFunnelUnorderedStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen
+ '''
+ SELECT [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), '')] AS value,
+ count(*) AS count
+ FROM events AS e
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ WHERE and(equals(e.team_id, 2), in(e.event, tuple('buy', 'sign up')), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), notEmpty(e__pdi.person_id))
+ GROUP BY value
+ ORDER BY count DESC, value DESC
+ LIMIT 26
+ OFFSET 0 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1
+ '''
+# ---
+# name: TestFunnelUnorderedStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen.1
+ '''
+ SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1,
+ countIf(ifNull(equals(steps, 2), 0)) AS step_2,
+ avg(step_1_average_conversion_time_inner) AS step_1_average_conversion_time,
+ median(step_1_median_conversion_time_inner) AS step_1_median_conversion_time,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ steps AS steps,
+ prop AS prop,
+ avg(step_1_conversion_time) AS step_1_average_conversion_time_inner,
+ median(step_1_conversion_time) AS step_1_median_conversion_time_inner
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ steps AS steps,
+ prop AS prop,
+ max(steps) OVER (PARTITION BY aggregation_target,
+ prop) AS max_steps,
+ step_1_conversion_time AS step_1_conversion_time
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ prop AS prop,
+ arraySort([latest_0, latest_1]) AS event_times,
+ arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 1, 0), 1]) AS steps,
+ arraySort([latest_0, latest_1]) AS conversion_times,
+ if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(conversion_times[1], toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ min(latest_1) OVER (PARTITION BY aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
+ if(has([[''], ['Mac'], ['Chrome'], ['Safari']], prop), prop, ['Other']) AS prop
+ FROM
+ (SELECT timestamp AS timestamp,
+ aggregation_target AS aggregation_target,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ prop_basic AS prop_basic,
+ prop,
+ prop_vals AS prop_vals,
+ if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) AS prop
+ FROM
+ (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
+ e__pdi.person_id AS aggregation_target,
+ if(equals(e.event, 'sign up'), 1, 0) AS step_0,
+ if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
+ if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1,
+ if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
+ [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), '')] AS prop_basic,
+ prop_basic AS prop,
+ argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals
+ FROM events AS e
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))))
+ WHERE ifNull(equals(step_0, 1), 0)
+ UNION ALL SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ prop AS prop,
+ arraySort([latest_0, latest_1]) AS event_times,
+ arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 1, 0), 1]) AS steps,
+ arraySort([latest_0, latest_1]) AS conversion_times,
+ if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(conversion_times[1], toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ min(latest_1) OVER (PARTITION BY aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
+ if(has([[''], ['Mac'], ['Chrome'], ['Safari']], prop), prop, ['Other']) AS prop
+ FROM
+ (SELECT timestamp AS timestamp,
+ aggregation_target AS aggregation_target,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ prop_basic AS prop_basic,
+ prop,
+ prop_vals AS prop_vals,
+ if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['']) AS prop
+ FROM
+ (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
+ e__pdi.person_id AS aggregation_target,
+ if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_0,
+ if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
+ if(equals(e.event, 'sign up'), 1, 0) AS step_1,
+ if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
+ [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), '')] AS prop_basic,
+ prop_basic AS prop,
+ argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals
+ FROM events AS e
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))))
+ WHERE ifNull(equals(step_0, 1), 0)))
+ GROUP BY aggregation_target,
+ steps,
+ prop
+ HAVING ifNull(equals(steps, max_steps), isNull(steps)
+ and isNull(max_steps)))
+ GROUP BY prop
+ LIMIT 100 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1
+ '''
+# ---
+# name: TestFunnelUnorderedStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step
+ '''
+ SELECT [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), '')] AS value,
+ count(*) AS count
+ FROM events AS e
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ WHERE and(equals(e.team_id, 2), in(e.event, tuple('buy', 'sign up')), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), notEmpty(e__pdi.person_id))
+ GROUP BY value
+ ORDER BY count DESC, value DESC
+ LIMIT 26
+ OFFSET 0 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1
+ '''
+# ---
+# name: TestFunnelUnorderedStepsBreakdown.test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step.1
+ '''
+ SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1,
+ countIf(ifNull(equals(steps, 2), 0)) AS step_2,
+ avg(step_1_average_conversion_time_inner) AS step_1_average_conversion_time,
+ median(step_1_median_conversion_time_inner) AS step_1_median_conversion_time,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ steps AS steps,
+ prop AS prop,
+ avg(step_1_conversion_time) AS step_1_average_conversion_time_inner,
+ median(step_1_conversion_time) AS step_1_median_conversion_time_inner
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ steps AS steps,
+ prop AS prop,
+ max(steps) OVER (PARTITION BY aggregation_target,
+ prop) AS max_steps,
+ step_1_conversion_time AS step_1_conversion_time
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ prop AS prop,
+ arraySort([latest_0, latest_1]) AS event_times,
+ arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 1, 0), 1]) AS steps,
+ arraySort([latest_0, latest_1]) AS conversion_times,
+ if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(conversion_times[1], toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ min(latest_1) OVER (PARTITION BY aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
+ if(has([[''], ['Mac'], ['Chrome'], ['Safari']], prop), prop, ['Other']) AS prop
+ FROM
+ (SELECT timestamp AS timestamp,
+ aggregation_target AS aggregation_target,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ prop_basic AS prop_basic,
+ prop_0 AS prop_0,
+ prop_1 AS prop_1,
+ prop,
+ prop_vals AS prop_vals,
+ prop
+ FROM
+ (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
+ e__pdi.person_id AS aggregation_target,
+ if(equals(e.event, 'sign up'), 1, 0) AS step_0,
+ if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
+ if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_1,
+ if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
+ [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), '')] AS prop_basic,
+ if(ifNull(equals(step_0, 1), 0), prop_basic, []) AS prop_0,
+ if(ifNull(equals(step_1, 1), 0), prop_basic, []) AS prop_1,
+ prop_1 AS prop,
+ groupUniqArray(prop) OVER (PARTITION BY aggregation_target) AS prop_vals
+ FROM events AS e
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))) ARRAY
+ JOIN prop_vals AS prop
+ WHERE ifNull(notEquals(prop, []), isNotNull(prop)
+ or isNotNull([]))))
+ WHERE ifNull(equals(step_0, 1), 0)
+ UNION ALL SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ prop AS prop,
+ arraySort([latest_0, latest_1]) AS event_times,
+ arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 1, 0), 1]) AS steps,
+ arraySort([latest_0, latest_1]) AS conversion_times,
+ if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(conversion_times[1], toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ min(latest_1) OVER (PARTITION BY aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
+ if(has([[''], ['Mac'], ['Chrome'], ['Safari']], prop), prop, ['Other']) AS prop
+ FROM
+ (SELECT timestamp AS timestamp,
+ aggregation_target AS aggregation_target,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ prop_basic AS prop_basic,
+ prop_0 AS prop_0,
+ prop_1 AS prop_1,
+ prop,
+ prop_vals AS prop_vals,
+ prop
+ FROM
+ (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
+ e__pdi.person_id AS aggregation_target,
+ if(and(equals(e.event, 'buy'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), 'xyz'), 0)), 1, 0) AS step_0,
+ if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
+ if(equals(e.event, 'sign up'), 1, 0) AS step_1,
+ if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
+ [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), '')] AS prop_basic,
+ if(ifNull(equals(step_0, 1), 0), prop_basic, []) AS prop_0,
+ if(ifNull(equals(step_1, 1), 0), prop_basic, []) AS prop_1,
+ prop_1 AS prop,
+ groupUniqArray(prop) OVER (PARTITION BY aggregation_target) AS prop_vals
+ FROM events AS e
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0)))) ARRAY
+ JOIN prop_vals AS prop
+ WHERE ifNull(notEquals(prop, []), isNotNull(prop)
+ or isNotNull([]))))
+ WHERE ifNull(equals(step_0, 1), 0)))
+ GROUP BY aggregation_target,
+ steps,
+ prop
+ HAVING ifNull(equals(steps, max_steps), isNull(steps)
+ and isNull(max_steps)))
+ GROUP BY prop
+ LIMIT 100 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1
+ '''
+# ---
+# name: TestFunnelUnorderedStepsBreakdown.test_funnel_step_multiple_breakdown_snapshot
+ '''
+ SELECT [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), ''), ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), '')] AS value,
+ count(*) AS count
+ FROM events AS e
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ WHERE and(equals(e.team_id, 2), in(e.event, tuple('buy', 'sign up')), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), notEmpty(e__pdi.person_id))
+ GROUP BY value
+ ORDER BY count DESC, value DESC
+ LIMIT 26
+ OFFSET 0 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1
+ '''
+# ---
+# name: TestFunnelUnorderedStepsBreakdown.test_funnel_step_multiple_breakdown_snapshot.1
+ '''
+ SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1,
+ countIf(ifNull(equals(steps, 2), 0)) AS step_2,
+ avg(step_1_average_conversion_time_inner) AS step_1_average_conversion_time,
+ median(step_1_median_conversion_time_inner) AS step_1_median_conversion_time,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ steps AS steps,
+ prop AS prop,
+ avg(step_1_conversion_time) AS step_1_average_conversion_time_inner,
+ median(step_1_conversion_time) AS step_1_median_conversion_time_inner
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ steps AS steps,
+ prop AS prop,
+ max(steps) OVER (PARTITION BY aggregation_target,
+ prop) AS max_steps,
+ step_1_conversion_time AS step_1_conversion_time
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ prop AS prop,
+ arraySort([latest_0, latest_1]) AS event_times,
+ arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 1, 0), 1]) AS steps,
+ arraySort([latest_0, latest_1]) AS conversion_times,
+ if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(conversion_times[1], toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ min(latest_1) OVER (PARTITION BY aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
+ if(has([['', ''], ['alakazam', ''], ['Safari', 'xyz'], ['Mac', ''], ['Chrome', 'xyz'], ['0', '0'], ['', 'no-mac']], prop), prop, ['Other']) AS prop
+ FROM
+ (SELECT timestamp AS timestamp,
+ aggregation_target AS aggregation_target,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ prop_basic AS prop_basic,
+ prop,
+ prop_vals AS prop_vals,
+ if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['', '']) AS prop
+ FROM
+ (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
+ e__pdi.person_id AS aggregation_target,
+ if(equals(e.event, 'sign up'), 1, 0) AS step_0,
+ if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
+ if(equals(e.event, 'buy'), 1, 0) AS step_1,
+ if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
+ [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), ''), ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), '')] AS prop_basic,
+ prop_basic AS prop,
+ argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals
+ FROM events AS e
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))))
+ WHERE ifNull(equals(step_0, 1), 0)
+ UNION ALL SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ prop AS prop,
+ arraySort([latest_0, latest_1]) AS event_times,
+ arraySum([if(and(ifNull(less(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 1, 0), 1]) AS steps,
+ arraySort([latest_0, latest_1]) AS conversion_times,
+ if(and(isNotNull(conversion_times[2]), ifNull(lessOrEquals(conversion_times[2], plus(conversion_times[1], toIntervalDay(14))), 0)), dateDiff('second', conversion_times[1], conversion_times[2]), NULL) AS step_1_conversion_time
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ min(latest_1) OVER (PARTITION BY aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
+ if(has([['', ''], ['alakazam', ''], ['Safari', 'xyz'], ['Mac', ''], ['Chrome', 'xyz'], ['0', '0'], ['', 'no-mac']], prop), prop, ['Other']) AS prop
+ FROM
+ (SELECT timestamp AS timestamp,
+ aggregation_target AS aggregation_target,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ prop_basic AS prop_basic,
+ prop,
+ prop_vals AS prop_vals,
+ if(notEmpty(arrayFilter(x -> notEmpty(x), prop_vals)), prop_vals, ['', '']) AS prop
+ FROM
+ (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
+ e__pdi.person_id AS aggregation_target,
+ if(equals(e.event, 'buy'), 1, 0) AS step_0,
+ if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
+ if(equals(e.event, 'sign up'), 1, 0) AS step_1,
+ if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
+ [ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$browser'), ''), 'null'), '^"|"$', ''), ''), ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$version'), ''), 'null'), '^"|"$', ''), '')] AS prop_basic,
+ prop_basic AS prop,
+ argMinIf(prop, timestamp, notEmpty(arrayFilter(x -> notEmpty(x), prop))) OVER (PARTITION BY aggregation_target) AS prop_vals
+ FROM events AS e
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0))))))
+ WHERE ifNull(equals(step_0, 1), 0)))
+ GROUP BY aggregation_target,
+ steps,
+ prop
+ HAVING ifNull(equals(steps, max_steps), isNull(steps)
+ and isNull(max_steps)))
+ GROUP BY prop
+ LIMIT 100 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1
+ '''
+# ---
+# name: TestUnorderedFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events
+ '''
+ SELECT ifNull(e__group_0.properties___industry, '') AS value,
+ count(*) AS count
+ FROM events AS e
+ LEFT JOIN
+ (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), groups._timestamp) AS properties___industry,
+ groups.group_type_index AS index,
+ groups.group_key AS key
+ FROM groups
+ WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0))
+ GROUP BY groups.group_type_index,
+ groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key)
+ WHERE and(equals(e.team_id, 2), in(e.event, tuple('buy', 'play movie', 'sign up')), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), notEmpty(e.person_id))
+ GROUP BY value
+ ORDER BY count DESC, value DESC
+ LIMIT 26
+ OFFSET 0 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1
+ '''
+# ---
+# name: TestUnorderedFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events.1
+ '''
+ SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1,
+ countIf(ifNull(equals(steps, 2), 0)) AS step_2,
+ countIf(ifNull(equals(steps, 3), 0)) AS step_3,
+ avg(step_1_average_conversion_time_inner) AS step_1_average_conversion_time,
+ avg(step_2_average_conversion_time_inner) AS step_2_average_conversion_time,
+ median(step_1_median_conversion_time_inner) AS step_1_median_conversion_time,
+ median(step_2_median_conversion_time_inner) AS step_2_median_conversion_time,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ steps AS steps,
+ prop AS prop,
+ avg(step_1_conversion_time) AS step_1_average_conversion_time_inner,
+ avg(step_2_conversion_time) AS step_2_average_conversion_time_inner,
+ median(step_1_conversion_time) AS step_1_median_conversion_time_inner,
+ median(step_2_conversion_time) AS step_2_median_conversion_time_inner,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ steps AS steps,
+ prop AS prop,
+ max(steps) OVER (PARTITION BY aggregation_target,
+ prop) AS max_steps,
+ step_1_conversion_time AS step_1_conversion_time,
+ step_2_conversion_time AS step_2_conversion_time,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ latest_2 AS latest_2,
+ prop AS prop,
+ if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(latest_0, toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 2, 1)) AS steps,
+ if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time,
+ if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(latest_1, toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ min(latest_2) OVER (PARTITION BY aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ min(latest_1) OVER (PARTITION BY aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
+ step_2 AS step_2,
+ min(latest_2) OVER (PARTITION BY aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2,
+ if(has(['technology', 'finance'], prop), prop, 'Other') AS prop
+ FROM
+ (SELECT timestamp AS timestamp,
+ aggregation_target AS aggregation_target,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ latest_2 AS latest_2,
+ prop_basic AS prop_basic,
+ prop,
+ prop_vals AS prop_vals,
+ prop_vals AS prop
+ FROM
+ (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
+ e.`$group_0` AS aggregation_target,
+ if(equals(e.event, 'sign up'), 1, 0) AS step_0,
+ if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
+ if(equals(e.event, 'play movie'), 1, 0) AS step_1,
+ if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
+ if(equals(e.event, 'buy'), 1, 0) AS step_2,
+ if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2,
+ ifNull(e__group_0.properties___industry, '') AS prop_basic,
+ prop_basic AS prop,
+ argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals
+ FROM events AS e
+ LEFT JOIN
+ (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), groups._timestamp) AS properties___industry,
+ groups.group_type_index AS index,
+ groups.group_key AS key
+ FROM groups
+ WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0))
+ GROUP BY groups.group_type_index,
+ groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key)
+ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'play movie', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))))
+ WHERE ifNull(equals(step_0, 1), 0)))
+ GROUP BY aggregation_target,
+ steps,
+ prop
+ HAVING ifNull(equals(steps, max_steps), isNull(steps)
+ and isNull(max_steps)))
+ GROUP BY prop
+ LIMIT 100 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1
+ '''
+# ---
+# name: TestUnorderedFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2
+ '''
+ SELECT ifNull(e__group_0.properties___industry, '') AS value,
+ count(*) AS count
+ FROM events AS e
+ LEFT OUTER JOIN
+ (SELECT argMax(person_overrides.override_person_id, person_overrides.version) AS override_person_id,
+ person_overrides.old_person_id AS old_person_id
+ FROM person_overrides
+ WHERE equals(person_overrides.team_id, 2)
+ GROUP BY person_overrides.old_person_id) AS e__override ON equals(e.person_id, e__override.old_person_id)
+ LEFT JOIN
+ (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), groups._timestamp) AS properties___industry,
+ groups.group_type_index AS index,
+ groups.group_key AS key
+ FROM groups
+ WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0))
+ GROUP BY groups.group_type_index,
+ groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key)
+ WHERE and(equals(e.team_id, 2), in(e.event, tuple('buy', 'play movie', 'sign up')), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), notEmpty(ifNull(nullIf(e__override.override_person_id, '00000000-0000-0000-0000-000000000000'), e.person_id)))
+ GROUP BY value
+ ORDER BY count DESC, value DESC
+ LIMIT 26
+ OFFSET 0 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1
+ '''
+# ---
+# name: TestUnorderedFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events_poe_v2.1
+ '''
+ SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1,
+ countIf(ifNull(equals(steps, 2), 0)) AS step_2,
+ countIf(ifNull(equals(steps, 3), 0)) AS step_3,
+ avg(step_1_average_conversion_time_inner) AS step_1_average_conversion_time,
+ avg(step_2_average_conversion_time_inner) AS step_2_average_conversion_time,
+ median(step_1_median_conversion_time_inner) AS step_1_median_conversion_time,
+ median(step_2_median_conversion_time_inner) AS step_2_median_conversion_time,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ steps AS steps,
+ prop AS prop,
+ avg(step_1_conversion_time) AS step_1_average_conversion_time_inner,
+ avg(step_2_conversion_time) AS step_2_average_conversion_time_inner,
+ median(step_1_conversion_time) AS step_1_median_conversion_time_inner,
+ median(step_2_conversion_time) AS step_2_median_conversion_time_inner,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ steps AS steps,
+ prop AS prop,
+ max(steps) OVER (PARTITION BY aggregation_target,
+ prop) AS max_steps,
+ step_1_conversion_time AS step_1_conversion_time,
+ step_2_conversion_time AS step_2_conversion_time,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ latest_2 AS latest_2,
+ prop AS prop,
+ if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(latest_0, toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 2, 1)) AS steps,
+ if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time,
+ if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(latest_1, toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ min(latest_2) OVER (PARTITION BY aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ min(latest_1) OVER (PARTITION BY aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
+ step_2 AS step_2,
+ min(latest_2) OVER (PARTITION BY aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2,
+ if(has(['technology', 'finance'], prop), prop, 'Other') AS prop
+ FROM
+ (SELECT timestamp AS timestamp,
+ aggregation_target AS aggregation_target,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ latest_2 AS latest_2,
+ prop_basic AS prop_basic,
+ prop,
+ prop_vals AS prop_vals,
+ prop_vals AS prop
+ FROM
+ (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
+ e.`$group_0` AS aggregation_target,
+ if(equals(e.event, 'sign up'), 1, 0) AS step_0,
+ if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
+ if(equals(e.event, 'play movie'), 1, 0) AS step_1,
+ if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
+ if(equals(e.event, 'buy'), 1, 0) AS step_2,
+ if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2,
+ ifNull(e__group_0.properties___industry, '') AS prop_basic,
+ prop_basic AS prop,
+ argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals
+ FROM events AS e
+ LEFT JOIN
+ (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), groups._timestamp) AS properties___industry,
+ groups.group_type_index AS index,
+ groups.group_key AS key
+ FROM groups
+ WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0))
+ GROUP BY groups.group_type_index,
+ groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key)
+ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'play movie', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))))
+ WHERE ifNull(equals(step_0, 1), 0)))
+ GROUP BY aggregation_target,
+ steps,
+ prop
+ HAVING ifNull(equals(steps, max_steps), isNull(steps)
+ and isNull(max_steps)))
+ GROUP BY prop
+ LIMIT 100 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1
+ '''
+# ---
+# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group
+ '''
+ SELECT ifNull(e__group_0.properties___industry, '') AS value,
+ count(*) AS count
+ FROM events AS e
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ LEFT JOIN
+ (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), groups._timestamp) AS properties___industry,
+ groups.group_type_index AS index,
+ groups.group_key AS key
+ FROM groups
+ WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0))
+ GROUP BY groups.group_type_index,
+ groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key)
+ WHERE and(equals(e.team_id, 2), in(e.event, tuple('buy', 'play movie', 'sign up')), and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), notEmpty(e__pdi.person_id))
+ GROUP BY value
+ ORDER BY count DESC, value DESC
+ LIMIT 26
+ OFFSET 0 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1
+ '''
+# ---
+# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.1
+ '''
+ SELECT countIf(ifNull(equals(steps, 1), 0)) AS step_1,
+ countIf(ifNull(equals(steps, 2), 0)) AS step_2,
+ countIf(ifNull(equals(steps, 3), 0)) AS step_3,
+ avg(step_1_average_conversion_time_inner) AS step_1_average_conversion_time,
+ avg(step_2_average_conversion_time_inner) AS step_2_average_conversion_time,
+ median(step_1_median_conversion_time_inner) AS step_1_median_conversion_time,
+ median(step_2_median_conversion_time_inner) AS step_2_median_conversion_time,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ steps AS steps,
+ prop AS prop,
+ avg(step_1_conversion_time) AS step_1_average_conversion_time_inner,
+ avg(step_2_conversion_time) AS step_2_average_conversion_time_inner,
+ median(step_1_conversion_time) AS step_1_median_conversion_time_inner,
+ median(step_2_conversion_time) AS step_2_median_conversion_time_inner,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ steps AS steps,
+ prop AS prop,
+ max(steps) OVER (PARTITION BY aggregation_target,
+ prop) AS max_steps,
+ step_1_conversion_time AS step_1_conversion_time,
+ step_2_conversion_time AS step_2_conversion_time,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ latest_2 AS latest_2,
+ prop AS prop,
+ if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0), ifNull(lessOrEquals(latest_1, latest_2), 0), ifNull(lessOrEquals(latest_2, plus(latest_0, toIntervalDay(14))), 0)), 3, if(and(ifNull(lessOrEquals(latest_0, latest_1), 0), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), 2, 1)) AS steps,
+ if(and(isNotNull(latest_1), ifNull(lessOrEquals(latest_1, plus(latest_0, toIntervalDay(14))), 0)), dateDiff('second', latest_0, latest_1), NULL) AS step_1_conversion_time,
+ if(and(isNotNull(latest_2), ifNull(lessOrEquals(latest_2, plus(latest_1, toIntervalDay(14))), 0)), dateDiff('second', latest_1, latest_2), NULL) AS step_2_conversion_time,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ min(latest_2) OVER (PARTITION BY aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ if(ifNull(less(latest_2, latest_1), 0), NULL, latest_2) AS latest_2,
+ prop AS prop
+ FROM
+ (SELECT aggregation_target AS aggregation_target,
+ timestamp AS timestamp,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ min(latest_1) OVER (PARTITION BY aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_1,
+ step_2 AS step_2,
+ min(latest_2) OVER (PARTITION BY aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) AS latest_2,
+ if(has(['technology', 'finance'], prop), prop, 'Other') AS prop
+ FROM
+ (SELECT timestamp AS timestamp,
+ aggregation_target AS aggregation_target,
+ step_0 AS step_0,
+ latest_0 AS latest_0,
+ step_1 AS step_1,
+ latest_1 AS latest_1,
+ step_2 AS step_2,
+ latest_2 AS latest_2,
+ prop_basic AS prop_basic,
+ prop,
+ prop_vals AS prop_vals,
+ prop_vals AS prop
+ FROM
+ (SELECT toTimeZone(e.timestamp, 'UTC') AS timestamp,
+ e__pdi.person_id AS aggregation_target,
+ if(equals(e.event, 'sign up'), 1, 0) AS step_0,
+ if(ifNull(equals(step_0, 1), 0), timestamp, NULL) AS latest_0,
+ if(equals(e.event, 'play movie'), 1, 0) AS step_1,
+ if(ifNull(equals(step_1, 1), 0), timestamp, NULL) AS latest_1,
+ if(equals(e.event, 'buy'), 1, 0) AS step_2,
+ if(ifNull(equals(step_2, 1), 0), timestamp, NULL) AS latest_2,
+ ifNull(e__group_0.properties___industry, '') AS prop_basic,
+ prop_basic AS prop,
+ argMinIf(prop, timestamp, isNotNull(prop)) OVER (PARTITION BY aggregation_target) AS prop_vals
+ FROM events AS e
+ INNER JOIN
+ (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id,
+ person_distinct_id2.distinct_id AS distinct_id
+ FROM person_distinct_id2
+ WHERE equals(person_distinct_id2.team_id, 2)
+ GROUP BY person_distinct_id2.distinct_id
+ HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS e__pdi ON equals(e.distinct_id, e__pdi.distinct_id)
+ LEFT JOIN
+ (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'industry'), ''), 'null'), '^"|"$', ''), groups._timestamp) AS properties___industry,
+ groups.group_type_index AS index,
+ groups.group_key AS key
+ FROM groups
+ WHERE and(equals(groups.team_id, 2), ifNull(equals(index, 0), 0))
+ GROUP BY groups.group_type_index,
+ groups.group_key) AS e__group_0 ON equals(e.`$group_0`, e__group_0.key)
+ WHERE and(equals(e.team_id, 2), and(and(greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-01 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), toDateTime64('2020-01-08 23:59:59.999999', 6, 'UTC'))), in(e.event, tuple('buy', 'play movie', 'sign up'))), or(ifNull(equals(step_0, 1), 0), ifNull(equals(step_1, 1), 0), ifNull(equals(step_2, 1), 0))))))))
+ WHERE ifNull(equals(step_0, 1), 0)))
+ GROUP BY aggregation_target,
+ steps,
+ prop
+ HAVING ifNull(equals(steps, max_steps), isNull(steps)
+ and isNull(max_steps)))
+ GROUP BY prop
+ LIMIT 100 SETTINGS readonly=2,
+ max_execution_time=60,
+ allow_experimental_object_type=1
+ '''
+# ---
+# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.10
+ '''
+
+ SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value,
+ count(*) as count
+ FROM events e
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ GROUP BY value
+ ORDER BY count DESC, value DESC
+ LIMIT 26
+ OFFSET 0
+ '''
+# ---
+# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.11
+ '''
+
+ SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value,
+ count(*) as count
+ FROM events e
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ GROUP BY value
+ ORDER BY count DESC, value DESC
+ LIMIT 26
+ OFFSET 0
+ '''
+# ---
+# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.12
+ '''
+
+ SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value,
+ count(*) as count
+ FROM events e
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ GROUP BY value
+ ORDER BY count DESC, value DESC
+ LIMIT 26
+ OFFSET 0
+ '''
+# ---
+# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.13
+ '''
+
+ SELECT aggregation_target AS actor_id
+ FROM
+ (SELECT aggregation_target,
+ steps,
+ avg(step_1_conversion_time) step_1_average_conversion_time_inner,
+ avg(step_2_conversion_time) step_2_average_conversion_time_inner,
+ median(step_1_conversion_time) step_1_median_conversion_time_inner,
+ median(step_2_conversion_time) step_2_median_conversion_time_inner,
+ prop
+ FROM
+ (SELECT aggregation_target,
+ steps,
+ max(steps) over (PARTITION BY aggregation_target,
+ prop) as max_steps,
+ step_1_conversion_time,
+ step_2_conversion_time,
+ prop
+ FROM
+ (SELECT *,
+ arraySort([latest_0,latest_1,latest_2]) as event_times,
+ arraySum([if(latest_0 < latest_1 AND latest_1 <= latest_0 + INTERVAL 7 DAY, 1, 0),if(latest_0 < latest_2 AND latest_2 <= latest_0 + INTERVAL 7 DAY, 1, 0), 1]) AS steps ,
+ arraySort([latest_0,latest_1,latest_2]) as conversion_times,
+ if(isNotNull(conversion_times[2])
+ AND conversion_times[2] <= conversion_times[1] + INTERVAL 7 DAY, dateDiff('second', conversion_times[1], conversion_times[2]), NULL) step_1_conversion_time,
+ if(isNotNull(conversion_times[3])
+ AND conversion_times[3] <= conversion_times[2] + INTERVAL 7 DAY, dateDiff('second', conversion_times[2], conversion_times[3]), NULL) step_2_conversion_time
+ FROM
+ (SELECT aggregation_target, timestamp, step_0,
+ latest_0,
+ step_1,
+ min(latest_1) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1,
+ step_2,
+ min(latest_2) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 ,
+ if(has(['technology', 'finance'], prop), prop, 'Other') as prop
+ FROM
+ (SELECT *,
+ prop_vals as prop
+ FROM
+ (SELECT e.timestamp as timestamp,
+ pdi.person_id as aggregation_target,
+ pdi.person_id as person_id,
+ if(event = 'sign up', 1, 0) as step_0,
+ if(step_0 = 1, timestamp, null) as latest_0,
+ if(event = 'play movie', 1, 0) as step_1,
+ if(step_1 = 1, timestamp, null) as latest_1,
+ if(event = 'buy', 1, 0) as step_2,
+ if(step_2 = 1, timestamp, null) as latest_2,
+ replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic,
+ prop_basic as prop,
+ argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals
+ FROM events e
+ INNER JOIN
+ (SELECT distinct_id,
+ argMax(person_id, version) as person_id
+ FROM person_distinct_id2
+ WHERE team_id = 2
+ AND distinct_id IN
+ (SELECT distinct_id
+ FROM events
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') )
+ GROUP BY distinct_id
+ HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ AND (step_0 = 1
+ OR step_1 = 1
+ OR step_2 = 1) )))
+ WHERE step_0 = 1
+ UNION ALL SELECT *,
+ arraySort([latest_0,latest_1,latest_2]) as event_times,
+ arraySum([if(latest_0 < latest_1 AND latest_1 <= latest_0 + INTERVAL 7 DAY, 1, 0),if(latest_0 < latest_2 AND latest_2 <= latest_0 + INTERVAL 7 DAY, 1, 0), 1]) AS steps ,
+ arraySort([latest_0,latest_1,latest_2]) as conversion_times,
+ if(isNotNull(conversion_times[2])
+ AND conversion_times[2] <= conversion_times[1] + INTERVAL 7 DAY, dateDiff('second', conversion_times[1], conversion_times[2]), NULL) step_1_conversion_time,
+ if(isNotNull(conversion_times[3])
+ AND conversion_times[3] <= conversion_times[2] + INTERVAL 7 DAY, dateDiff('second', conversion_times[2], conversion_times[3]), NULL) step_2_conversion_time
+ FROM
+ (SELECT aggregation_target, timestamp, step_0,
+ latest_0,
+ step_1,
+ min(latest_1) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1,
+ step_2,
+ min(latest_2) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 ,
+ if(has(['technology', 'finance'], prop), prop, 'Other') as prop
+ FROM
+ (SELECT *,
+ prop_vals as prop
+ FROM
+ (SELECT e.timestamp as timestamp,
+ pdi.person_id as aggregation_target,
+ pdi.person_id as person_id,
+ if(event = 'play movie', 1, 0) as step_0,
+ if(step_0 = 1, timestamp, null) as latest_0,
+ if(event = 'buy', 1, 0) as step_1,
+ if(step_1 = 1, timestamp, null) as latest_1,
+ if(event = 'sign up', 1, 0) as step_2,
+ if(step_2 = 1, timestamp, null) as latest_2,
+ replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic,
+ prop_basic as prop,
+ argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals
+ FROM events e
+ INNER JOIN
+ (SELECT distinct_id,
+ argMax(person_id, version) as person_id
+ FROM person_distinct_id2
+ WHERE team_id = 2
+ AND distinct_id IN
+ (SELECT distinct_id
+ FROM events
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') )
+ GROUP BY distinct_id
+ HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ AND (step_0 = 1
+ OR step_1 = 1
+ OR step_2 = 1) )))
+ WHERE step_0 = 1
+ UNION ALL SELECT *,
+ arraySort([latest_0,latest_1,latest_2]) as event_times,
+ arraySum([if(latest_0 < latest_1 AND latest_1 <= latest_0 + INTERVAL 7 DAY, 1, 0),if(latest_0 < latest_2 AND latest_2 <= latest_0 + INTERVAL 7 DAY, 1, 0), 1]) AS steps ,
+ arraySort([latest_0,latest_1,latest_2]) as conversion_times,
+ if(isNotNull(conversion_times[2])
+ AND conversion_times[2] <= conversion_times[1] + INTERVAL 7 DAY, dateDiff('second', conversion_times[1], conversion_times[2]), NULL) step_1_conversion_time,
+ if(isNotNull(conversion_times[3])
+ AND conversion_times[3] <= conversion_times[2] + INTERVAL 7 DAY, dateDiff('second', conversion_times[2], conversion_times[3]), NULL) step_2_conversion_time
+ FROM
+ (SELECT aggregation_target, timestamp, step_0,
+ latest_0,
+ step_1,
+ min(latest_1) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1,
+ step_2,
+ min(latest_2) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 ,
+ if(has(['technology', 'finance'], prop), prop, 'Other') as prop
+ FROM
+ (SELECT *,
+ prop_vals as prop
+ FROM
+ (SELECT e.timestamp as timestamp,
+ pdi.person_id as aggregation_target,
+ pdi.person_id as person_id,
+ if(event = 'buy', 1, 0) as step_0,
+ if(step_0 = 1, timestamp, null) as latest_0,
+ if(event = 'sign up', 1, 0) as step_1,
+ if(step_1 = 1, timestamp, null) as latest_1,
+ if(event = 'play movie', 1, 0) as step_2,
+ if(step_2 = 1, timestamp, null) as latest_2,
+ replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic,
+ prop_basic as prop,
+ argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals
+ FROM events e
+ INNER JOIN
+ (SELECT distinct_id,
+ argMax(person_id, version) as person_id
+ FROM person_distinct_id2
+ WHERE team_id = 2
+ AND distinct_id IN
+ (SELECT distinct_id
+ FROM events
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') )
+ GROUP BY distinct_id
+ HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ AND (step_0 = 1
+ OR step_1 = 1
+ OR step_2 = 1) )))
+ WHERE step_0 = 1 ))
+ GROUP BY aggregation_target,
+ steps,
+ prop
+ HAVING steps = max_steps)
+ WHERE steps IN [1, 2, 3]
+ AND arrayFlatten(array(prop)) = arrayFlatten(array('technology'))
+ ORDER BY aggregation_target
+ LIMIT 100
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
+ '''
+# ---
+# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.14
+ '''
+
+ SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value,
+ count(*) as count
+ FROM events e
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ GROUP BY value
+ ORDER BY count DESC, value DESC
+ LIMIT 26
+ OFFSET 0
+ '''
+# ---
+# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.15
+ '''
+
+ SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value,
+ count(*) as count
+ FROM events e
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ GROUP BY value
+ ORDER BY count DESC, value DESC
+ LIMIT 26
+ OFFSET 0
+ '''
+# ---
+# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.16
+ '''
+
+ SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value,
+ count(*) as count
+ FROM events e
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ GROUP BY value
+ ORDER BY count DESC, value DESC
+ LIMIT 26
+ OFFSET 0
+ '''
+# ---
+# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.17
+ '''
+
+ SELECT aggregation_target AS actor_id
+ FROM
+ (SELECT aggregation_target,
+ steps,
+ avg(step_1_conversion_time) step_1_average_conversion_time_inner,
+ avg(step_2_conversion_time) step_2_average_conversion_time_inner,
+ median(step_1_conversion_time) step_1_median_conversion_time_inner,
+ median(step_2_conversion_time) step_2_median_conversion_time_inner,
+ prop
+ FROM
+ (SELECT aggregation_target,
+ steps,
+ max(steps) over (PARTITION BY aggregation_target,
+ prop) as max_steps,
+ step_1_conversion_time,
+ step_2_conversion_time,
+ prop
+ FROM
+ (SELECT *,
+ arraySort([latest_0,latest_1,latest_2]) as event_times,
+ arraySum([if(latest_0 < latest_1 AND latest_1 <= latest_0 + INTERVAL 7 DAY, 1, 0),if(latest_0 < latest_2 AND latest_2 <= latest_0 + INTERVAL 7 DAY, 1, 0), 1]) AS steps ,
+ arraySort([latest_0,latest_1,latest_2]) as conversion_times,
+ if(isNotNull(conversion_times[2])
+ AND conversion_times[2] <= conversion_times[1] + INTERVAL 7 DAY, dateDiff('second', conversion_times[1], conversion_times[2]), NULL) step_1_conversion_time,
+ if(isNotNull(conversion_times[3])
+ AND conversion_times[3] <= conversion_times[2] + INTERVAL 7 DAY, dateDiff('second', conversion_times[2], conversion_times[3]), NULL) step_2_conversion_time
+ FROM
+ (SELECT aggregation_target, timestamp, step_0,
+ latest_0,
+ step_1,
+ min(latest_1) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1,
+ step_2,
+ min(latest_2) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 ,
+ if(has(['technology', 'finance'], prop), prop, 'Other') as prop
+ FROM
+ (SELECT *,
+ prop_vals as prop
+ FROM
+ (SELECT e.timestamp as timestamp,
+ pdi.person_id as aggregation_target,
+ pdi.person_id as person_id,
+ if(event = 'sign up', 1, 0) as step_0,
+ if(step_0 = 1, timestamp, null) as latest_0,
+ if(event = 'play movie', 1, 0) as step_1,
+ if(step_1 = 1, timestamp, null) as latest_1,
+ if(event = 'buy', 1, 0) as step_2,
+ if(step_2 = 1, timestamp, null) as latest_2,
+ replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic,
+ prop_basic as prop,
+ argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals
+ FROM events e
+ INNER JOIN
+ (SELECT distinct_id,
+ argMax(person_id, version) as person_id
+ FROM person_distinct_id2
+ WHERE team_id = 2
+ AND distinct_id IN
+ (SELECT distinct_id
+ FROM events
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') )
+ GROUP BY distinct_id
+ HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ AND (step_0 = 1
+ OR step_1 = 1
+ OR step_2 = 1) )))
+ WHERE step_0 = 1
+ UNION ALL SELECT *,
+ arraySort([latest_0,latest_1,latest_2]) as event_times,
+ arraySum([if(latest_0 < latest_1 AND latest_1 <= latest_0 + INTERVAL 7 DAY, 1, 0),if(latest_0 < latest_2 AND latest_2 <= latest_0 + INTERVAL 7 DAY, 1, 0), 1]) AS steps ,
+ arraySort([latest_0,latest_1,latest_2]) as conversion_times,
+ if(isNotNull(conversion_times[2])
+ AND conversion_times[2] <= conversion_times[1] + INTERVAL 7 DAY, dateDiff('second', conversion_times[1], conversion_times[2]), NULL) step_1_conversion_time,
+ if(isNotNull(conversion_times[3])
+ AND conversion_times[3] <= conversion_times[2] + INTERVAL 7 DAY, dateDiff('second', conversion_times[2], conversion_times[3]), NULL) step_2_conversion_time
+ FROM
+ (SELECT aggregation_target, timestamp, step_0,
+ latest_0,
+ step_1,
+ min(latest_1) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1,
+ step_2,
+ min(latest_2) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 ,
+ if(has(['technology', 'finance'], prop), prop, 'Other') as prop
+ FROM
+ (SELECT *,
+ prop_vals as prop
+ FROM
+ (SELECT e.timestamp as timestamp,
+ pdi.person_id as aggregation_target,
+ pdi.person_id as person_id,
+ if(event = 'play movie', 1, 0) as step_0,
+ if(step_0 = 1, timestamp, null) as latest_0,
+ if(event = 'buy', 1, 0) as step_1,
+ if(step_1 = 1, timestamp, null) as latest_1,
+ if(event = 'sign up', 1, 0) as step_2,
+ if(step_2 = 1, timestamp, null) as latest_2,
+ replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic,
+ prop_basic as prop,
+ argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals
+ FROM events e
+ INNER JOIN
+ (SELECT distinct_id,
+ argMax(person_id, version) as person_id
+ FROM person_distinct_id2
+ WHERE team_id = 2
+ AND distinct_id IN
+ (SELECT distinct_id
+ FROM events
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') )
+ GROUP BY distinct_id
+ HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ AND (step_0 = 1
+ OR step_1 = 1
+ OR step_2 = 1) )))
+ WHERE step_0 = 1
+ UNION ALL SELECT *,
+ arraySort([latest_0,latest_1,latest_2]) as event_times,
+ arraySum([if(latest_0 < latest_1 AND latest_1 <= latest_0 + INTERVAL 7 DAY, 1, 0),if(latest_0 < latest_2 AND latest_2 <= latest_0 + INTERVAL 7 DAY, 1, 0), 1]) AS steps ,
+ arraySort([latest_0,latest_1,latest_2]) as conversion_times,
+ if(isNotNull(conversion_times[2])
+ AND conversion_times[2] <= conversion_times[1] + INTERVAL 7 DAY, dateDiff('second', conversion_times[1], conversion_times[2]), NULL) step_1_conversion_time,
+ if(isNotNull(conversion_times[3])
+ AND conversion_times[3] <= conversion_times[2] + INTERVAL 7 DAY, dateDiff('second', conversion_times[2], conversion_times[3]), NULL) step_2_conversion_time
+ FROM
+ (SELECT aggregation_target, timestamp, step_0,
+ latest_0,
+ step_1,
+ min(latest_1) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1,
+ step_2,
+ min(latest_2) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 ,
+ if(has(['technology', 'finance'], prop), prop, 'Other') as prop
+ FROM
+ (SELECT *,
+ prop_vals as prop
+ FROM
+ (SELECT e.timestamp as timestamp,
+ pdi.person_id as aggregation_target,
+ pdi.person_id as person_id,
+ if(event = 'buy', 1, 0) as step_0,
+ if(step_0 = 1, timestamp, null) as latest_0,
+ if(event = 'sign up', 1, 0) as step_1,
+ if(step_1 = 1, timestamp, null) as latest_1,
+ if(event = 'play movie', 1, 0) as step_2,
+ if(step_2 = 1, timestamp, null) as latest_2,
+ replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic,
+ prop_basic as prop,
+ argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals
+ FROM events e
+ INNER JOIN
+ (SELECT distinct_id,
+ argMax(person_id, version) as person_id
+ FROM person_distinct_id2
+ WHERE team_id = 2
+ AND distinct_id IN
+ (SELECT distinct_id
+ FROM events
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') )
+ GROUP BY distinct_id
+ HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ AND (step_0 = 1
+ OR step_1 = 1
+ OR step_2 = 1) )))
+ WHERE step_0 = 1 ))
+ GROUP BY aggregation_target,
+ steps,
+ prop
+ HAVING steps = max_steps)
+ WHERE steps IN [2, 3]
+ AND arrayFlatten(array(prop)) = arrayFlatten(array('technology'))
+ ORDER BY aggregation_target
+ LIMIT 100
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
+ '''
+# ---
+# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.2
+ '''
+
+ SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value,
+ count(*) as count
+ FROM events e
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ GROUP BY value
+ ORDER BY count DESC, value DESC
+ LIMIT 26
+ OFFSET 0
+ '''
+# ---
+# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.3
+ '''
+
+ SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value,
+ count(*) as count
+ FROM events e
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ GROUP BY value
+ ORDER BY count DESC, value DESC
+ LIMIT 26
+ OFFSET 0
+ '''
+# ---
+# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.4
+ '''
+
+ SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value,
+ count(*) as count
+ FROM events e
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ GROUP BY value
+ ORDER BY count DESC, value DESC
+ LIMIT 26
+ OFFSET 0
+ '''
+# ---
+# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.5
+ '''
+
+ SELECT aggregation_target AS actor_id
+ FROM
+ (SELECT aggregation_target,
+ steps,
+ avg(step_1_conversion_time) step_1_average_conversion_time_inner,
+ avg(step_2_conversion_time) step_2_average_conversion_time_inner,
+ median(step_1_conversion_time) step_1_median_conversion_time_inner,
+ median(step_2_conversion_time) step_2_median_conversion_time_inner,
+ prop
+ FROM
+ (SELECT aggregation_target,
+ steps,
+ max(steps) over (PARTITION BY aggregation_target,
+ prop) as max_steps,
+ step_1_conversion_time,
+ step_2_conversion_time,
+ prop
+ FROM
+ (SELECT *,
+ arraySort([latest_0,latest_1,latest_2]) as event_times,
+ arraySum([if(latest_0 < latest_1 AND latest_1 <= latest_0 + INTERVAL 7 DAY, 1, 0),if(latest_0 < latest_2 AND latest_2 <= latest_0 + INTERVAL 7 DAY, 1, 0), 1]) AS steps ,
+ arraySort([latest_0,latest_1,latest_2]) as conversion_times,
+ if(isNotNull(conversion_times[2])
+ AND conversion_times[2] <= conversion_times[1] + INTERVAL 7 DAY, dateDiff('second', conversion_times[1], conversion_times[2]), NULL) step_1_conversion_time,
+ if(isNotNull(conversion_times[3])
+ AND conversion_times[3] <= conversion_times[2] + INTERVAL 7 DAY, dateDiff('second', conversion_times[2], conversion_times[3]), NULL) step_2_conversion_time
+ FROM
+ (SELECT aggregation_target, timestamp, step_0,
+ latest_0,
+ step_1,
+ min(latest_1) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1,
+ step_2,
+ min(latest_2) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 ,
+ if(has(['technology', 'finance'], prop), prop, 'Other') as prop
+ FROM
+ (SELECT *,
+ prop_vals as prop
+ FROM
+ (SELECT e.timestamp as timestamp,
+ pdi.person_id as aggregation_target,
+ pdi.person_id as person_id,
+ if(event = 'sign up', 1, 0) as step_0,
+ if(step_0 = 1, timestamp, null) as latest_0,
+ if(event = 'play movie', 1, 0) as step_1,
+ if(step_1 = 1, timestamp, null) as latest_1,
+ if(event = 'buy', 1, 0) as step_2,
+ if(step_2 = 1, timestamp, null) as latest_2,
+ replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic,
+ prop_basic as prop,
+ argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals
+ FROM events e
+ INNER JOIN
+ (SELECT distinct_id,
+ argMax(person_id, version) as person_id
+ FROM person_distinct_id2
+ WHERE team_id = 2
+ AND distinct_id IN
+ (SELECT distinct_id
+ FROM events
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') )
+ GROUP BY distinct_id
+ HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ AND (step_0 = 1
+ OR step_1 = 1
+ OR step_2 = 1) )))
+ WHERE step_0 = 1
+ UNION ALL SELECT *,
+ arraySort([latest_0,latest_1,latest_2]) as event_times,
+ arraySum([if(latest_0 < latest_1 AND latest_1 <= latest_0 + INTERVAL 7 DAY, 1, 0),if(latest_0 < latest_2 AND latest_2 <= latest_0 + INTERVAL 7 DAY, 1, 0), 1]) AS steps ,
+ arraySort([latest_0,latest_1,latest_2]) as conversion_times,
+ if(isNotNull(conversion_times[2])
+ AND conversion_times[2] <= conversion_times[1] + INTERVAL 7 DAY, dateDiff('second', conversion_times[1], conversion_times[2]), NULL) step_1_conversion_time,
+ if(isNotNull(conversion_times[3])
+ AND conversion_times[3] <= conversion_times[2] + INTERVAL 7 DAY, dateDiff('second', conversion_times[2], conversion_times[3]), NULL) step_2_conversion_time
+ FROM
+ (SELECT aggregation_target, timestamp, step_0,
+ latest_0,
+ step_1,
+ min(latest_1) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1,
+ step_2,
+ min(latest_2) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 ,
+ if(has(['technology', 'finance'], prop), prop, 'Other') as prop
+ FROM
+ (SELECT *,
+ prop_vals as prop
+ FROM
+ (SELECT e.timestamp as timestamp,
+ pdi.person_id as aggregation_target,
+ pdi.person_id as person_id,
+ if(event = 'play movie', 1, 0) as step_0,
+ if(step_0 = 1, timestamp, null) as latest_0,
+ if(event = 'buy', 1, 0) as step_1,
+ if(step_1 = 1, timestamp, null) as latest_1,
+ if(event = 'sign up', 1, 0) as step_2,
+ if(step_2 = 1, timestamp, null) as latest_2,
+ replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic,
+ prop_basic as prop,
+ argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals
+ FROM events e
+ INNER JOIN
+ (SELECT distinct_id,
+ argMax(person_id, version) as person_id
+ FROM person_distinct_id2
+ WHERE team_id = 2
+ AND distinct_id IN
+ (SELECT distinct_id
+ FROM events
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') )
+ GROUP BY distinct_id
+ HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ AND (step_0 = 1
+ OR step_1 = 1
+ OR step_2 = 1) )))
+ WHERE step_0 = 1
+ UNION ALL SELECT *,
+ arraySort([latest_0,latest_1,latest_2]) as event_times,
+ arraySum([if(latest_0 < latest_1 AND latest_1 <= latest_0 + INTERVAL 7 DAY, 1, 0),if(latest_0 < latest_2 AND latest_2 <= latest_0 + INTERVAL 7 DAY, 1, 0), 1]) AS steps ,
+ arraySort([latest_0,latest_1,latest_2]) as conversion_times,
+ if(isNotNull(conversion_times[2])
+ AND conversion_times[2] <= conversion_times[1] + INTERVAL 7 DAY, dateDiff('second', conversion_times[1], conversion_times[2]), NULL) step_1_conversion_time,
+ if(isNotNull(conversion_times[3])
+ AND conversion_times[3] <= conversion_times[2] + INTERVAL 7 DAY, dateDiff('second', conversion_times[2], conversion_times[3]), NULL) step_2_conversion_time
+ FROM
+ (SELECT aggregation_target, timestamp, step_0,
+ latest_0,
+ step_1,
+ min(latest_1) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1,
+ step_2,
+ min(latest_2) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 ,
+ if(has(['technology', 'finance'], prop), prop, 'Other') as prop
+ FROM
+ (SELECT *,
+ prop_vals as prop
+ FROM
+ (SELECT e.timestamp as timestamp,
+ pdi.person_id as aggregation_target,
+ pdi.person_id as person_id,
+ if(event = 'buy', 1, 0) as step_0,
+ if(step_0 = 1, timestamp, null) as latest_0,
+ if(event = 'sign up', 1, 0) as step_1,
+ if(step_1 = 1, timestamp, null) as latest_1,
+ if(event = 'play movie', 1, 0) as step_2,
+ if(step_2 = 1, timestamp, null) as latest_2,
+ replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic,
+ prop_basic as prop,
+ argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals
+ FROM events e
+ INNER JOIN
+ (SELECT distinct_id,
+ argMax(person_id, version) as person_id
+ FROM person_distinct_id2
+ WHERE team_id = 2
+ AND distinct_id IN
+ (SELECT distinct_id
+ FROM events
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') )
+ GROUP BY distinct_id
+ HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ AND (step_0 = 1
+ OR step_1 = 1
+ OR step_2 = 1) )))
+ WHERE step_0 = 1 ))
+ GROUP BY aggregation_target,
+ steps,
+ prop
+ HAVING steps = max_steps)
+ WHERE steps IN [1, 2, 3]
+ AND arrayFlatten(array(prop)) = arrayFlatten(array('finance'))
+ ORDER BY aggregation_target
+ LIMIT 100
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
+ '''
+# ---
+# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.6
+ '''
+
+ SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value,
+ count(*) as count
+ FROM events e
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ GROUP BY value
+ ORDER BY count DESC, value DESC
+ LIMIT 26
+ OFFSET 0
+ '''
+# ---
+# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.7
+ '''
+
+ SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value,
+ count(*) as count
+ FROM events e
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ GROUP BY value
+ ORDER BY count DESC, value DESC
+ LIMIT 26
+ OFFSET 0
+ '''
+# ---
+# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.8
+ '''
+
+ SELECT replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS value,
+ count(*) as count
+ FROM events e
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ GROUP BY value
+ ORDER BY count DESC, value DESC
+ LIMIT 26
+ OFFSET 0
+ '''
+# ---
+# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.9
+ '''
+
+ SELECT aggregation_target AS actor_id
+ FROM
+ (SELECT aggregation_target,
+ steps,
+ avg(step_1_conversion_time) step_1_average_conversion_time_inner,
+ avg(step_2_conversion_time) step_2_average_conversion_time_inner,
+ median(step_1_conversion_time) step_1_median_conversion_time_inner,
+ median(step_2_conversion_time) step_2_median_conversion_time_inner,
+ prop
+ FROM
+ (SELECT aggregation_target,
+ steps,
+ max(steps) over (PARTITION BY aggregation_target,
+ prop) as max_steps,
+ step_1_conversion_time,
+ step_2_conversion_time,
+ prop
+ FROM
+ (SELECT *,
+ arraySort([latest_0,latest_1,latest_2]) as event_times,
+ arraySum([if(latest_0 < latest_1 AND latest_1 <= latest_0 + INTERVAL 7 DAY, 1, 0),if(latest_0 < latest_2 AND latest_2 <= latest_0 + INTERVAL 7 DAY, 1, 0), 1]) AS steps ,
+ arraySort([latest_0,latest_1,latest_2]) as conversion_times,
+ if(isNotNull(conversion_times[2])
+ AND conversion_times[2] <= conversion_times[1] + INTERVAL 7 DAY, dateDiff('second', conversion_times[1], conversion_times[2]), NULL) step_1_conversion_time,
+ if(isNotNull(conversion_times[3])
+ AND conversion_times[3] <= conversion_times[2] + INTERVAL 7 DAY, dateDiff('second', conversion_times[2], conversion_times[3]), NULL) step_2_conversion_time
+ FROM
+ (SELECT aggregation_target, timestamp, step_0,
+ latest_0,
+ step_1,
+ min(latest_1) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1,
+ step_2,
+ min(latest_2) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 ,
+ if(has(['technology', 'finance'], prop), prop, 'Other') as prop
+ FROM
+ (SELECT *,
+ prop_vals as prop
+ FROM
+ (SELECT e.timestamp as timestamp,
+ pdi.person_id as aggregation_target,
+ pdi.person_id as person_id,
+ if(event = 'sign up', 1, 0) as step_0,
+ if(step_0 = 1, timestamp, null) as latest_0,
+ if(event = 'play movie', 1, 0) as step_1,
+ if(step_1 = 1, timestamp, null) as latest_1,
+ if(event = 'buy', 1, 0) as step_2,
+ if(step_2 = 1, timestamp, null) as latest_2,
+ replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic,
+ prop_basic as prop,
+ argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals
+ FROM events e
+ INNER JOIN
+ (SELECT distinct_id,
+ argMax(person_id, version) as person_id
+ FROM person_distinct_id2
+ WHERE team_id = 2
+ AND distinct_id IN
+ (SELECT distinct_id
+ FROM events
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') )
+ GROUP BY distinct_id
+ HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ AND (step_0 = 1
+ OR step_1 = 1
+ OR step_2 = 1) )))
+ WHERE step_0 = 1
+ UNION ALL SELECT *,
+ arraySort([latest_0,latest_1,latest_2]) as event_times,
+ arraySum([if(latest_0 < latest_1 AND latest_1 <= latest_0 + INTERVAL 7 DAY, 1, 0),if(latest_0 < latest_2 AND latest_2 <= latest_0 + INTERVAL 7 DAY, 1, 0), 1]) AS steps ,
+ arraySort([latest_0,latest_1,latest_2]) as conversion_times,
+ if(isNotNull(conversion_times[2])
+ AND conversion_times[2] <= conversion_times[1] + INTERVAL 7 DAY, dateDiff('second', conversion_times[1], conversion_times[2]), NULL) step_1_conversion_time,
+ if(isNotNull(conversion_times[3])
+ AND conversion_times[3] <= conversion_times[2] + INTERVAL 7 DAY, dateDiff('second', conversion_times[2], conversion_times[3]), NULL) step_2_conversion_time
+ FROM
+ (SELECT aggregation_target, timestamp, step_0,
+ latest_0,
+ step_1,
+ min(latest_1) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1,
+ step_2,
+ min(latest_2) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 ,
+ if(has(['technology', 'finance'], prop), prop, 'Other') as prop
+ FROM
+ (SELECT *,
+ prop_vals as prop
+ FROM
+ (SELECT e.timestamp as timestamp,
+ pdi.person_id as aggregation_target,
+ pdi.person_id as person_id,
+ if(event = 'play movie', 1, 0) as step_0,
+ if(step_0 = 1, timestamp, null) as latest_0,
+ if(event = 'buy', 1, 0) as step_1,
+ if(step_1 = 1, timestamp, null) as latest_1,
+ if(event = 'sign up', 1, 0) as step_2,
+ if(step_2 = 1, timestamp, null) as latest_2,
+ replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic,
+ prop_basic as prop,
+ argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals
+ FROM events e
+ INNER JOIN
+ (SELECT distinct_id,
+ argMax(person_id, version) as person_id
+ FROM person_distinct_id2
+ WHERE team_id = 2
+ AND distinct_id IN
+ (SELECT distinct_id
+ FROM events
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') )
+ GROUP BY distinct_id
+ HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ AND (step_0 = 1
+ OR step_1 = 1
+ OR step_2 = 1) )))
+ WHERE step_0 = 1
+ UNION ALL SELECT *,
+ arraySort([latest_0,latest_1,latest_2]) as event_times,
+ arraySum([if(latest_0 < latest_1 AND latest_1 <= latest_0 + INTERVAL 7 DAY, 1, 0),if(latest_0 < latest_2 AND latest_2 <= latest_0 + INTERVAL 7 DAY, 1, 0), 1]) AS steps ,
+ arraySort([latest_0,latest_1,latest_2]) as conversion_times,
+ if(isNotNull(conversion_times[2])
+ AND conversion_times[2] <= conversion_times[1] + INTERVAL 7 DAY, dateDiff('second', conversion_times[1], conversion_times[2]), NULL) step_1_conversion_time,
+ if(isNotNull(conversion_times[3])
+ AND conversion_times[3] <= conversion_times[2] + INTERVAL 7 DAY, dateDiff('second', conversion_times[2], conversion_times[3]), NULL) step_2_conversion_time
+ FROM
+ (SELECT aggregation_target, timestamp, step_0,
+ latest_0,
+ step_1,
+ min(latest_1) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_1,
+ step_2,
+ min(latest_2) over (PARTITION by aggregation_target,
+ prop
+ ORDER BY timestamp DESC ROWS BETWEEN UNBOUNDED PRECEDING AND 0 PRECEDING) latest_2 ,
+ if(has(['technology', 'finance'], prop), prop, 'Other') as prop
+ FROM
+ (SELECT *,
+ prop_vals as prop
+ FROM
+ (SELECT e.timestamp as timestamp,
+ pdi.person_id as aggregation_target,
+ pdi.person_id as person_id,
+ if(event = 'buy', 1, 0) as step_0,
+ if(step_0 = 1, timestamp, null) as latest_0,
+ if(event = 'sign up', 1, 0) as step_1,
+ if(step_1 = 1, timestamp, null) as latest_1,
+ if(event = 'play movie', 1, 0) as step_2,
+ if(step_2 = 1, timestamp, null) as latest_2,
+ replaceRegexpAll(JSONExtractRaw(group_properties_0, 'industry'), '^"|"$', '') AS prop_basic,
+ prop_basic as prop,
+ argMinIf(prop, timestamp, isNotNull(prop)) over (PARTITION by aggregation_target) as prop_vals
+ FROM events e
+ INNER JOIN
+ (SELECT distinct_id,
+ argMax(person_id, version) as person_id
+ FROM person_distinct_id2
+ WHERE team_id = 2
+ AND distinct_id IN
+ (SELECT distinct_id
+ FROM events
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC') )
+ GROUP BY distinct_id
+ HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id
+ LEFT JOIN
+ (SELECT group_key,
+ argMax(group_properties, _timestamp) AS group_properties_0
+ FROM groups
+ WHERE team_id = 2
+ AND group_type_index = 0
+ GROUP BY group_key) groups_0 ON "$group_0" == groups_0.group_key
+ WHERE team_id = 2
+ AND event IN ['buy', 'play movie', 'sign up']
+ AND toTimeZone(timestamp, 'UTC') >= toDateTime('2020-01-01 00:00:00', 'UTC')
+ AND toTimeZone(timestamp, 'UTC') <= toDateTime('2020-01-08 23:59:59', 'UTC')
+ AND (step_0 = 1
+ OR step_1 = 1
+ OR step_2 = 1) )))
+ WHERE step_0 = 1 ))
+ GROUP BY aggregation_target,
+ steps,
+ prop
+ HAVING steps = max_steps)
+ WHERE steps IN [2, 3]
+ AND arrayFlatten(array(prop)) = arrayFlatten(array('finance'))
+ ORDER BY aggregation_target
+ LIMIT 100
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
+ '''
+# ---
diff --git a/posthog/hogql_queries/insights/funnels/test/breakdown_cases.py b/posthog/hogql_queries/insights/funnels/test/breakdown_cases.py
index 39359a906656b..4d00b0a265b98 100644
--- a/posthog/hogql_queries/insights/funnels/test/breakdown_cases.py
+++ b/posthog/hogql_queries/insights/funnels/test/breakdown_cases.py
@@ -51,29 +51,6 @@ def _get_actor_ids_at_step(self, filter, funnel_step, breakdown_value=None):
return [val["id"] for val in serialized_result]
- def _create_groups(self):
- GroupTypeMapping.objects.create(team=self.team, group_type="organization", group_type_index=0)
- GroupTypeMapping.objects.create(team=self.team, group_type="company", group_type_index=1)
-
- create_group(
- team_id=self.team.pk,
- group_type_index=0,
- group_key="org:5",
- properties={"industry": "finance"},
- )
- create_group(
- team_id=self.team.pk,
- group_type_index=0,
- group_key="org:6",
- properties={"industry": "technology"},
- )
- create_group(
- team_id=self.team.pk,
- group_type_index=1,
- group_key="org:5",
- properties={"industry": "random"},
- )
-
def _assert_funnel_breakdown_result_is_correct(self, result, steps: List[FunnelStepResult]):
def funnel_result(step: FunnelStepResult, order: int) -> Dict[str, Any]:
return {
@@ -2681,6 +2658,73 @@ def test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step(self):
self.assertCountEqual([res[0]["breakdown"] for res in results], [["Mac"], ["Safari"]])
+ return TestFunnelBreakdown
+
+
+def funnel_breakdown_group_test_factory(FunnelPerson):
+ funnel_order_type = FunnelOrderType.ORDERED
+
+ class TestFunnelBreakdownGroup(APIBaseTest):
+ def _get_actor_ids_at_step(self, filter, funnel_step, breakdown_value=None):
+ filter = Filter(data=filter, team=self.team)
+ person_filter = filter.shallow_clone({"funnel_step": funnel_step, "funnel_step_breakdown": breakdown_value})
+ _, serialized_result, _ = FunnelPerson(person_filter, self.team).get_actors()
+
+ return [val["id"] for val in serialized_result]
+
+ def _create_groups(self):
+ GroupTypeMapping.objects.create(team=self.team, group_type="organization", group_type_index=0)
+ GroupTypeMapping.objects.create(team=self.team, group_type="company", group_type_index=1)
+
+ create_group(
+ team_id=self.team.pk,
+ group_type_index=0,
+ group_key="org:5",
+ properties={"industry": "finance"},
+ )
+ create_group(
+ team_id=self.team.pk,
+ group_type_index=0,
+ group_key="org:6",
+ properties={"industry": "technology"},
+ )
+ create_group(
+ team_id=self.team.pk,
+ group_type_index=1,
+ group_key="org:5",
+ properties={"industry": "random"},
+ )
+
+ def _assert_funnel_breakdown_result_is_correct(self, result, steps: List[FunnelStepResult]):
+ def funnel_result(step: FunnelStepResult, order: int) -> Dict[str, Any]:
+ return {
+ "action_id": step.name if step.type == "events" else step.action_id,
+ "name": step.name,
+ "custom_name": None,
+ "order": order,
+ "people": [],
+ "count": step.count,
+ "type": step.type,
+ "average_conversion_time": step.average_conversion_time,
+ "median_conversion_time": step.median_conversion_time,
+ "breakdown": step.breakdown,
+ "breakdown_value": step.breakdown,
+ **(
+ {
+ "action_id": None,
+ "name": f"Completed {order+1} step{'s' if order > 0 else ''}",
+ }
+ if funnel_order_type == FunnelOrderType.UNORDERED
+ else {}
+ ),
+ }
+
+ step_results = []
+ for index, step_result in enumerate(steps):
+ step_results.append(funnel_result(step_result, index))
+
+ assert_funnel_results_equal(result, step_results)
+
@snapshot_clickhouse_queries
def test_funnel_breakdown_group(self):
self._create_groups()
@@ -3020,7 +3064,7 @@ def test_funnel_aggregate_by_groups_breakdown_group_person_on_events(self):
],
)
- return TestFunnelBreakdown
+ return TestFunnelBreakdownGroup
def sort_breakdown_funnel_results(results: List[Dict[int, Any]]):
diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel.py b/posthog/hogql_queries/insights/funnels/test/test_funnel.py
index 8c374eacfdb2d..119afafbf4f71 100644
--- a/posthog/hogql_queries/insights/funnels/test/test_funnel.py
+++ b/posthog/hogql_queries/insights/funnels/test/test_funnel.py
@@ -32,8 +32,9 @@
funnel_conversion_time_test_factory,
)
from posthog.hogql_queries.insights.funnels.test.breakdown_cases import (
- assert_funnel_results_equal,
funnel_breakdown_test_factory,
+ funnel_breakdown_group_test_factory,
+ assert_funnel_results_equal,
)
from posthog.hogql_queries.insights.funnels import Funnel
from posthog.test.test_journeys import journeys_for
@@ -61,6 +62,15 @@ class TestFunnelBreakdown(
pass
+class TestFunnelGroupBreakdown(
+ ClickhouseTestMixin,
+ funnel_breakdown_group_test_factory( # type: ignore
+ ClickhouseFunnelActors,
+ ),
+):
+ pass
+
+
class TestFunnelConversionTime(
ClickhouseTestMixin,
funnel_conversion_time_test_factory(FunnelOrderType.ORDERED, ClickhouseFunnelActors), # type: ignore
diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_strict.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_strict.py
index 9038576a9ebfd..a673eb0cddb62 100644
--- a/posthog/hogql_queries/insights/funnels/test/test_funnel_strict.py
+++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_strict.py
@@ -8,8 +8,9 @@
)
from posthog.hogql_queries.insights.funnels.test.breakdown_cases import (
- assert_funnel_results_equal,
funnel_breakdown_test_factory,
+ funnel_breakdown_group_test_factory,
+ assert_funnel_results_equal,
)
from posthog.hogql_queries.legacy_compatibility.filter_to_query import filter_to_query
from posthog.models.action import Action
@@ -179,6 +180,15 @@ def test_strict_breakdown_events_with_multiple_properties(self):
self.assertCountEqual(self._get_actor_ids_at_step(filters, 2, ["Safari"]), [people["person2"].uuid])
+class TestStrictFunnelGroupBreakdown(
+ ClickhouseTestMixin,
+ funnel_breakdown_group_test_factory( # type: ignore
+ ClickhouseFunnelStrictActors,
+ ),
+):
+ pass
+
+
class TestFunnelStrictStepsConversionTime(
ClickhouseTestMixin,
funnel_conversion_time_test_factory(FunnelOrderType.ORDERED, ClickhouseFunnelStrictActors), # type: ignore
diff --git a/posthog/hogql_queries/insights/funnels/test/test_funnel_unordered.py b/posthog/hogql_queries/insights/funnels/test/test_funnel_unordered.py
index ae72ba3ab37b3..36e5d87f39e49 100644
--- a/posthog/hogql_queries/insights/funnels/test/test_funnel_unordered.py
+++ b/posthog/hogql_queries/insights/funnels/test/test_funnel_unordered.py
@@ -1,4 +1,4 @@
-# from datetime import datetime
+from datetime import datetime
from typing import cast
from rest_framework.exceptions import ValidationError
@@ -7,8 +7,8 @@
from posthog.hogql_queries.insights.funnels.funnels_query_runner import FunnelsQueryRunner
from posthog.hogql_queries.legacy_compatibility.filter_to_query import filter_to_query
-# from posthog.models.action import Action
-# from posthog.models.action_step import ActionStep
+from posthog.models.action import Action
+from posthog.models.action_step import ActionStep
from posthog.models.filters import Filter
from posthog.models.property_definition import PropertyDefinition
from posthog.queries.funnels.funnel_unordered_persons import (
@@ -19,619 +19,630 @@
)
from posthog.schema import FunnelsQuery
-# from posthog.hogql_queries.insights.funnels.test.breakdown_cases import (
-# assert_funnel_results_equal,
-# funnel_breakdown_test_factory,
-# )
+from posthog.hogql_queries.insights.funnels.test.breakdown_cases import (
+ FunnelStepResult,
+ funnel_breakdown_test_factory,
+ funnel_breakdown_group_test_factory,
+ assert_funnel_results_equal,
+)
from posthog.test.base import (
APIBaseTest,
ClickhouseTestMixin,
_create_event,
_create_person,
- # snapshot_clickhouse_queries,
+ snapshot_clickhouse_queries,
)
-# from posthog.test.test_journeys import journeys_for
+from posthog.test.test_journeys import journeys_for
FORMAT_TIME = "%Y-%m-%d 00:00:00"
-# def _create_action(**kwargs):
-# team = kwargs.pop("team")
-# name = kwargs.pop("name")
-# properties = kwargs.pop("properties", {})
-# action = Action.objects.create(team=team, name=name)
-# ActionStep.objects.create(action=action, event=name, properties=properties)
-# return action
-
-
-# class TestFunnelUnorderedStepsBreakdown(
-# ClickhouseTestMixin,
-# funnel_breakdown_test_factory( # type: ignore
-# FunnelUnordered,
-# ClickhouseFunnelUnorderedActors,
-# _create_event,
-# _create_action,
-# _create_person,
-# ),
-# ):
-# maxDiff = None
-
-# def test_funnel_step_breakdown_event_single_person_events_with_multiple_properties(self):
-# # overriden from factory
-
-# filters = {
-# "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}],
-# "insight": INSIGHT_FUNNELS,
-# "date_from": "2020-01-01",
-# "date_to": "2020-01-08",
-# "funnel_window_days": 7,
-# "breakdown_type": "event",
-# "breakdown": "$browser",
-# "breakdown_attribution_type": "all_events",
-# }
-
-# # event
-# person1 = _create_person(distinct_ids=["person1"], team_id=self.team.pk)
-# _create_event(
-# team=self.team,
-# event="sign up",
-# distinct_id="person1",
-# properties={"key": "val", "$browser": "Chrome"},
-# timestamp="2020-01-01T12:00:00Z",
-# )
-# _create_event(
-# team=self.team,
-# event="sign up",
-# distinct_id="person1",
-# properties={"key": "val", "$browser": "Safari"},
-# timestamp="2020-01-02T13:00:00Z",
-# )
-# _create_event(
-# team=self.team,
-# event="play movie",
-# distinct_id="person1",
-# properties={"key": "val", "$browser": "Safari"},
-# timestamp="2020-01-02T14:00:00Z",
-# )
-
-# query = cast(FunnelsQuery, filter_to_query(filters))
-# results = FunnelsQueryRunner(query=query, team=self.team).calculate().results
-
-# assert_funnel_results_equal(
-# results[0],
-# [
-# {
-# "action_id": None,
-# "name": "Completed 1 step",
-# "custom_name": None,
-# "order": 0,
-# "people": [],
-# "count": 1,
-# "type": "events",
-# "average_conversion_time": None,
-# "median_conversion_time": None,
-# "breakdown": ["Chrome"],
-# "breakdown_value": ["Chrome"],
-# },
-# {
-# "action_id": None,
-# "name": "Completed 2 steps",
-# "custom_name": None,
-# "order": 1,
-# "people": [],
-# "count": 0,
-# "type": "events",
-# "average_conversion_time": None,
-# "median_conversion_time": None,
-# "breakdown": ["Chrome"],
-# "breakdown_value": ["Chrome"],
-# },
-# ],
-# )
-# self.assertCountEqual(self._get_actor_ids_at_step(filters, 1, ["Chrome"]), [person1.uuid])
-# self.assertCountEqual(self._get_actor_ids_at_step(filters, 2, ["Chrome"]), [])
-
-# assert_funnel_results_equal(
-# results[1],
-# [
-# {
-# "action_id": None,
-# "name": "Completed 1 step",
-# "custom_name": None,
-# "order": 0,
-# "people": [],
-# "count": 1,
-# "type": "events",
-# "average_conversion_time": None,
-# "median_conversion_time": None,
-# "breakdown": ["Safari"],
-# "breakdown_value": ["Safari"],
-# },
-# {
-# "action_id": None,
-# "name": "Completed 2 steps",
-# "custom_name": None,
-# "order": 1,
-# "people": [],
-# "count": 1,
-# "type": "events",
-# "average_conversion_time": 3600,
-# "median_conversion_time": 3600,
-# "breakdown": ["Safari"],
-# "breakdown_value": ["Safari"],
-# },
-# ],
-# )
-# self.assertCountEqual(self._get_actor_ids_at_step(filters, 1, ["Safari"]), [person1.uuid])
-# self.assertCountEqual(self._get_actor_ids_at_step(filters, 2, ["Safari"]), [person1.uuid])
-
-# def test_funnel_step_breakdown_with_step_attribution(self):
-# # overridden from factory, since with no order, step one is step zero, and vice versa
-
-# filters = {
-# "events": [{"id": "sign up", "order": 0}, {"id": "buy", "order": 1}],
-# "insight": INSIGHT_FUNNELS,
-# "date_from": "2020-01-01",
-# "date_to": "2020-01-08",
-# "funnel_window_days": 7,
-# "breakdown_type": "event",
-# "breakdown": ["$browser"],
-# "breakdown_attribution_type": "step",
-# "breakdown_attribution_value": "0",
-# "funnel_order_type": "unordered",
-# }
-
-# # event
-# events_by_person = {
-# "person1": [
-# {
-# "event": "sign up",
-# "timestamp": datetime(2020, 1, 1, 12),
-# "properties": {"$browser": "Chrome"},
-# },
-# {"event": "buy", "timestamp": datetime(2020, 1, 1, 13)},
-# ],
-# "person2": [
-# {"event": "sign up", "timestamp": datetime(2020, 1, 1, 13)},
-# {
-# "event": "buy",
-# "timestamp": datetime(2020, 1, 2, 13),
-# "properties": {"$browser": "Safari"},
-# },
-# ],
-# "person3": [
-# {
-# "event": "sign up",
-# "timestamp": datetime(2020, 1, 2, 14),
-# "properties": {"$browser": "Mac"},
-# },
-# {"event": "buy", "timestamp": datetime(2020, 1, 2, 15)},
-# ],
-# "person4": [
-# {
-# "event": "sign up",
-# "timestamp": datetime(2020, 1, 2, 15),
-# "properties": {"$browser": 0},
-# },
-# # step attribution means alakazam is valid when step = 1
-# {
-# "event": "buy",
-# "timestamp": datetime(2020, 1, 2, 16),
-# "properties": {"$browser": "alakazam"},
-# },
-# ],
-# }
-# people = journeys_for(events_by_person, self.team)
-
-# query = cast(FunnelsQuery, filter_to_query(filters))
-# results = FunnelsQueryRunner(query=query, team=self.team).calculate().results
-# results = sorted(results, key=lambda res: res[0]["breakdown"])
-
-# self.assertEqual(len(results), 6)
-
-# self.assertCountEqual(self._get_actor_ids_at_step(filters, 1, "Mac"), [people["person3"].uuid])
-
-# def test_funnel_step_breakdown_with_step_one_attribution(self):
-# # overridden from factory, since with no order, step one is step zero, and vice versa
-# filters = {
-# "events": [{"id": "sign up", "order": 0}, {"id": "buy", "order": 1}],
-# "insight": INSIGHT_FUNNELS,
-# "date_from": "2020-01-01",
-# "date_to": "2020-01-08",
-# "funnel_window_days": 7,
-# "breakdown_type": "event",
-# "breakdown": ["$browser"],
-# "breakdown_attribution_type": "step",
-# "breakdown_attribution_value": "1",
-# "funnel_order_type": "unordered",
-# }
-
-# # event
-# events_by_person = {
-# "person1": [
-# {
-# "event": "sign up",
-# "timestamp": datetime(2020, 1, 1, 12),
-# "properties": {"$browser": "Chrome"},
-# },
-# {"event": "buy", "timestamp": datetime(2020, 1, 1, 13)},
-# ],
-# "person2": [
-# {"event": "sign up", "timestamp": datetime(2020, 1, 1, 13)},
-# {
-# "event": "buy",
-# "timestamp": datetime(2020, 1, 2, 13),
-# "properties": {"$browser": "Safari"},
-# },
-# ],
-# "person3": [
-# {
-# "event": "sign up",
-# "timestamp": datetime(2020, 1, 2, 14),
-# "properties": {"$browser": "Mac"},
-# },
-# {"event": "buy", "timestamp": datetime(2020, 1, 2, 15)},
-# ],
-# "person4": [
-# {
-# "event": "sign up",
-# "timestamp": datetime(2020, 1, 2, 15),
-# "properties": {"$browser": 0},
-# },
-# # step attribution means alakazam is valid when step = 1
-# {
-# "event": "buy",
-# "timestamp": datetime(2020, 1, 2, 16),
-# "properties": {"$browser": "alakazam"},
-# },
-# ],
-# }
-# people = journeys_for(events_by_person, self.team)
-
-# query = cast(FunnelsQuery, filter_to_query(filters))
-# results = FunnelsQueryRunner(query=query, team=self.team).calculate().results
-# results = sorted(results, key=lambda res: res[0]["breakdown"])
-
-# self.assertEqual(len(results), 6)
-# # unordered, so everything is step one too.
-
-# self._assert_funnel_breakdown_result_is_correct(
-# results[0],
-# [
-# FunnelStepResult(name="Completed 1 step", breakdown=[""], count=3),
-# FunnelStepResult(
-# name="Completed 2 steps",
-# breakdown=[""],
-# count=2,
-# average_conversion_time=3600,
-# median_conversion_time=3600,
-# ),
-# ],
-# )
-
-# self.assertCountEqual(
-# self._get_actor_ids_at_step(filters, 1, ""),
-# [people["person1"].uuid, people["person2"].uuid, people["person3"].uuid],
-# )
-# self.assertCountEqual(
-# self._get_actor_ids_at_step(filters, 2, ""),
-# [people["person1"].uuid, people["person3"].uuid],
-# )
-
-# self._assert_funnel_breakdown_result_is_correct(
-# results[1],
-# [
-# FunnelStepResult(name="Completed 1 step", breakdown=["0"], count=1),
-# FunnelStepResult(name="Completed 2 steps", breakdown=["0"], count=0),
-# ],
-# )
-
-# self.assertCountEqual(self._get_actor_ids_at_step(filters, 1, "0"), [people["person4"].uuid])
-
-# def test_funnel_step_breakdown_with_step_one_attribution_incomplete_funnel(self):
-# # overridden from factory, since with no order, step one is step zero, and vice versa
-
-# filters = {
-# "events": [{"id": "sign up", "order": 0}, {"id": "buy", "order": 1}],
-# "insight": INSIGHT_FUNNELS,
-# "date_from": "2020-01-01",
-# "date_to": "2020-01-08",
-# "funnel_window_days": 7,
-# "breakdown_type": "event",
-# "breakdown": ["$browser"],
-# "breakdown_attribution_type": "step",
-# "breakdown_attribution_value": "1",
-# "funnel_order_type": "unordered",
-# }
-
-# # event
-# events_by_person = {
-# "person1": [
-# {
-# "event": "sign up",
-# "timestamp": datetime(2020, 1, 1, 12),
-# "properties": {"$browser": "Chrome"},
-# },
-# {"event": "buy", "timestamp": datetime(2020, 1, 1, 13)},
-# ],
-# "person2": [
-# {"event": "sign up", "timestamp": datetime(2020, 1, 1, 13)},
-# # {"event": "buy", "timestamp": datetime(2020, 1, 2, 13), "properties": {"$browser": "Safari"}}
-# ],
-# "person3": [
-# {
-# "event": "sign up",
-# "timestamp": datetime(2020, 1, 2, 14),
-# "properties": {"$browser": "Mac"},
-# },
-# # {"event": "buy", "timestamp": datetime(2020, 1, 2, 15)}
-# ],
-# "person4": [
-# {
-# "event": "sign up",
-# "timestamp": datetime(2020, 1, 2, 15),
-# "properties": {"$browser": 0},
-# },
-# # step attribution means alakazam is valid when step = 1
-# {
-# "event": "buy",
-# "timestamp": datetime(2020, 1, 2, 16),
-# "properties": {"$browser": "alakazam"},
-# },
-# ],
-# }
-# people = journeys_for(events_by_person, self.team)
-
-# query = cast(FunnelsQuery, filter_to_query(filters))
-# results = FunnelsQueryRunner(query=query, team=self.team).calculate().results
-# results = sorted(results, key=lambda res: res[0]["breakdown"])
-
-# # Breakdown by step_1 means funnel items that never reach step_1 are NULLed out
-# self.assertEqual(len(results), 4)
-# # Chrome and Mac and Safari goes away
-
-# self._assert_funnel_breakdown_result_is_correct(
-# results[0],
-# [
-# FunnelStepResult(name="Completed 1 step", breakdown=[""], count=1),
-# FunnelStepResult(
-# name="Completed 2 steps",
-# breakdown=[""],
-# count=1,
-# average_conversion_time=3600,
-# median_conversion_time=3600,
-# ),
-# ],
-# )
-
-# self.assertCountEqual(self._get_actor_ids_at_step(filters, 1, ""), [people["person1"].uuid])
-
-# self._assert_funnel_breakdown_result_is_correct(
-# results[1],
-# [
-# FunnelStepResult(name="Completed 1 step", breakdown=["0"], count=1),
-# FunnelStepResult(name="Completed 2 steps", breakdown=["0"], count=0),
-# ],
-# )
-
-# self.assertCountEqual(self._get_actor_ids_at_step(filters, 1, "0"), [people["person4"].uuid])
-
-# self._assert_funnel_breakdown_result_is_correct(
-# results[2],
-# [
-# FunnelStepResult(name="Completed 1 step", breakdown=["Chrome"], count=1),
-# FunnelStepResult(name="Completed 2 steps", breakdown=["Chrome"], count=0),
-# ],
-# )
-
-# self.assertCountEqual(self._get_actor_ids_at_step(filters, 1, "Chrome"), [people["person1"].uuid])
-
-# self._assert_funnel_breakdown_result_is_correct(
-# results[3],
-# [
-# FunnelStepResult(name="Completed 1 step", breakdown=["alakazam"], count=1),
-# FunnelStepResult(
-# name="Completed 2 steps",
-# breakdown=["alakazam"],
-# count=1,
-# average_conversion_time=3600,
-# median_conversion_time=3600,
-# ),
-# ],
-# )
-
-# self.assertCountEqual(self._get_actor_ids_at_step(filters, 1, "alakazam"), [people["person4"].uuid])
-
-# def test_funnel_step_non_array_breakdown_with_step_one_attribution_incomplete_funnel(self):
-# # overridden from factory, since with no order, step one is step zero, and vice versa
-
-# filters = {
-# "events": [{"id": "sign up", "order": 0}, {"id": "buy", "order": 1}],
-# "insight": INSIGHT_FUNNELS,
-# "date_from": "2020-01-01",
-# "date_to": "2020-01-08",
-# "funnel_window_days": 7,
-# "breakdown_type": "event",
-# "breakdown": "$browser",
-# "breakdown_attribution_type": "step",
-# "breakdown_attribution_value": "1",
-# "funnel_order_type": "unordered",
-# }
-
-# # event
-# events_by_person = {
-# "person1": [
-# {
-# "event": "sign up",
-# "timestamp": datetime(2020, 1, 1, 12),
-# "properties": {"$browser": "Chrome"},
-# },
-# {"event": "buy", "timestamp": datetime(2020, 1, 1, 13)},
-# ],
-# "person2": [
-# {"event": "sign up", "timestamp": datetime(2020, 1, 1, 13)},
-# # {"event": "buy", "timestamp": datetime(2020, 1, 2, 13), "properties": {"$browser": "Safari"}}
-# ],
-# "person3": [
-# {
-# "event": "sign up",
-# "timestamp": datetime(2020, 1, 2, 14),
-# "properties": {"$browser": "Mac"},
-# },
-# # {"event": "buy", "timestamp": datetime(2020, 1, 2, 15)}
-# ],
-# "person4": [
-# {
-# "event": "sign up",
-# "timestamp": datetime(2020, 1, 2, 15),
-# "properties": {"$browser": 0},
-# },
-# # step attribution means alakazam is valid when step = 1
-# {
-# "event": "buy",
-# "timestamp": datetime(2020, 1, 2, 16),
-# "properties": {"$browser": "alakazam"},
-# },
-# ],
-# }
-# people = journeys_for(events_by_person, self.team)
-
-# query = cast(FunnelsQuery, filter_to_query(filters))
-# results = FunnelsQueryRunner(query=query, team=self.team).calculate().results
-# results = sorted(results, key=lambda res: res[0]["breakdown"])
-
-# # Breakdown by step_1 means funnel items that never reach step_1 are NULLed out
-# self.assertEqual(len(results), 4)
-# # Chrome and Mac and Safari goes away
-
-# self._assert_funnel_breakdown_result_is_correct(
-# results[0],
-# [
-# FunnelStepResult(name="Completed 1 step", breakdown=[""], count=1),
-# FunnelStepResult(
-# name="Completed 2 steps",
-# breakdown=[""],
-# count=1,
-# average_conversion_time=3600,
-# median_conversion_time=3600,
-# ),
-# ],
-# )
-
-# self.assertCountEqual(self._get_actor_ids_at_step(filters, 1, ""), [people["person1"].uuid])
-
-# self._assert_funnel_breakdown_result_is_correct(
-# results[1],
-# [
-# FunnelStepResult(name="Completed 1 step", breakdown=["0"], count=1),
-# FunnelStepResult(name="Completed 2 steps", breakdown=["0"], count=0),
-# ],
-# )
-
-# self.assertCountEqual(self._get_actor_ids_at_step(filters, 1, "0"), [people["person4"].uuid])
-
-# self._assert_funnel_breakdown_result_is_correct(
-# results[2],
-# [
-# FunnelStepResult(name="Completed 1 step", breakdown=["Chrome"], count=1),
-# FunnelStepResult(name="Completed 2 steps", breakdown=["Chrome"], count=0),
-# ],
-# )
-
-# self.assertCountEqual(self._get_actor_ids_at_step(filters, 1, "Chrome"), [people["person1"].uuid])
-
-# self._assert_funnel_breakdown_result_is_correct(
-# results[3],
-# [
-# FunnelStepResult(name="Completed 1 step", breakdown=["alakazam"], count=1),
-# FunnelStepResult(
-# name="Completed 2 steps",
-# breakdown=["alakazam"],
-# count=1,
-# average_conversion_time=3600,
-# median_conversion_time=3600,
-# ),
-# ],
-# )
-
-# self.assertCountEqual(self._get_actor_ids_at_step(filters, 1, "alakazam"), [people["person4"].uuid])
-
-# @snapshot_clickhouse_queries
-# def test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step(self):
-# # No person querying here, so snapshots are more legible
-# # overridden from factory, since we need to add `funnel_order_type`
-
-# filters = {
-# "events": [
-# {"id": "sign up", "order": 0},
-# {
-# "id": "buy",
-# "properties": [{"type": "event", "key": "$version", "value": "xyz"}],
-# "order": 1,
-# },
-# ],
-# "insight": INSIGHT_FUNNELS,
-# "date_from": "2020-01-01",
-# "date_to": "2020-01-08",
-# "funnel_window_days": 7,
-# "breakdown_type": "event",
-# "breakdown": "$browser",
-# "breakdown_attribution_type": "step",
-# "breakdown_attribution_value": "1",
-# "funnel_order_type": "unordered",
-# }
-
-# # event
-# events_by_person = {
-# "person1": [
-# {
-# "event": "sign up",
-# "timestamp": datetime(2020, 1, 1, 12),
-# "properties": {"$browser": "Chrome", "$version": "xyz"},
-# },
-# {
-# "event": "buy",
-# "timestamp": datetime(2020, 1, 1, 13),
-# "properties": {"$browser": "Chrome"},
-# },
-# # discarded because doesn't meet criteria
-# ],
-# "person2": [
-# {"event": "sign up", "timestamp": datetime(2020, 1, 1, 13)},
-# {
-# "event": "buy",
-# "timestamp": datetime(2020, 1, 2, 13),
-# "properties": {"$browser": "Safari", "$version": "xyz"},
-# },
-# ],
-# "person3": [
-# {
-# "event": "sign up",
-# "timestamp": datetime(2020, 1, 2, 14),
-# "properties": {"$browser": "Mac"},
-# },
-# {
-# "event": "buy",
-# "timestamp": datetime(2020, 1, 2, 15),
-# "properties": {"$version": "xyz", "$browser": "Mac"},
-# },
-# ],
-# # no properties dude, doesn't make it to step 1, and since breakdown on step 1, is discarded completely
-# "person5": [
-# {"event": "sign up", "timestamp": datetime(2020, 1, 2, 15)},
-# {"event": "buy", "timestamp": datetime(2020, 1, 2, 16)},
-# ],
-# }
-# journeys_for(events_by_person, self.team)
-
-# query = cast(FunnelsQuery, filter_to_query(filters))
-# results = FunnelsQueryRunner(query=query, team=self.team).calculate().results
-# results = sorted(results, key=lambda res: res[0]["breakdown"])
-
-# self.assertEqual(len(results), 3)
-
-# self.assertCountEqual([res[0]["breakdown"] for res in results], [[""], ["Mac"], ["Safari"]])
+def _create_action(**kwargs):
+ team = kwargs.pop("team")
+ name = kwargs.pop("name")
+ properties = kwargs.pop("properties", {})
+ action = Action.objects.create(team=team, name=name)
+ ActionStep.objects.create(action=action, event=name, properties=properties)
+ return action
+
+
+class TestFunnelUnorderedStepsBreakdown(
+ ClickhouseTestMixin,
+ funnel_breakdown_test_factory( # type: ignore
+ FunnelOrderType.UNORDERED,
+ ClickhouseFunnelUnorderedActors,
+ _create_action,
+ _create_person,
+ ),
+):
+ maxDiff = None
+
+ def test_funnel_step_breakdown_event_single_person_events_with_multiple_properties(self):
+        # overridden from factory
+
+ filters = {
+ "insight": INSIGHT_FUNNELS,
+ "funnel_order_type": "unordered",
+ "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}],
+ "date_from": "2020-01-01",
+ "date_to": "2020-01-08",
+ "funnel_window_days": 7,
+ "breakdown_type": "event",
+ "breakdown": "$browser",
+ "breakdown_attribution_type": "all_events",
+ }
+
+ # event
+ person1 = _create_person(distinct_ids=["person1"], team_id=self.team.pk)
+ _create_event(
+ team=self.team,
+ event="sign up",
+ distinct_id="person1",
+ properties={"key": "val", "$browser": "Chrome"},
+ timestamp="2020-01-01T12:00:00Z",
+ )
+ _create_event(
+ team=self.team,
+ event="sign up",
+ distinct_id="person1",
+ properties={"key": "val", "$browser": "Safari"},
+ timestamp="2020-01-02T13:00:00Z",
+ )
+ _create_event(
+ team=self.team,
+ event="play movie",
+ distinct_id="person1",
+ properties={"key": "val", "$browser": "Safari"},
+ timestamp="2020-01-02T14:00:00Z",
+ )
+
+ query = cast(FunnelsQuery, filter_to_query(filters))
+ results = FunnelsQueryRunner(query=query, team=self.team).calculate().results
+
+ assert_funnel_results_equal(
+ results[0],
+ [
+ {
+ "action_id": None,
+ "name": "Completed 1 step",
+ "custom_name": None,
+ "order": 0,
+ "people": [],
+ "count": 1,
+ "type": "events",
+ "average_conversion_time": None,
+ "median_conversion_time": None,
+ "breakdown": ["Chrome"],
+ "breakdown_value": ["Chrome"],
+ },
+ {
+ "action_id": None,
+ "name": "Completed 2 steps",
+ "custom_name": None,
+ "order": 1,
+ "people": [],
+ "count": 0,
+ "type": "events",
+ "average_conversion_time": None,
+ "median_conversion_time": None,
+ "breakdown": ["Chrome"],
+ "breakdown_value": ["Chrome"],
+ },
+ ],
+ )
+ self.assertCountEqual(self._get_actor_ids_at_step(filters, 1, ["Chrome"]), [person1.uuid])
+ self.assertCountEqual(self._get_actor_ids_at_step(filters, 2, ["Chrome"]), [])
+
+ assert_funnel_results_equal(
+ results[1],
+ [
+ {
+ "action_id": None,
+ "name": "Completed 1 step",
+ "custom_name": None,
+ "order": 0,
+ "people": [],
+ "count": 1,
+ "type": "events",
+ "average_conversion_time": None,
+ "median_conversion_time": None,
+ "breakdown": ["Safari"],
+ "breakdown_value": ["Safari"],
+ },
+ {
+ "action_id": None,
+ "name": "Completed 2 steps",
+ "custom_name": None,
+ "order": 1,
+ "people": [],
+ "count": 1,
+ "type": "events",
+ "average_conversion_time": 3600,
+ "median_conversion_time": 3600,
+ "breakdown": ["Safari"],
+ "breakdown_value": ["Safari"],
+ },
+ ],
+ )
+ self.assertCountEqual(self._get_actor_ids_at_step(filters, 1, ["Safari"]), [person1.uuid])
+ self.assertCountEqual(self._get_actor_ids_at_step(filters, 2, ["Safari"]), [person1.uuid])
+
+ def test_funnel_step_breakdown_with_step_attribution(self):
+ # overridden from factory, since with no order, step one is step zero, and vice versa
+
+ filters = {
+ "insight": INSIGHT_FUNNELS,
+ "funnel_order_type": "unordered",
+ "events": [{"id": "sign up", "order": 0}, {"id": "buy", "order": 1}],
+ "date_from": "2020-01-01",
+ "date_to": "2020-01-08",
+ "funnel_window_days": 7,
+ "breakdown_type": "event",
+ "breakdown": ["$browser"],
+ "breakdown_attribution_type": "step",
+ "breakdown_attribution_value": "0",
+ }
+
+ # event
+ events_by_person = {
+ "person1": [
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 1, 12),
+ "properties": {"$browser": "Chrome"},
+ },
+ {"event": "buy", "timestamp": datetime(2020, 1, 1, 13)},
+ ],
+ "person2": [
+ {"event": "sign up", "timestamp": datetime(2020, 1, 1, 13)},
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 13),
+ "properties": {"$browser": "Safari"},
+ },
+ ],
+ "person3": [
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ "properties": {"$browser": "Mac"},
+ },
+ {"event": "buy", "timestamp": datetime(2020, 1, 2, 15)},
+ ],
+ "person4": [
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 15),
+ "properties": {"$browser": 0},
+ },
+ # step attribution means alakazam is valid when step = 1
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 16),
+ "properties": {"$browser": "alakazam"},
+ },
+ ],
+ }
+ people = journeys_for(events_by_person, self.team)
+
+ query = cast(FunnelsQuery, filter_to_query(filters))
+ results = FunnelsQueryRunner(query=query, team=self.team).calculate().results
+ results = sorted(results, key=lambda res: res[0]["breakdown"])
+
+ self.assertEqual(len(results), 6)
+
+ self.assertCountEqual(self._get_actor_ids_at_step(filters, 1, "Mac"), [people["person3"].uuid])
+
+ def test_funnel_step_breakdown_with_step_one_attribution(self):
+ # overridden from factory, since with no order, step one is step zero, and vice versa
+ filters = {
+ "insight": INSIGHT_FUNNELS,
+ "funnel_order_type": "unordered",
+ "events": [{"id": "sign up", "order": 0}, {"id": "buy", "order": 1}],
+ "date_from": "2020-01-01",
+ "date_to": "2020-01-08",
+ "funnel_window_days": 7,
+ "breakdown_type": "event",
+ "breakdown": ["$browser"],
+ "breakdown_attribution_type": "step",
+ "breakdown_attribution_value": "1",
+ }
+
+ # event
+ events_by_person = {
+ "person1": [
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 1, 12),
+ "properties": {"$browser": "Chrome"},
+ },
+ {"event": "buy", "timestamp": datetime(2020, 1, 1, 13)},
+ ],
+ "person2": [
+ {"event": "sign up", "timestamp": datetime(2020, 1, 1, 13)},
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 13),
+ "properties": {"$browser": "Safari"},
+ },
+ ],
+ "person3": [
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ "properties": {"$browser": "Mac"},
+ },
+ {"event": "buy", "timestamp": datetime(2020, 1, 2, 15)},
+ ],
+ "person4": [
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 15),
+ "properties": {"$browser": 0},
+ },
+ # step attribution means alakazam is valid when step = 1
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 16),
+ "properties": {"$browser": "alakazam"},
+ },
+ ],
+ }
+ people = journeys_for(events_by_person, self.team)
+
+ query = cast(FunnelsQuery, filter_to_query(filters))
+ results = FunnelsQueryRunner(query=query, team=self.team).calculate().results
+ results = sorted(results, key=lambda res: res[0]["breakdown"])
+
+ self.assertEqual(len(results), 6)
+ # unordered, so everything is step one too.
+
+ self._assert_funnel_breakdown_result_is_correct(
+ results[0],
+ [
+ FunnelStepResult(name="Completed 1 step", breakdown=[""], count=3),
+ FunnelStepResult(
+ name="Completed 2 steps",
+ breakdown=[""],
+ count=2,
+ average_conversion_time=3600,
+ median_conversion_time=3600,
+ ),
+ ],
+ )
+
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filters, 1, ""),
+ [people["person1"].uuid, people["person2"].uuid, people["person3"].uuid],
+ )
+ self.assertCountEqual(
+ self._get_actor_ids_at_step(filters, 2, ""),
+ [people["person1"].uuid, people["person3"].uuid],
+ )
+
+ self._assert_funnel_breakdown_result_is_correct(
+ results[1],
+ [
+ FunnelStepResult(name="Completed 1 step", breakdown=["0"], count=1),
+ FunnelStepResult(name="Completed 2 steps", breakdown=["0"], count=0),
+ ],
+ )
+
+ self.assertCountEqual(self._get_actor_ids_at_step(filters, 1, "0"), [people["person4"].uuid])
+
+ def test_funnel_step_breakdown_with_step_one_attribution_incomplete_funnel(self):
+ # overridden from factory, since with no order, step one is step zero, and vice versa
+
+ filters = {
+ "events": [{"id": "sign up", "order": 0}, {"id": "buy", "order": 1}],
+ "insight": INSIGHT_FUNNELS,
+ "date_from": "2020-01-01",
+ "date_to": "2020-01-08",
+ "funnel_window_days": 7,
+ "breakdown_type": "event",
+ "breakdown": ["$browser"],
+ "breakdown_attribution_type": "step",
+ "breakdown_attribution_value": "1",
+ "funnel_order_type": "unordered",
+ }
+
+ # event
+ events_by_person = {
+ "person1": [
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 1, 12),
+ "properties": {"$browser": "Chrome"},
+ },
+ {"event": "buy", "timestamp": datetime(2020, 1, 1, 13)},
+ ],
+ "person2": [
+ {"event": "sign up", "timestamp": datetime(2020, 1, 1, 13)},
+ # {"event": "buy", "timestamp": datetime(2020, 1, 2, 13), "properties": {"$browser": "Safari"}}
+ ],
+ "person3": [
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ "properties": {"$browser": "Mac"},
+ },
+ # {"event": "buy", "timestamp": datetime(2020, 1, 2, 15)}
+ ],
+ "person4": [
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 15),
+ "properties": {"$browser": 0},
+ },
+ # step attribution means alakazam is valid when step = 1
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 16),
+ "properties": {"$browser": "alakazam"},
+ },
+ ],
+ }
+ people = journeys_for(events_by_person, self.team)
+
+ query = cast(FunnelsQuery, filter_to_query(filters))
+ results = FunnelsQueryRunner(query=query, team=self.team).calculate().results
+ results = sorted(results, key=lambda res: res[0]["breakdown"])
+
+ # Breakdown by step_1 means funnel items that never reach step_1 are NULLed out
+ self.assertEqual(len(results), 4)
+        # Chrome and Mac and Safari go away
+
+ self._assert_funnel_breakdown_result_is_correct(
+ results[0],
+ [
+ FunnelStepResult(name="Completed 1 step", breakdown=[""], count=1),
+ FunnelStepResult(
+ name="Completed 2 steps",
+ breakdown=[""],
+ count=1,
+ average_conversion_time=3600,
+ median_conversion_time=3600,
+ ),
+ ],
+ )
+
+ self.assertCountEqual(self._get_actor_ids_at_step(filters, 1, ""), [people["person1"].uuid])
+
+ self._assert_funnel_breakdown_result_is_correct(
+ results[1],
+ [
+ FunnelStepResult(name="Completed 1 step", breakdown=["0"], count=1),
+ FunnelStepResult(name="Completed 2 steps", breakdown=["0"], count=0),
+ ],
+ )
+
+ self.assertCountEqual(self._get_actor_ids_at_step(filters, 1, "0"), [people["person4"].uuid])
+
+ self._assert_funnel_breakdown_result_is_correct(
+ results[2],
+ [
+ FunnelStepResult(name="Completed 1 step", breakdown=["Chrome"], count=1),
+ FunnelStepResult(name="Completed 2 steps", breakdown=["Chrome"], count=0),
+ ],
+ )
+
+ self.assertCountEqual(self._get_actor_ids_at_step(filters, 1, "Chrome"), [people["person1"].uuid])
+
+ self._assert_funnel_breakdown_result_is_correct(
+ results[3],
+ [
+ FunnelStepResult(name="Completed 1 step", breakdown=["alakazam"], count=1),
+ FunnelStepResult(
+ name="Completed 2 steps",
+ breakdown=["alakazam"],
+ count=1,
+ average_conversion_time=3600,
+ median_conversion_time=3600,
+ ),
+ ],
+ )
+
+ self.assertCountEqual(self._get_actor_ids_at_step(filters, 1, "alakazam"), [people["person4"].uuid])
+
+ def test_funnel_step_non_array_breakdown_with_step_one_attribution_incomplete_funnel(self):
+ # overridden from factory, since with no order, step one is step zero, and vice versa
+
+ filters = {
+ "events": [{"id": "sign up", "order": 0}, {"id": "buy", "order": 1}],
+ "insight": INSIGHT_FUNNELS,
+ "date_from": "2020-01-01",
+ "date_to": "2020-01-08",
+ "funnel_window_days": 7,
+ "breakdown_type": "event",
+ "breakdown": "$browser",
+ "breakdown_attribution_type": "step",
+ "breakdown_attribution_value": "1",
+ "funnel_order_type": "unordered",
+ }
+
+ # event
+ events_by_person = {
+ "person1": [
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 1, 12),
+ "properties": {"$browser": "Chrome"},
+ },
+ {"event": "buy", "timestamp": datetime(2020, 1, 1, 13)},
+ ],
+ "person2": [
+ {"event": "sign up", "timestamp": datetime(2020, 1, 1, 13)},
+ # {"event": "buy", "timestamp": datetime(2020, 1, 2, 13), "properties": {"$browser": "Safari"}}
+ ],
+ "person3": [
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ "properties": {"$browser": "Mac"},
+ },
+ # {"event": "buy", "timestamp": datetime(2020, 1, 2, 15)}
+ ],
+ "person4": [
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 15),
+ "properties": {"$browser": 0},
+ },
+ # step attribution means alakazam is valid when step = 1
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 16),
+ "properties": {"$browser": "alakazam"},
+ },
+ ],
+ }
+ people = journeys_for(events_by_person, self.team)
+
+ query = cast(FunnelsQuery, filter_to_query(filters))
+ results = FunnelsQueryRunner(query=query, team=self.team).calculate().results
+ results = sorted(results, key=lambda res: res[0]["breakdown"])
+
+ # Breakdown by step_1 means funnel items that never reach step_1 are NULLed out
+ self.assertEqual(len(results), 4)
+        # Chrome and Mac and Safari go away
+
+ self._assert_funnel_breakdown_result_is_correct(
+ results[0],
+ [
+ FunnelStepResult(name="Completed 1 step", breakdown=[""], count=1),
+ FunnelStepResult(
+ name="Completed 2 steps",
+ breakdown=[""],
+ count=1,
+ average_conversion_time=3600,
+ median_conversion_time=3600,
+ ),
+ ],
+ )
+
+ self.assertCountEqual(self._get_actor_ids_at_step(filters, 1, ""), [people["person1"].uuid])
+
+ self._assert_funnel_breakdown_result_is_correct(
+ results[1],
+ [
+ FunnelStepResult(name="Completed 1 step", breakdown=["0"], count=1),
+ FunnelStepResult(name="Completed 2 steps", breakdown=["0"], count=0),
+ ],
+ )
+
+ self.assertCountEqual(self._get_actor_ids_at_step(filters, 1, "0"), [people["person4"].uuid])
+
+ self._assert_funnel_breakdown_result_is_correct(
+ results[2],
+ [
+ FunnelStepResult(name="Completed 1 step", breakdown=["Chrome"], count=1),
+ FunnelStepResult(name="Completed 2 steps", breakdown=["Chrome"], count=0),
+ ],
+ )
+
+ self.assertCountEqual(self._get_actor_ids_at_step(filters, 1, "Chrome"), [people["person1"].uuid])
+
+ self._assert_funnel_breakdown_result_is_correct(
+ results[3],
+ [
+ FunnelStepResult(name="Completed 1 step", breakdown=["alakazam"], count=1),
+ FunnelStepResult(
+ name="Completed 2 steps",
+ breakdown=["alakazam"],
+ count=1,
+ average_conversion_time=3600,
+ median_conversion_time=3600,
+ ),
+ ],
+ )
+
+ self.assertCountEqual(self._get_actor_ids_at_step(filters, 1, "alakazam"), [people["person4"].uuid])
+
+ @snapshot_clickhouse_queries
+ def test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step(self):
+ # No person querying here, so snapshots are more legible
+ # overridden from factory, since we need to add `funnel_order_type`
+
+ filters = {
+ "events": [
+ {"id": "sign up", "order": 0},
+ {
+ "id": "buy",
+ "properties": [{"type": "event", "key": "$version", "value": "xyz"}],
+ "order": 1,
+ },
+ ],
+ "insight": INSIGHT_FUNNELS,
+ "date_from": "2020-01-01",
+ "date_to": "2020-01-08",
+ "funnel_window_days": 7,
+ "breakdown_type": "event",
+ "breakdown": "$browser",
+ "breakdown_attribution_type": "step",
+ "breakdown_attribution_value": "1",
+ "funnel_order_type": "unordered",
+ }
+
+ # event
+ events_by_person = {
+ "person1": [
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 1, 12),
+ "properties": {"$browser": "Chrome", "$version": "xyz"},
+ },
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 1, 13),
+ "properties": {"$browser": "Chrome"},
+ },
+ # discarded because doesn't meet criteria
+ ],
+ "person2": [
+ {"event": "sign up", "timestamp": datetime(2020, 1, 1, 13)},
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 13),
+ "properties": {"$browser": "Safari", "$version": "xyz"},
+ },
+ ],
+ "person3": [
+ {
+ "event": "sign up",
+ "timestamp": datetime(2020, 1, 2, 14),
+ "properties": {"$browser": "Mac"},
+ },
+ {
+ "event": "buy",
+ "timestamp": datetime(2020, 1, 2, 15),
+ "properties": {"$version": "xyz", "$browser": "Mac"},
+ },
+ ],
+            # no properties here, doesn't make it to step 1, and since breakdown on step 1, is discarded completely
+ "person5": [
+ {"event": "sign up", "timestamp": datetime(2020, 1, 2, 15)},
+ {"event": "buy", "timestamp": datetime(2020, 1, 2, 16)},
+ ],
+ }
+ journeys_for(events_by_person, self.team)
+
+ query = cast(FunnelsQuery, filter_to_query(filters))
+ results = FunnelsQueryRunner(query=query, team=self.team).calculate().results
+ results = sorted(results, key=lambda res: res[0]["breakdown"])
+
+ self.assertEqual(len(results), 3)
+
+ self.assertCountEqual([res[0]["breakdown"] for res in results], [[""], ["Mac"], ["Safari"]])
+
+
+class TestUnorderedFunnelGroupBreakdown(
+ ClickhouseTestMixin,
+ funnel_breakdown_group_test_factory( # type: ignore
+ ClickhouseFunnelUnorderedActors,
+ ),
+):
+ pass
class TestFunnelUnorderedStepsConversionTime(
diff --git a/posthog/hogql_queries/insights/test/__snapshots__/test_lifecycle_query_runner.ambr b/posthog/hogql_queries/insights/test/__snapshots__/test_lifecycle_query_runner.ambr
index ef3b23794866d..1320f6403b544 100644
--- a/posthog/hogql_queries/insights/test/__snapshots__/test_lifecycle_query_runner.ambr
+++ b/posthog/hogql_queries/insights/test/__snapshots__/test_lifecycle_query_runner.ambr
@@ -79,7 +79,7 @@
WHERE and(equals(events.team_id, 2), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), minus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 00:00:00', 6, 'UTC'))), toIntervalDay(1))), less(toTimeZone(events.timestamp, 'UTC'), plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-19 23:59:59', 6, 'UTC'))), toIntervalDay(1))), ifNull(in(person_id,
(SELECT cohortpeople.person_id AS person_id
FROM cohortpeople
- WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 4))
+ WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 5))
GROUP BY cohortpeople.person_id, cohortpeople.cohort_id, cohortpeople.version
HAVING ifNull(greater(sum(cohortpeople.sign), 0), 0))), 0), equals(events.event, '$pageview'))
GROUP BY person_id)
diff --git a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr
index d9e0cd6ed6abf..210e465f805b0 100644
--- a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr
+++ b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr
@@ -85,7 +85,7 @@
WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-07 23:59:59', 6, 'UTC'))), ifNull(equals(e__pdi__person.`properties___$bool_prop`, 'x'), 0), and(equals(e.event, 'sign up'), ifNull(in(e__pdi.person_id,
(SELECT cohortpeople.person_id AS person_id
FROM cohortpeople
- WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 5))
+ WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 6))
GROUP BY cohortpeople.person_id, cohortpeople.cohort_id, cohortpeople.version
HAVING ifNull(greater(sum(cohortpeople.sign), 0), 0))), 0)))
GROUP BY day_start)
@@ -172,7 +172,7 @@
WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-07 23:59:59', 6, 'UTC'))), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.person_properties, '$bool_prop'), ''), 'null'), '^"|"$', ''), 'x'), 0), and(equals(e.event, 'sign up'), ifNull(in(ifNull(nullIf(e__override.override_person_id, '00000000-0000-0000-0000-000000000000'), e.person_id),
(SELECT cohortpeople.person_id AS person_id
FROM cohortpeople
- WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 6))
+ WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 7))
GROUP BY cohortpeople.person_id, cohortpeople.cohort_id, cohortpeople.version
HAVING ifNull(greater(sum(cohortpeople.sign), 0), 0))), 0)))
GROUP BY day_start)
@@ -688,7 +688,7 @@
WHERE and(equals(e.team_id, 2), and(equals(e.event, '$pageview'), and(or(ifNull(equals(e__pdi__person.properties___name, 'p1'), 0), ifNull(equals(e__pdi__person.properties___name, 'p2'), 0), ifNull(equals(e__pdi__person.properties___name, 'p3'), 0)), ifNull(in(e__pdi.person_id,
(SELECT cohortpeople.person_id AS person_id
FROM cohortpeople
- WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 25))
+ WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 26))
GROUP BY cohortpeople.person_id, cohortpeople.cohort_id, cohortpeople.version
HAVING ifNull(greater(sum(cohortpeople.sign), 0), 0))), 0))))
GROUP BY value
@@ -757,7 +757,7 @@
WHERE and(equals(e.team_id, 2), and(and(equals(e.event, '$pageview'), and(or(ifNull(equals(e__pdi__person.properties___name, 'p1'), 0), ifNull(equals(e__pdi__person.properties___name, 'p2'), 0), ifNull(equals(e__pdi__person.properties___name, 'p3'), 0)), ifNull(in(e__pdi.person_id,
(SELECT cohortpeople.person_id AS person_id
FROM cohortpeople
- WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 25))
+ WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 26))
GROUP BY cohortpeople.person_id, cohortpeople.cohort_id, cohortpeople.version
HAVING ifNull(greater(sum(cohortpeople.sign), 0), 0))), 0))), or(ifNull(equals(transform(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', ''), '$$_posthog_breakdown_null_$$'), ['$$_posthog_breakdown_other_$$', 'val'], ['$$_posthog_breakdown_other_$$', 'val'], '$$_posthog_breakdown_other_$$'), '$$_posthog_breakdown_other_$$'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, 'key'), ''), 'null'), '^"|"$', ''), 'val'), 0))), ifNull(greaterOrEquals(timestamp, minus(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC')), toIntervalDay(7))), 0), ifNull(lessOrEquals(timestamp, assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-12 23:59:59', 6, 'UTC'))), 0))
GROUP BY timestamp, actor_id,
@@ -1592,7 +1592,7 @@
WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), and(equals(e.event, 'sign up'), ifNull(in(e__pdi.person_id,
(SELECT cohortpeople.person_id AS person_id
FROM cohortpeople
- WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 38))
+ WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 39))
GROUP BY cohortpeople.person_id, cohortpeople.cohort_id, cohortpeople.version
HAVING ifNull(greater(sum(cohortpeople.sign), 0), 0))), 0)))
GROUP BY value
@@ -1640,7 +1640,7 @@
WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), and(equals(e.event, 'sign up'), ifNull(in(e__pdi.person_id,
(SELECT cohortpeople.person_id AS person_id
FROM cohortpeople
- WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 38))
+ WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 39))
GROUP BY cohortpeople.person_id, cohortpeople.cohort_id, cohortpeople.version
HAVING ifNull(greater(sum(cohortpeople.sign), 0), 0))), 0)), or(ifNull(equals(transform(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), '$$_posthog_breakdown_null_$$'), ['$$_posthog_breakdown_other_$$', 'value', 'other_value'], ['$$_posthog_breakdown_other_$$', 'value', 'other_value'], '$$_posthog_breakdown_other_$$'), '$$_posthog_breakdown_other_$$'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), 'value'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), 'other_value'), 0)))
GROUP BY day_start,
@@ -1691,7 +1691,7 @@
WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), and(equals(e.event, 'sign up'), ifNull(in(ifNull(nullIf(e__override.override_person_id, '00000000-0000-0000-0000-000000000000'), e.person_id),
(SELECT cohortpeople.person_id AS person_id
FROM cohortpeople
- WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 39))
+ WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 40))
GROUP BY cohortpeople.person_id, cohortpeople.cohort_id, cohortpeople.version
HAVING ifNull(greater(sum(cohortpeople.sign), 0), 0))), 0)))
GROUP BY value
@@ -1738,7 +1738,7 @@
WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), and(equals(e.event, 'sign up'), ifNull(in(ifNull(nullIf(e__override.override_person_id, '00000000-0000-0000-0000-000000000000'), e.person_id),
(SELECT cohortpeople.person_id AS person_id
FROM cohortpeople
- WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 39))
+ WHERE and(equals(cohortpeople.team_id, 2), equals(cohortpeople.cohort_id, 40))
GROUP BY cohortpeople.person_id, cohortpeople.cohort_id, cohortpeople.version
HAVING ifNull(greater(sum(cohortpeople.sign), 0), 0))), 0)), or(ifNull(equals(transform(ifNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), '$$_posthog_breakdown_null_$$'), ['$$_posthog_breakdown_other_$$', 'value', 'other_value'], ['$$_posthog_breakdown_other_$$', 'value', 'other_value'], '$$_posthog_breakdown_other_$$'), '$$_posthog_breakdown_other_$$'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), 'value'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', ''), 'other_value'), 0)))
GROUP BY day_start,
diff --git a/posthog/hogql_queries/web_analytics/test/test_web_overview.py b/posthog/hogql_queries/web_analytics/test/test_web_overview.py
index 0d560ee6c182e..e4fc03121ab1b 100644
--- a/posthog/hogql_queries/web_analytics/test/test_web_overview.py
+++ b/posthog/hogql_queries/web_analytics/test/test_web_overview.py
@@ -35,10 +35,11 @@ def _create_events(self, data, event="$pageview"):
)
return person_result
- def _run_web_overview_query(self, date_from, date_to):
+ def _run_web_overview_query(self, date_from, date_to, compare=True):
query = WebOverviewQuery(
dateRange=DateRange(date_from=date_from, date_to=date_to),
properties=[],
+ compare=compare,
)
runner = WebOverviewQueryRunner(team=self.team, query=query)
return runner.calculate()
@@ -95,24 +96,24 @@ def test_all_time(self):
]
)
- results = self._run_web_overview_query("all", "2023-12-15").results
+ results = self._run_web_overview_query("all", "2023-12-15", compare=False).results
visitors = results[0]
self.assertEqual("visitors", visitors.key)
self.assertEqual(2, visitors.value)
- self.assertEqual(0, visitors.previous)
+ self.assertEqual(None, visitors.previous)
self.assertEqual(None, visitors.changeFromPreviousPct)
views = results[1]
self.assertEqual("views", views.key)
self.assertEqual(4, views.value)
- self.assertEqual(0, views.previous)
+ self.assertEqual(None, views.previous)
self.assertEqual(None, views.changeFromPreviousPct)
sessions = results[2]
self.assertEqual("sessions", sessions.key)
self.assertEqual(3, sessions.value)
- self.assertEqual(0, sessions.previous)
+ self.assertEqual(None, sessions.previous)
self.assertEqual(None, sessions.changeFromPreviousPct)
duration_s = results[3]
diff --git a/posthog/hogql_queries/web_analytics/web_overview.py b/posthog/hogql_queries/web_analytics/web_overview.py
index 26a9255c940cf..2019803faf78a 100644
--- a/posthog/hogql_queries/web_analytics/web_overview.py
+++ b/posthog/hogql_queries/web_analytics/web_overview.py
@@ -24,8 +24,9 @@ def to_query(self) -> ast.SelectQuery | ast.SelectUnionQuery:
mid = self.query_date_range.date_from_as_hogql()
end = self.query_date_range.date_to_as_hogql()
with self.timings.measure("overview_stats_query"):
- query = parse_select(
- """
+ if self.query.compare:
+ return parse_select(
+ """
WITH pages_query AS (
SELECT
uniq(if(timestamp >= {mid} AND timestamp < {end}, events.person_id, NULL)) AS unique_users,
@@ -86,21 +87,86 @@ def to_query(self) -> ast.SelectQuery | ast.SelectUnionQuery:
FROM pages_query
CROSS JOIN sessions_query
""",
- timings=self.timings,
- placeholders={
- "start": start,
- "mid": mid,
- "end": end,
- "event_properties": self.event_properties(),
- "session_where": self.session_where(include_previous_period=True),
- "session_having": self.session_having(include_previous_period=True),
- "sample_rate": self._sample_ratio,
- "sample_expr": ast.SampleExpr(sample_value=self._sample_ratio),
- },
- backend="cpp",
- )
+ timings=self.timings,
+ placeholders={
+ "start": start,
+ "mid": mid,
+ "end": end,
+ "event_properties": self.event_properties(),
+ "session_where": self.session_where(include_previous_period=True),
+ "session_having": self.session_having(include_previous_period=True),
+ "sample_rate": self._sample_ratio,
+ },
+ )
+ else:
+ return parse_select(
+ """
+WITH pages_query AS (
+ SELECT
+ uniq(events.person_id) AS unique_users,
+ count() AS current_pageviews,
+ uniq(events.properties.$session_id) AS unique_sessions
+ FROM
+ events
+ SAMPLE {sample_rate}
+ WHERE
+ event = '$pageview' AND
+ timestamp >= {mid} AND
+ timestamp < {end} AND
+ {event_properties}
+ ),
+sessions_query AS (
+ SELECT
+ avg(duration_s) AS avg_duration_s,
+ avg(is_bounce) AS bounce_rate
+ FROM (SELECT
+ events.properties.`$session_id` AS session_id,
+ min(events.timestamp) AS min_timestamp,
+ max(events.timestamp) AS max_timestamp,
+ dateDiff('second', min_timestamp, max_timestamp) AS duration_s,
+ countIf(events.event == '$pageview') AS num_pageviews,
+ countIf(events.event == '$autocapture') AS num_autocaptures,
- return query
+ -- definition of a GA4 bounce from here https://support.google.com/analytics/answer/12195621?hl=en
+ (num_autocaptures == 0 AND num_pageviews <= 1 AND duration_s < 10) AS is_bounce
+ FROM
+ events
+ SAMPLE {sample_rate}
+ WHERE
+ session_id IS NOT NULL
+ AND (events.event == '$pageview' OR events.event == '$autocapture' OR events.event == '$pageleave')
+ AND ({session_where})
+ GROUP BY
+ events.properties.`$session_id`
+ HAVING
+ ({session_having})
+ )
+ )
+SELECT
+ unique_users,
+ NULL as previous_unique_users,
+ current_pageviews,
+ NULL as previous_pageviews,
+ unique_sessions,
+ NULL as previous_unique_sessions,
+ avg_duration_s,
+ NULL as prev_avg_duration_s,
+ bounce_rate,
+ NULL as prev_bounce_rate
+FROM pages_query
+CROSS JOIN sessions_query
+ """,
+ timings=self.timings,
+ placeholders={
+ "start": start,
+ "mid": mid,
+ "end": end,
+ "event_properties": self.event_properties(),
+ "session_where": self.session_where(include_previous_period=False),
+ "session_having": self.session_having(include_previous_period=False),
+ "sample_rate": self._sample_ratio,
+ },
+ )
def calculate(self):
response = execute_hogql_query(
diff --git a/posthog/schema.py b/posthog/schema.py
index c82ea0682f99a..9e5fcc92a12b3 100644
--- a/posthog/schema.py
+++ b/posthog/schema.py
@@ -1540,6 +1540,7 @@ class WebOverviewQuery(BaseModel):
model_config = ConfigDict(
extra="forbid",
)
+ compare: Optional[bool] = None
dateRange: Optional[DateRange] = None
kind: Literal["WebOverviewQuery"] = "WebOverviewQuery"
properties: List[Union[EventPropertyFilter, PersonPropertyFilter]]
diff --git a/posthog/session_recordings/queries/session_replay_events.py b/posthog/session_recordings/queries/session_replay_events.py
index fb1f2ea30329e..8667a26d97d66 100644
--- a/posthog/session_recordings/queries/session_replay_events.py
+++ b/posthog/session_recordings/queries/session_replay_events.py
@@ -1,4 +1,4 @@
-from datetime import datetime
+from datetime import datetime, timedelta
from typing import Optional, Tuple, List
from django.conf import settings
@@ -124,8 +124,10 @@ def get_events(
hq = HogQLQuery(
query=q,
values={
- "start_time": metadata["start_time"],
- "end_time": metadata["end_time"],
+ # add some wiggle room to the timings, to ensure we get all the events
+ # the time range is only to stop CH loading too much data to find the session
+ "start_time": metadata["start_time"] - timedelta(seconds=100),
+ "end_time": metadata["end_time"] + timedelta(seconds=100),
"session_id": session_id,
"events_to_ignore": events_to_ignore,
},
diff --git a/posthog/session_recordings/session_recording_api.py b/posthog/session_recordings/session_recording_api.py
index 9780602f53cb4..6f101fce80e7b 100644
--- a/posthog/session_recordings/session_recording_api.py
+++ b/posthog/session_recordings/session_recording_api.py
@@ -44,7 +44,7 @@
)
from posthog.session_recordings.queries.session_replay_events import SessionReplayEvents
from posthog.session_recordings.realtime_snapshots import get_realtime_snapshots, publish_subscription
-from posthog.session_recordings.session_summary.summarize_session import summarize_recording
+from ee.session_recordings.session_summary.summarize_session import summarize_recording
from posthog.session_recordings.snapshots.convert_legacy_snapshots import (
convert_original_version_lts_recording,
)
diff --git a/posthog/session_recordings/sql/session_replay_embeddings_sql.py b/posthog/session_recordings/sql/session_replay_embeddings_sql.py
new file mode 100644
index 0000000000000..dd0d26e7aca61
--- /dev/null
+++ b/posthog/session_recordings/sql/session_replay_embeddings_sql.py
@@ -0,0 +1,87 @@
+from django.conf import settings
+
+from posthog.clickhouse.table_engines import (
+ Distributed,
+ ReplicationScheme,
+ MergeTreeEngine,
+)
+
+"""
+We want to use ML to convert session replay data to embeddings; these will let us check similarity between sessions
+and so to cluster sessions. We will store the embeddings in a separate table to the session replay data, so we can
+easily iterate on the schema, and so we don't ever have to join recordings data in Postgres and CH
+
+Expected queries will be to load sets of embeddings, by team and date, and to insert embeddings for a session
+And to allow us to select sessions by similarity
+And to select sessions from session_replay_event which don't have an embedding yet (for processing)
+"""
+
+SESSION_REPLAY_EMBEDDINGS_DATA_TABLE = lambda: "sharded_session_replay_embeddings"
+
+SESSION_REPLAY_EMBEDDINGS_TABLE_BASE_SQL = """
+CREATE TABLE IF NOT EXISTS {table_name} ON CLUSTER '{cluster}'
+(
+ -- part of order by so will aggregate correctly
+ session_id VARCHAR,
+ -- part of order by so will aggregate correctly
+ team_id Int64,
+ embeddings Array(Float32),
+ generation_timestamp DateTime64(6, 'UTC') DEFAULT NOW('UTC'),
+ -- we will insert directly for the first test of this
+ -- so no _timestamp or _offset column
+ --_timestamp SimpleAggregateFunction(max, DateTime)
+) ENGINE = {engine}
+"""
+
+SESSION_REPLAY_EMBEDDINGS_DATA_TABLE_ENGINE = lambda: MergeTreeEngine(
+ "session_replay_embeddings", replication_scheme=ReplicationScheme.SHARDED
+)
+
+SESSION_REPLAY_EMBEDDINGS_TABLE_SQL = lambda: (
+ SESSION_REPLAY_EMBEDDINGS_TABLE_BASE_SQL
+ + """
+ PARTITION BY toYYYYMM(generation_timestamp)
+ -- order by must be in order of increasing cardinality
+ -- so we order by date first, then team_id, then session_id
+ -- hopefully, this is a good balance between the two
+ ORDER BY (toDate(generation_timestamp), team_id, session_id)
+ -- we don't want to keep embeddings forever, so we will set a TTL
+ -- the max any individual recording could survive is 1 year, so...
+ TTL toDate(generation_timestamp) + INTERVAL 1 YEAR
+SETTINGS index_granularity=512
+"""
+).format(
+ table_name=SESSION_REPLAY_EMBEDDINGS_DATA_TABLE(),
+ cluster=settings.CLICKHOUSE_CLUSTER,
+ engine=SESSION_REPLAY_EMBEDDINGS_DATA_TABLE_ENGINE(),
+)
+
+# Distributed engine tables are only created if CLICKHOUSE_REPLICATED
+
+# This table is responsible for writing to sharded_session_replay_embeddings based on a sharding key.
+WRITABLE_SESSION_REPLAY_EMBEDDINGS_TABLE_SQL = lambda: SESSION_REPLAY_EMBEDDINGS_TABLE_BASE_SQL.format(
+ table_name="writable_session_replay_embeddings",
+ cluster=settings.CLICKHOUSE_CLUSTER,
+ engine=Distributed(
+ data_table=SESSION_REPLAY_EMBEDDINGS_DATA_TABLE(),
+ sharding_key="sipHash64(session_id)",
+ ),
+)
+
+# This table is responsible for reading from session_replay_embeddings on a cluster setting
+DISTRIBUTED_SESSION_REPLAY_EMBEDDINGS_TABLE_SQL = lambda: SESSION_REPLAY_EMBEDDINGS_TABLE_BASE_SQL.format(
+ table_name="session_replay_embeddings",
+ cluster=settings.CLICKHOUSE_CLUSTER,
+ engine=Distributed(
+ data_table=SESSION_REPLAY_EMBEDDINGS_DATA_TABLE(),
+ sharding_key="sipHash64(session_id)",
+ ),
+)
+
+DROP_SESSION_REPLAY_EMBEDDINGS_TABLE_SQL = lambda: (
+ f"DROP TABLE IF EXISTS {SESSION_REPLAY_EMBEDDINGS_DATA_TABLE()} ON CLUSTER '{settings.CLICKHOUSE_CLUSTER}'"
+)
+
+TRUNCATE_SESSION_REPLAY_EMBEDDINGS_TABLE_SQL = lambda: (
+ f"TRUNCATE TABLE IF EXISTS {SESSION_REPLAY_EMBEDDINGS_DATA_TABLE()} ON CLUSTER '{settings.CLICKHOUSE_CLUSTER}'"
+)
diff --git a/posthog/settings/session_replay.py b/posthog/settings/session_replay.py
index e76866f599948..943e4466e82d2 100644
--- a/posthog/settings/session_replay.py
+++ b/posthog/settings/session_replay.py
@@ -1,4 +1,6 @@
-from posthog.settings import get_from_env
+from typing import List
+
+from posthog.settings import get_from_env, get_list, DEBUG
from posthog.utils import str_to_bool
# TRICKY: we saw unusual memory usage behavior in EU clickhouse cluster
@@ -15,3 +17,9 @@
REALTIME_SNAPSHOTS_FROM_REDIS_ATTEMPT_TIMEOUT_SECONDS = get_from_env(
"REALTIME_SNAPSHOTS_FROM_REDIS_ATTEMPT_TIMEOUT_SECONDS", 0.2, type_cast=float
)
+
+REPLAY_EMBEDDINGS_CALCULATION_CELERY_INTERVAL_SECONDS = get_from_env(
+ "REPLAY_EMBEDDINGS_CALCULATION_CELERY_INTERVAL_SECONDS", 30 if DEBUG else 300, type_cast=int
+)
+
+REPLAY_EMBEDDINGS_ALLOWED_TEAMS: List[str] = get_list(get_from_env("REPLAY_EMBEDDINGS_ALLOWED_TEAM", "", type_cast=str))
diff --git a/posthog/tasks/scheduled.py b/posthog/tasks/scheduled.py
index 85d39d4bcb904..e7b29070db18a 100644
--- a/posthog/tasks/scheduled.py
+++ b/posthog/tasks/scheduled.py
@@ -45,6 +45,7 @@
update_event_partitions,
update_quota_limiting,
verify_persons_data_in_sync,
+ calculate_replay_embeddings,
)
from posthog.utils import get_crontab
@@ -238,6 +239,16 @@ def setup_periodic_tasks(sender: Celery, **kwargs: Any) -> None:
)
if settings.EE_AVAILABLE:
+ # every interval seconds, we calculate N replay embeddings
+ # the goal is to process _enough_ every 24 hours that
+ # there is a meaningful playlist to test with
+ add_periodic_task_with_expiry(
+ sender,
+ settings.REPLAY_EMBEDDINGS_CALCULATION_CELERY_INTERVAL_SECONDS,
+ calculate_replay_embeddings.s(),
+ name="calculate replay embeddings",
+ )
+
sender.add_periodic_task(
crontab(hour="0", minute=str(randrange(0, 40))),
clickhouse_send_license_usage.s(),
diff --git a/posthog/tasks/tasks.py b/posthog/tasks/tasks.py
index bc4b3910766ce..d3747fcb102a4 100644
--- a/posthog/tasks/tasks.py
+++ b/posthog/tasks/tasks.py
@@ -14,6 +14,10 @@
from posthog.redis import get_client
from posthog.tasks.utils import CeleryQueue
+from structlog import get_logger
+
+logger = get_logger(__name__)
+
@shared_task(ignore_result=True)
def delete_expired_exported_assets() -> None:
@@ -716,3 +720,17 @@ def check_data_import_row_limits() -> None:
pass
else:
check_synced_row_limits()
+
+
+# this task runs a CH query and triggers other tasks
+# it can run on the default queue
+@shared_task(ignore_result=True)
+def calculate_replay_embeddings() -> None:
+ try:
+ from ee.tasks.replay import generate_recordings_embeddings_batch
+
+ generate_recordings_embeddings_batch()
+ except ImportError:
+ pass
+ except Exception as e:
+ logger.error("Failed to calculate replay embeddings", error=e, exc_info=True)
diff --git a/posthog/tasks/utils.py b/posthog/tasks/utils.py
index ecabc29adac79..fa2c33cb2bacb 100644
--- a/posthog/tasks/utils.py
+++ b/posthog/tasks/utils.py
@@ -35,3 +35,4 @@ class CeleryQueue(Enum):
EXPORTS = "exports"
SUBSCRIPTION_DELIVERY = "subscription_delivery"
USAGE_REPORTS = "usage_reports"
+ SESSION_REPLAY_EMBEDDINGS = "session_replay_embeddings"